From 293abfd13968f2296e3a7f923a54efdac3df5fcc Mon Sep 17 00:00:00 2001 From: Gaurav Nelson <23069445+gaurav-nelson@users.noreply.github.com> Date: Mon, 27 Apr 2026 22:10:42 +1000 Subject: [PATCH 1/4] Add agent discovery features for AI readiness Add robots.txt AI crawler rules with Content Signals, Link discovery tags in HTML head (RFC 8288), .well-known/api-catalog (RFC 9727), MCP server card, and agent skills index. --- layouts/partials/head.html | 5 ++++ layouts/robots.txt | 33 ++++++++++++++++++++++ static/.well-known/agent-skills/index.json | 23 +++++++++++++++ static/.well-known/api-catalog | 23 +++++++++++++++ static/.well-known/mcp/server-card.json | 30 ++++++++++++++++++++ 5 files changed, 114 insertions(+) create mode 100644 layouts/robots.txt create mode 100644 static/.well-known/agent-skills/index.json create mode 100644 static/.well-known/api-catalog create mode 100644 static/.well-known/mcp/server-card.json diff --git a/layouts/partials/head.html b/layouts/partials/head.html index 26688cedbf..51d4655c6b 100644 --- a/layouts/partials/head.html +++ b/layouts/partials/head.html @@ -38,6 +38,11 @@ {{- /* Favicon */ -}} + + {{- /* Agent Discovery Links (RFC 8288 / RFC 9727) */ -}} + + + {{- /* DNS Prefetch and Preconnect for Performance */ -}} diff --git a/layouts/robots.txt b/layouts/robots.txt new file mode 100644 index 0000000000..b750e74775 --- /dev/null +++ b/layouts/robots.txt @@ -0,0 +1,33 @@ +User-agent: * +Allow: / + +User-agent: GPTBot +Allow: / + +User-agent: OAI-SearchBot +Allow: / + +User-agent: Claude-Web +Allow: / + +User-agent: Google-Extended +Allow: / + +User-agent: Applebot-Extended +Allow: / + +User-agent: CCBot +Allow: / + +User-agent: PerplexityBot +Allow: / + +User-agent: Bytespider +Disallow: / + +User-agent: AhrefsBot +Disallow: / + +Sitemap: {{ "sitemap.xml" | absURL }} + +Content-Signal: ai-train=no, search=yes, ai-input=yes diff --git a/static/.well-known/agent-skills/index.json b/static/.well-known/agent-skills/index.json new 
file mode 100644 index 0000000000..f5bd277571 --- /dev/null +++ b/static/.well-known/agent-skills/index.json @@ -0,0 +1,23 @@ +{ + "$schema": "https://agentskills.io/schema/index.json", + "skills": [ + { + "name": "validated-patterns-docs", + "type": "documentation", + "description": "Browse documentation for Red Hat Validated Patterns — GitOps-driven, automated application stacks for hybrid cloud deployments on OpenShift.", + "url": "https://validatedpatterns.io/llms.txt" + }, + { + "name": "pattern-catalog", + "type": "catalog", + "description": "Discover 30+ validated patterns organized by industry, Red Hat product, and partner, with deployment guides and architecture details.", + "url": "https://validatedpatterns.io/patterns/" + }, + { + "name": "quickstart-guide", + "type": "tutorial", + "description": "Step-by-step guide to deploying your first validated pattern on OpenShift using the GitOps framework.", + "url": "https://validatedpatterns.io/learn/quickstart/" + } + ] +} diff --git a/static/.well-known/api-catalog b/static/.well-known/api-catalog new file mode 100644 index 0000000000..2e1384fce9 --- /dev/null +++ b/static/.well-known/api-catalog @@ -0,0 +1,23 @@ +{ + "linkset": [ + { + "anchor": "https://validatedpatterns.io/", + "service-doc": [ + { + "href": "https://validatedpatterns.io/learn/quickstart/", + "type": "text/html" + } + ], + "describedby": [ + { + "href": "https://validatedpatterns.io/llms.txt", + "type": "text/plain" + }, + { + "href": "https://validatedpatterns.io/sitemap.xml", + "type": "application/xml" + } + ] + } + ] +} diff --git a/static/.well-known/mcp/server-card.json b/static/.well-known/mcp/server-card.json new file mode 100644 index 0000000000..25cdfc83b2 --- /dev/null +++ b/static/.well-known/mcp/server-card.json @@ -0,0 +1,30 @@ +{ + "serverInfo": { + "name": "Validated Patterns Documentation", + "version": "1.0.0", + "description": "Documentation site for Red Hat Validated Patterns — GitOps-driven application stacks for 
hybrid cloud deployments on OpenShift." }, + "homepage": "https://validatedpatterns.io/", + "capabilities": { + "resources": true, + "tools": false, + "prompts": false + }, + "resources": [ + { + "uri": "https://validatedpatterns.io/llms.txt", + "name": "LLM-friendly documentation index", + "mimeType": "text/plain" + }, + { + "uri": "https://validatedpatterns.io/sitemap.xml", + "name": "Sitemap", + "mimeType": "application/xml" + }, + { + "uri": "https://validatedpatterns.io/patterns/", + "name": "All validated patterns", + "mimeType": "text/html" + } + ] +} From c492191c9190bdd7d2b4d024632d02833e29959b Mon Sep 17 00:00:00 2001 From: Gaurav Nelson <23069445+gaurav-nelson@users.noreply.github.com> Date: Wed, 18 Mar 2026 10:35:10 +1000 Subject: [PATCH 2/4] feat: Added Markdown generation for markdown versions of all pages. - updated llms.txt page built by hugo - added llms-full.txt --- Makefile | 11 +- config.yaml | 9 + layouts/index.llmstxt.txt | 28 +++ layouts/partials/page-actions.html | 23 +-- static/js/page-actions.js | 19 +- static/llms.txt | 60 ------- utils/generate-md.js | 268 +++++++++++++++++++++++++++++ 7 files changed, 332 insertions(+), 86 deletions(-) create mode 100644 layouts/index.llmstxt.txt delete mode 100644 static/llms.txt create mode 100644 utils/generate-md.js diff --git a/Makefile b/Makefile index cb7eed98a0..f2cc9b42c3 100644 --- a/Makefile +++ b/Makefile @@ -5,7 +5,7 @@ UNAME=$(shell uname -s) # Also because of the proxy 127.0.0.1 doesn't work as a bind address.
ifeq ($(UNAME), Darwin) PODMAN_OPTS ?= -it --security-opt label=disable --pull=newer -p 4000:4000 - HUGO_SERVER_OPTS = --bind 0.0.0.0 + HUGO_SERVER_OPTS = --bind 0.0.0.0 else PODMAN_OPTS ?= -it --security-opt label=disable --pull=newer --net=host endif @@ -35,12 +35,17 @@ test: htmltest ## Runs tests .PHONY: build build: ## Build the website locally in the public/ folder - podman run $(PODMAN_OPTS) -v $(PWD):/site:$(ATTRS) --entrypoint hugo $(HOMEPAGE_CONTAINER) + podman run $(PODMAN_OPTS) -v $(PWD):/site:$(ATTRS) $(HOMEPAGE_CONTAINER) -c "hugo && node utils/generate-md.js" + +.PHONY: generate-md +generate-md: ## Generate Markdown versions of all pages in public/ + node utils/generate-md.js .PHONY: serve serve: ## Build the website locally from a container and serve it @echo "Serving via container. Browse to http://localhost:4000" - podman run $(PODMAN_OPTS) -v $(PWD):/site:$(ATTRS) --entrypoint hugo $(HOMEPAGE_CONTAINER) server -p 4000 $(HUGO_SERVER_OPTS) + podman run $(PODMAN_OPTS) -v $(PWD):/site:$(ATTRS) $(HOMEPAGE_CONTAINER) -c "hugo && node utils/generate-md.js && hugo server -p 4000 $(HUGO_SERVER_OPTS)" + .PHONY: htmltest htmltest: build ## Runs htmltest against the site to find broken links diff --git a/config.yaml b/config.yaml index 52770e0d19..077c8d5e49 100644 --- a/config.yaml +++ b/config.yaml @@ -45,6 +45,11 @@ mediaTypes: outputFormats: patterns: mediatype: application/json + llmstxt: + mediatype: text/plain + baseName: llms + isPlainText: true + notAlternative: true menus: main: @@ -56,3 +61,7 @@ sitemap: priority: 0.5 filename: sitemap.xml enableRobotsTXT: true +outputs: + home: + - html + - llmstxt diff --git a/layouts/index.llmstxt.txt b/layouts/index.llmstxt.txt new file mode 100644 index 0000000000..56ebef3d4b --- /dev/null +++ b/layouts/index.llmstxt.txt @@ -0,0 +1,28 @@ +# {{ .Site.Title }} + +> {{ .Site.Params.description }} + +Important notes: + +- Validated Patterns are built on OpenShift Container Platform (Kubernetes) and leverage 
GitOps principles using ArgoCD, Red Hat Advanced Cluster Management (RHACM), and Tekton +- They are designed for IT architects, advanced developers, and system administrators familiar with Kubernetes and OpenShift +- Patterns can be deployed using either the OpenShift-based Validated Patterns framework or the Ansible GitOps Framework (AGOF) +- All patterns require an available OpenShift 4.12+ cluster with cluster-admin privileges, at least 8 CPU cores, 16GB RAM, and dynamic storage provisioning +- Each pattern includes values files (values-global.yaml, values-hub.yaml) for customization and secrets management without committing sensitive data to git repositories +- Markdown versions of all pages are available by appending index.md to any page URL. For example: https://validatedpatterns.io/learn/quickstart/index.md +- Full documentation in a single file: https://validatedpatterns.io/llms-full.txt + +## Learn +{{ range where .Site.RegularPages "Section" "learn" }} +- [{{ .Title }}]({{ .Permalink }}): {{ with .Params.summary }}{{ . }}{{ else }}{{ .Summary | plainify | truncate 150 }}{{ end }} +{{- end }} + +## Patterns +{{ range .Site.Sections }}{{ if eq .Section "patterns" }}{{ range .Sections }} +- [{{ .Title }}]({{ .Permalink }}): {{ with .Params.summary }}{{ . }}{{ else }}{{ .Summary | plainify | truncate 150 }}{{ end }} +{{- end }}{{ end }}{{ end }} + +## Contributing +{{ range where .Site.RegularPages "Section" "contribute" }} +- [{{ .Title }}]({{ .Permalink }}): {{ with .Params.summary }}{{ . 
}}{{ else }}{{ .Summary | plainify | truncate 150 }}{{ end }} +{{- end }} diff --git a/layouts/partials/page-actions.html b/layouts/partials/page-actions.html index 15b2adddd5..88f9edce3e 100644 --- a/layouts/partials/page-actions.html +++ b/layouts/partials/page-actions.html @@ -1,15 +1,4 @@ -{{- $repo := .Site.Params.github_repo | default "validatedpatterns/docs" -}} -{{- $branch := .Site.Params.github_branch | default "main" -}} -{{- $filePath := "" -}} -{{- $rawUrl := "" -}} -{{- $sourceLabel := "View source" -}} -{{- if .File -}} - {{- $filePath = .File.Path -}} - {{- $rawUrl = printf "https://raw.githubusercontent.com/%s/%s/content/%s" $repo $branch $filePath -}} - {{- if strings.HasSuffix $filePath ".md" -}} - {{- $sourceLabel = "View as Markdown" -}} - {{- end -}} -{{- end -}} +{{- $mdUrl := printf "%sindex.md" .Permalink -}}
- {{- if $rawUrl }}
  • - - {{ $sourceLabel }} + + View as Markdown
  • - {{- end }}
  • - + Open in Claude
  • - + Open in ChatGPT
  • diff --git a/static/js/page-actions.js b/static/js/page-actions.js index dba150d8c4..c81644f2cc 100644 --- a/static/js/page-actions.js +++ b/static/js/page-actions.js @@ -88,11 +88,20 @@ } function copyPageContent(triggerEl) { - var content = document.querySelector(".pf-c-content"); - if (!content) return; - - var text = content.innerText || content.textContent; - copyToClipboard(text, triggerEl); + var mdUrl = window.location.href.replace(/\/?$/, "/") + "index.md"; + fetch(mdUrl) + .then(function (res) { + if (!res.ok) throw new Error("not found"); + return res.text(); + }) + .then(function (text) { + copyToClipboard(text, triggerEl); + }) + .catch(function () { + var content = document.querySelector(".pf-c-content"); + if (!content) return; + copyToClipboard(content.innerText || content.textContent, triggerEl); + }); } function copyToClipboard(text, triggerEl) { diff --git a/static/llms.txt b/static/llms.txt deleted file mode 100644 index ec8e99a2db..0000000000 --- a/static/llms.txt +++ /dev/null @@ -1,60 +0,0 @@ -# Validated Patterns - -> Validated Patterns are GitOps-driven, automated, and rigorously tested application stacks for hybrid cloud deployments on Red Hat OpenShift. They go beyond traditional reference architectures by providing deployable, testable software artifacts with automated deployment that enhance speed, reliability, and consistency across environments. 
- -Important notes: - -- Validated Patterns are built on OpenShift Container Platform (Kubernetes) and leverage GitOps principles using ArgoCD, Red Hat Advanced Cluster Management (RHACM), and Tekton -- They are designed for IT architects, advanced developers, and system administrators familiar with Kubernetes and OpenShift -- Patterns can be deployed using either the OpenShift-based Validated Patterns framework or the Ansible GitOps Framework (AGOF) -- All patterns require an available OpenShift 4.12+ cluster with cluster-admin privileges, at least 8 CPU cores, 16GB RAM, and dynamic storage provisioning -- Each pattern includes values files (values-global.yaml, values-hub.yaml) for customization and secrets management without committing sensitive data to git repositories - -## Getting Started - -- [Patterns quick start](https://validatedpatterns.io/learn/quickstart/): Streamlined guide to deploying your first validated pattern with step-by-step instructions -- [About Validated Patterns](https://validatedpatterns.io/learn/about-validated-patterns/): Overview of what Validated Patterns are, why they exist, and their advantages over traditional reference architectures -- [Key concepts](https://validatedpatterns.io/learn/keyconcepts/): Core concepts and terminology used throughout the Validated Patterns framework -- [Workflow](https://validatedpatterns.io/learn/workflow/): Understanding the deployment workflow and GitOps processes - -## Core Documentation - -- [Validated Patterns frameworks](https://validatedpatterns.io/learn/validated_patterns_frameworks/): Overview of the OpenShift and Ansible-based frameworks -- [Using the Validated Pattern Operator](https://validatedpatterns.io/learn/using-validated-pattern-operator/): How to install and use the operator for pattern deployment -- [Values files](https://validatedpatterns.io/learn/values-files/): Understanding and customizing values files for pattern configuration -- [Secrets 
management](https://validatedpatterns.io/learn/secrets-management-in-the-Validated-Patterns-framework/): How secrets are managed securely in the framework -- [ClusterGroup in values files](https://validatedpatterns.io/learn/clustergroup-in-values-files/): Understanding ClusterGroup configuration for multi-cluster deployments -- [Importing a cluster](https://validatedpatterns.io/learn/importing-a-cluster/): How to import edge or managed clusters into the hub cluster - -## Pattern Examples - -- [Multicloud GitOps](https://validatedpatterns.io/patterns/multicloud-gitops/): Foundational pattern demonstrating GitOps across multiple clusters - recommended starting point -- [Industrial Edge](https://validatedpatterns.io/patterns/industrial-edge/): Edge computing pattern for manufacturing and industrial use cases -- [Medical Diagnosis](https://validatedpatterns.io/patterns/medical-diagnosis/): AI/ML pattern for medical imaging and diagnosis with Intel optimizations -- [Retail](https://validatedpatterns.io/patterns/retail/): Retail industry pattern with edge deployments for store locations -- [Ansible Edge GitOps](https://validatedpatterns.io/patterns/ansible-edge-gitops/): Pattern combining Ansible Automation Platform with GitOps at the edge -- [DevSecOps](https://validatedpatterns.io/patterns/devsecops/): Secure software supply chain and development pipeline pattern -- [RAG LLM GitOps](https://validatedpatterns.io/patterns/rag-llm-gitops/): Retrieval-Augmented Generation pattern for Large Language Models -- [OpenShift AI](https://validatedpatterns.io/patterns/openshift-ai/): Pattern for deploying and managing AI/ML workloads on OpenShift - -## Advanced Topics - -- [Pattern structure](https://validatedpatterns.io/learn/vp_structure_vp_pattern/): Deep dive into the structure and components of a validated pattern -- [Ansible GitOps Framework (AGOF)](https://validatedpatterns.io/learn/vp_agof/): Using Ansible for GitOps-based deployments -- [OpenShift 
framework](https://validatedpatterns.io/learn/vp_openshift_framework/): Details on the OpenShift-based framework -- [Infrastructure](https://validatedpatterns.io/learn/infrastructure/): Infrastructure considerations for pattern deployments -- [Cluster sizing](https://validatedpatterns.io/learn/ocp-cluster-general-sizing/): General guidance on OpenShift cluster sizing for patterns - -## Contributing - -- [Contribute to Validated Patterns](https://validatedpatterns.io/contribute/): How to contribute to the project -- [Documentation guidelines](https://validatedpatterns.io/contribute/documentation-guidelines/): Guidelines for contributing to documentation -- [Preview documentation locally](https://validatedpatterns.io/contribute/contribute-to-docs/): How to build and preview the documentation site - -## Optional - -- [FAQ](https://validatedpatterns.io/learn/faq/): Frequently asked questions about Validated Patterns -- [About pattern tiers and types](https://validatedpatterns.io/learn/about-pattern-tiers-types/): Understanding different pattern maturity levels and classifications -- [HashiCorp Vault](https://validatedpatterns.io/learn/vault/): Using Vault for secrets management in patterns -- [All available patterns](https://validatedpatterns.io/patterns/): Browse all 30+ patterns by industry, product, and use case - diff --git a/utils/generate-md.js b/utils/generate-md.js new file mode 100644 index 0000000000..1c138d6822 --- /dev/null +++ b/utils/generate-md.js @@ -0,0 +1,268 @@ +#!/usr/bin/env node +// Generates Markdown versions of all content pages and places them +// in public/ alongside the HTML output (as index.md). 
+// +// - AsciiDoc files: reduced (includes resolved) then converted via downdoc +// - Markdown files: raw content copied +// - Frontmatter is stripped and replaced with a clean title + llms.txt link +// - Sibling page links are added at the bottom +// +// Usage: node utils/generate-md.js + +const fs = require('fs') +const path = require('path') + +const Asciidoctor = require('asciidoctor')() +const reducer = require('@asciidoctor/reducer') +const downdoc = require('downdoc') + +const PROJECT_DIR = path.join(__dirname, '..') +const CONTENT_DIR = path.join(PROJECT_DIR, 'content') +const PUBLIC_DIR = path.join(PROJECT_DIR, 'public') +const SITE_URL = 'https://validatedpatterns.io' + +// Sections to skip (no useful content for LLMs) +const SKIP_SECTIONS = ['search', 'ci'] + +function findContentFiles (dir, files = []) { + for (const entry of fs.readdirSync(dir, { withFileTypes: true })) { + const fullPath = path.join(dir, entry.name) + if (entry.isDirectory()) { + findContentFiles(fullPath, files) + } else if (entry.name.endsWith('.adoc') || entry.name.endsWith('.md')) { + files.push(fullPath) + } + } + return files +} + +function getOutputPath (contentPath) { + const rel = path.relative(CONTENT_DIR, contentPath) + const parts = rel.split(path.sep) + + // Skip excluded sections + if (SKIP_SECTIONS.includes(parts[0])) return null + + const basename = path.basename(rel) + const dir = path.dirname(rel) + + if (basename.startsWith('_index.')) { + return path.join(PUBLIC_DIR, dir, 'index.md') + } else { + const slug = basename.replace(/\.(adoc|md)$/, '') + return path.join(PUBLIC_DIR, dir, slug, 'index.md') + } +} + +function getPageUrl (contentPath) { + const rel = path.relative(CONTENT_DIR, contentPath) + const basename = path.basename(rel) + const dir = path.dirname(rel) + + if (basename.startsWith('_index.')) { + return `${SITE_URL}/${dir}/` + } else { + const slug = basename.replace(/\.(adoc|md)$/, '') + return `${SITE_URL}/${dir}/${slug}/` + } +} + +function 
parseFrontmatter (content) { + const match = content.match(/^---\n([\s\S]*?)\n---\n?/) + if (!match) return { frontmatter: {}, body: content } + + const fm = {} + for (const line of match[1].split('\n')) { + const m = line.match(/^(\w[\w_-]*):\s*(.+)/) + if (m) { + let val = m[2].trim() + // Strip surrounding quotes + if ((val.startsWith("'") && val.endsWith("'")) || + (val.startsWith('"') && val.endsWith('"'))) { + val = val.slice(1, -1) + } + fm[m[1]] = val + } + } + + return { frontmatter: fm, body: content.slice(match[0].length) } +} + +function convertAsciidoc (filePath) { + const registry = Asciidoctor.Extensions.create() + reducer.register(registry) + + const doc = Asciidoctor.loadFile(filePath, { + extension_registry: registry, + safe: 'unsafe', + base_dir: PROJECT_DIR + }) + + const reducedSource = doc.getSource() + return downdoc(reducedSource) +} + +function fixImagePaths (markdown) { + return markdown.replace(/\([^)]*\/images\/(\/images\/[^)]+)\)/g, + (_, imgPath) => `(${SITE_URL}${imgPath})`) +} + +function getSiblings (contentPath) { + const dir = path.dirname(contentPath) + const basename = path.basename(contentPath) + const siblings = [] + + try { + for (const entry of fs.readdirSync(dir, { withFileTypes: true })) { + if (!entry.isFile()) continue + if (entry.name === basename) continue + if (!entry.name.endsWith('.adoc') && !entry.name.endsWith('.md')) continue + if (entry.name.startsWith('_index.')) continue + + const siblingPath = path.join(dir, entry.name) + const sibContent = fs.readFileSync(siblingPath, 'utf8') + const { frontmatter } = parseFrontmatter(sibContent) + const title = frontmatter.title || entry.name.replace(/\.(adoc|md)$/, '') + const url = getPageUrl(siblingPath) + siblings.push({ title, url }) + } + } catch (_) { + // Directory read failed, return empty + } + + return siblings +} + +function buildPage (contentPath, body, title, summary) { + const pageUrl = getPageUrl(contentPath) + const lines = [] + + // Header + lines.push(`# 
${title}`) + lines.push('') + if (summary) { + lines.push(`> ${summary}`) + lines.push('') + } + lines.push(`> This page is part of the [Validated Patterns](${SITE_URL}) documentation.`) + lines.push(`> For a complete documentation index, see [llms.txt](${SITE_URL}/llms.txt)`) + lines.push(`> HTML version: ${pageUrl}`) + lines.push('') + + // Body + lines.push(body.trim()) + lines.push('') + + // Sibling pages + const siblings = getSiblings(contentPath) + if (siblings.length > 0) { + lines.push('---') + lines.push('') + lines.push('## Related pages') + lines.push('') + for (const s of siblings) { + lines.push(`- [${s.title}](${s.url}index.md)`) + } + lines.push('') + } + + return lines.join('\n') +} + +function processFile (filePath) { + let rawContent + if (filePath.endsWith('.adoc')) { + rawContent = fixImagePaths(convertAsciidoc(filePath)) + } else { + rawContent = fs.readFileSync(filePath, 'utf8') + } + + const { frontmatter, body } = parseFrontmatter(rawContent) + const title = frontmatter.title || path.basename(filePath).replace(/\.(adoc|md)$/, '') + const summary = frontmatter.summary || '' + + return buildPage(filePath, body, title, summary) +} + +function main () { + const files = findContentFiles(CONTENT_DIR) + let converted = 0 + let skipped = 0 + let errors = 0 + + for (const filePath of files) { + const outputPath = getOutputPath(filePath) + if (!outputPath) { + skipped++ + continue + } + + const outputDir = path.dirname(outputPath) + if (!fs.existsSync(outputDir)) { + skipped++ + continue + } + + try { + const markdown = processFile(filePath) + fs.writeFileSync(outputPath, markdown, 'utf8') + converted++ + } catch (err) { + console.error(`Error converting ${path.relative(CONTENT_DIR, filePath)}: ${err.message}`) + errors++ + } + } + + console.log(`Generated ${converted} markdown files (${skipped} skipped, ${errors} errors)`) + + // Generate llms-full.txt by concatenating all generated markdown files + generateLlmsFullTxt() +} + +function 
generateLlmsFullTxt () { + const sections = ['learn', 'patterns', 'contribute'] + const parts = [] + + parts.push(`# Validated Patterns — Full Documentation`) + parts.push('') + parts.push(`> This file contains the complete documentation for Validated Patterns.`) + parts.push(`> For a page index, see ${SITE_URL}/llms.txt`) + parts.push(`> For individual pages in markdown, append index.md to any page URL.`) + parts.push('') + + for (const section of sections) { + const sectionDir = path.join(PUBLIC_DIR, section) + if (!fs.existsSync(sectionDir)) continue + + const mdFiles = [] + findMdFiles(sectionDir, mdFiles) + mdFiles.sort() + + for (const mdFile of mdFiles) { + const content = fs.readFileSync(mdFile, 'utf8') + parts.push(content.trim()) + parts.push('') + parts.push('---') + parts.push('') + } + } + + const fullPath = path.join(PUBLIC_DIR, 'llms-full.txt') + fs.writeFileSync(fullPath, parts.join('\n'), 'utf8') + + const sizeMB = (fs.statSync(fullPath).size / 1024 / 1024).toFixed(1) + console.log(`Generated llms-full.txt (${sizeMB} MB)`) +} + +function findMdFiles (dir, files) { + for (const entry of fs.readdirSync(dir, { withFileTypes: true })) { + const fullPath = path.join(dir, entry.name) + if (entry.isDirectory()) { + findMdFiles(fullPath, files) + } else if (entry.name === 'index.md') { + files.push(fullPath) + } + } +} + +main() From b65d6732357ae202b5dd537294339bab8a70348e Mon Sep 17 00:00:00 2001 From: Gaurav Nelson <23069445+gaurav-nelson@users.noreply.github.com> Date: Tue, 28 Apr 2026 17:27:09 +1000 Subject: [PATCH 3/4] chore: Add Node.js setup and markdown generator dependencies to GitHub Actions workflow --- .github/workflows/gh-pages.yml | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/.github/workflows/gh-pages.yml b/.github/workflows/gh-pages.yml index c0821349ab..911bf78a14 100644 --- a/.github/workflows/gh-pages.yml +++ b/.github/workflows/gh-pages.yml @@ -26,9 +26,18 @@ jobs: hugo-version: '0.133.1' extended: true + - name: Setup 
Node.js + uses: actions/setup-node@v6 + with: + node-version: '22' + + - name: Install markdown generator dependencies + run: npm install asciidoctor @asciidoctor/reducer downdoc + - name: Build run: | hugo --minify + node utils/generate-md.js - name: Deploy uses: peaceiris/actions-gh-pages@v4 From 75203c4c74d07bffb9ca5e8ada25c0eb4703b010 Mon Sep 17 00:00:00 2001 From: Gaurav Nelson <23069445+gaurav-nelson@users.noreply.github.com> Date: Fri, 1 May 2026 12:12:07 +1000 Subject: [PATCH 4/4] fix: Add missing newline at end of file in search list layout --- layouts/search/list.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/layouts/search/list.html b/layouts/search/list.html index bed9e6ca97..a47025b48d 100644 --- a/layouts/search/list.html +++ b/layouts/search/list.html @@ -21,4 +21,4 @@

    {{ partial "footer.html" . }} -{{ end }} \ No newline at end of file +{{ end }}