Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions .github/workflows/squad-ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -179,6 +179,9 @@ jobs:
- name: Build
run: npm run build

- name: Build docs search index
run: cd docs && npm ci && npm run build:search && mkdir -p dist && cp public/search-index.json dist/search-index.json

- name: Run tests
run: npm test

Expand Down
2 changes: 2 additions & 0 deletions docs/.gitignore
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
node_modules/
dist/
.astro/
public/search-index.json
public/pagefind/
4 changes: 3 additions & 1 deletion docs/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,9 @@
"private": true,
"scripts": {
"dev": "astro dev",
"build": "astro build && npx pagefind --site dist",
"build:search": "node scripts/build-search-index.mjs",
"build:pagefind": "npm run build && node -e \"require('fs').cpSync('dist/pagefind','public/pagefind',{recursive:true,force:true});console.log('Pagefind index copied to public/pagefind — dev search ready')\"",
"build": "node scripts/build-search-index.mjs && astro build && npx pagefind --site dist",
"preview": "astro preview",
"astro": "astro",
"test": "node --test tests/build-output.test.mjs && npx playwright test",
Expand Down
8 changes: 7 additions & 1 deletion docs/playwright.config.mjs
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,14 @@ export default defineConfig({
timeout: 30_000,
retries: 0,
use: {
baseURL: 'http://localhost:4321/squad/',
baseURL: 'http://localhost:4322/squad/',
browserName: 'chromium',
headless: true,
},
webServer: {
command: 'npm run build && npx astro preview --port 4322',
port: 4322,
timeout: 120_000,
reuseExistingServer: !process.env.CI,
},
});
153 changes: 153 additions & 0 deletions docs/scripts/build-search-index.mjs
Original file line number Diff line number Diff line change
@@ -0,0 +1,153 @@
#!/usr/bin/env node
/**
* build-search-index.mjs
* Reads all .md files from docs/src/content/docs/, chunks by ## headings,
* and outputs a static search-index.json for client-side TF-IDF search.
*/

import { readdir, readFile, writeFile, mkdir } from 'node:fs/promises';
import { join, relative, dirname, sep } from 'node:path';
import { fileURLToPath } from 'node:url';

// Resolve all paths relative to this script's location so the build
// works regardless of the current working directory.
const __dirname = dirname(fileURLToPath(import.meta.url));
const DOCS_ROOT = join(__dirname, '..', 'src', 'content', 'docs');
const OUTPUT_DIR = join(__dirname, '..', 'public');
const OUTPUT_FILE = join(OUTPUT_DIR, 'search-index.json');

// Display names for search sections, keyed by the top-level directory
// under src/content/docs/. Directories not listed fall back to a
// capitalized directory name (see deriveSection). Frozen so this shared
// lookup table cannot be mutated at runtime.
const SECTION_NAMES = Object.freeze({
  'get-started': 'Get Started',
  guide: 'Guide',
  features: 'Features',
  reference: 'Reference',
  scenarios: 'Scenarios',
  concepts: 'Concepts',
  cookbook: 'Cookbook',
});

/**
 * Recursively walk a directory and collect every Markdown file in it.
 * @param {string} dir - Absolute path of the directory to scan.
 * @returns {Promise<string[]>} Absolute paths of all .md files, depth-first.
 */
async function collectMdFiles(dir) {
  const found = [];
  for (const dirent of await readdir(dir, { withFileTypes: true })) {
    const absolute = join(dir, dirent.name);
    if (dirent.isDirectory()) {
      const nested = await collectMdFiles(absolute);
      found.push(...nested);
    } else if (dirent.name.endsWith('.md')) {
      found.push(absolute);
    }
  }
  return found;
}

/**
 * Remove a leading YAML frontmatter block (`--- ... ---`) if present.
 * Handles both LF and CRLF line endings.
 * @param {string} content - Raw markdown file contents.
 * @returns {string} Content without the frontmatter, or the input unchanged.
 */
function stripFrontmatter(content) {
  const fm = /^---\r?\n[\s\S]*?\r?\n---\r?\n/.exec(content);
  if (fm === null) return content;
  return content.slice(fm[0].length);
}

/**
 * Find the page title from the first H1 (`# ...`) line in the document.
 * @param {string} content - Markdown file contents.
 * @returns {string} Trimmed heading text, or 'Untitled' if no H1 exists.
 */
function extractTitle(content) {
  const h1 = /^#\s+(.+)$/m.exec(content);
  if (h1 === null) return 'Untitled';
  return h1[1].trim();
}

/**
 * Map an absolute markdown path to its site slug relative to DOCS_ROOT:
 * normalizes path separators to '/', drops the .md extension, and
 * collapses a trailing '/index' onto its parent path.
 * @param {string} filePath - Absolute path of a .md file under DOCS_ROOT.
 * @returns {string} URL slug, e.g. 'guide/setup'.
 */
function deriveSlug(filePath) {
  const normalized = relative(DOCS_ROOT, filePath)
    .replace(/\\/g, '/')
    .replace(/\.md$/, '');
  return normalized.endsWith('/index')
    ? normalized.slice(0, -'/index'.length)
    : normalized;
}

/**
 * Resolve the display section for a slug from its first path segment,
 * falling back to a capitalized segment when SECTION_NAMES has no entry.
 * @param {string} slug - Page slug, e.g. 'guide/setup'.
 * @returns {string} Human-readable section name.
 */
function deriveSection(slug) {
  const [topDir] = slug.split('/');
  const known = SECTION_NAMES[topDir];
  if (known) return known;
  return topDir.charAt(0).toUpperCase() + topDir.slice(1);
}

/**
 * Convert a markdown fragment to plain searchable text by removing
 * markup while keeping the human-readable words. Code spans/fences are
 * dropped entirely; link and heading text is kept without its syntax.
 * @param {string} text - Markdown fragment (frontmatter already removed).
 * @returns {string} Plain text suitable for the search index.
 */
function stripMarkdown(text) {
  return text
    .replace(/!\[.*?\]\(.*?\)/g, '') // images
    .replace(/\[([^\]]*)\]\(.*?\)/g, '$1') // links → text
    .replace(/(`{1,3})[\s\S]*?\1/g, '') // inline/fenced code
    .replace(/^#{1,6}\s+/gm, '') // heading markers (fix: H1/H4+ '#' previously leaked into text)
    .replace(/^>\s?/gm, '') // blockquotes
    .replace(/[*_~]{1,3}/g, '') // bold/italic/strikethrough
    .replace(/^[-*+]\s/gm, '') // unordered list markers
    .replace(/^\d+\.\s/gm, '') // ordered list markers
    .replace(/\|/g, ' ') // table pipes
    .replace(/^-{3,}$/gm, '') // horizontal rules
    .replace(/<[^>]+>/g, '') // HTML tags
    .replace(/\n{2,}/g, '\n') // collapse blank lines
    .trim();
}

/**
 * Split a markdown document into search chunks, one per H2/H3 heading.
 * Content before the first matching heading is attributed to the page
 * title. Chunks whose plain text is under 20 characters are dropped;
 * if nothing qualifies, the whole page becomes a single chunk.
 * @param {string} content - Raw markdown, including frontmatter.
 * @param {string} pageTitle - Title extracted from the page's H1.
 * @param {string} slug - Page slug used for linking and section lookup.
 * @returns {Array<{title: string, slug: string, section: string, heading: string, text: string}>}
 */
function chunkByHeadings(content, pageTitle, slug) {
  const section = deriveSection(slug);
  const body = stripFrontmatter(content);
  const results = [];
  let heading = pageTitle;
  let pending = [];

  // Turn the accumulated lines into a chunk, discarding trivially short text.
  const emit = () => {
    const joined = pending.join('\n').trim();
    if (joined === '') return;
    const plain = stripMarkdown(joined);
    if (plain.length < 20) return;
    results.push({ title: pageTitle, slug, section, heading, text: plain });
  };

  for (const line of body.split('\n')) {
    const m = /^#{2,3}\s+(.+)/.exec(line);
    if (m === null) {
      pending.push(line);
      continue;
    }
    emit();
    heading = m[1].trim();
    pending = [];
  }
  emit();

  // Pages with no qualifying chunk are indexed whole (when long enough).
  if (results.length === 0) {
    const plain = stripMarkdown(body);
    if (plain.length >= 20) {
      results.push({ title: pageTitle, slug, section, heading: pageTitle, text: plain });
    }
  }

  return results;
}

/**
 * Build the search index: walk DOCS_ROOT, chunk every markdown file,
 * and write the combined chunk list to public/search-index.json.
 * @returns {Promise<void>}
 * @throws If any file cannot be read or the output cannot be written.
 */
async function main() {
  console.log('Building search index...');
  const files = await collectMdFiles(DOCS_ROOT);
  console.log(`Found ${files.length} markdown files`);

  // Read and chunk all files in parallel instead of one-by-one;
  // Promise.all preserves input order, so the index stays deterministic.
  const perFile = await Promise.all(
    files.map(async (file) => {
      const content = await readFile(file, 'utf-8');
      const title = extractTitle(content);
      const slug = deriveSlug(file);
      return chunkByHeadings(content, title, slug);
    }),
  );
  const allChunks = perFile.flat();

  await mkdir(OUTPUT_DIR, { recursive: true });
  const json = JSON.stringify(allChunks);
  await writeFile(OUTPUT_FILE, json, 'utf-8');

  const sizeKB = (Buffer.byteLength(json) / 1024).toFixed(1);
  console.log(`✓ ${allChunks.length} chunks from ${files.length} files`);
  console.log(`✓ Output: search-index.json (${sizeKB} KB)`);
}

// Entry point: log any failure and exit non-zero so CI fails the build.
main().catch((err) => {
  console.error('Build search index failed:', err);
  process.exit(1);
});
Loading
Loading