# Source: sunvpy-docs/build.py
# Snapshot: 2026-04-10 13:47:53 +08:00 (180 lines, 5.5 KiB, Python)
"""
build.py - Generate static documentation site from sunvpy wiki markdown files.
Usage:
python build.py [--version VERSION_ID] [--output DIR]
"""
import json
import os
import re
import shutil
import sys
from pathlib import Path
BASE_DIR = Path(__file__).parent  # directory containing this build script
WIKI_DIR = BASE_DIR / "wiki"  # root of the wiki source tree
VERSIONS_DIR = WIKI_DIR / "versions"  # one subdirectory per documented version
def get_latest_version() -> str:
    """Return the name of the newest version directory under wiki/versions/.

    Directory names are compared with a natural (numeric-aware) sort so that
    e.g. "v10" ranks above "v9" — plain lexicographic sorting gets this
    wrong. Non-directory entries (stray files) are ignored.

    Exits with status 1 if no version directories exist.
    """
    def natural_key(name: str) -> list:
        # Split into digit / non-digit runs so numeric segments compare as ints.
        return [int(part) if part.isdigit() else part for part in re.split(r"(\d+)", name)]

    versions = sorted(
        (entry for entry in VERSIONS_DIR.iterdir() if entry.is_dir()),
        key=lambda entry: natural_key(entry.name),
    )
    if not versions:
        print("Error: No versions found in wiki/versions/", file=sys.stderr)
        sys.exit(1)
    return versions[-1].name
def load_wiki_manifest(version_dir: Path) -> dict:
    """Read and parse the wiki.json manifest found inside *version_dir*.

    Exits with status 1 (after an error message on stderr) when the
    manifest file does not exist.
    """
    manifest_path = version_dir / "wiki.json"
    if not manifest_path.exists():
        print(f"Error: wiki.json not found at {manifest_path}", file=sys.stderr)
        sys.exit(1)
    return json.loads(manifest_path.read_text(encoding="utf-8"))
def build_nav(pages: list) -> dict:
    """Build a navigation tree from wiki.json pages, grouped by section -> group.

    Section order follows first appearance in *pages*. Each section entry
    carries a "groups" list (for pages with a non-empty "group") and/or a
    "pages" list (for ungrouped pages). Both keys are created lazily on
    first use, so a section may freely mix grouped and ungrouped pages —
    the previous eager key creation raised KeyError for such mixed sections
    (only one of the two keys existed, chosen by the section's first page).

    Returns a dict of the form {"sections": [...]}.
    """
    sections_ordered = []  # section titles in first-seen order
    sections_map = {}      # section title -> section entry dict
    for page in pages:
        section_title = page["section"]
        group_title = page.get("group", "")
        if section_title not in sections_map:
            sections_map[section_title] = {"id": section_title, "title": section_title}
            sections_ordered.append(section_title)
        section = sections_map[section_title]
        page_entry = {
            "slug": page["slug"],
            "title": page["title"],
            "file": page["file"],
            "level": page["level"],
        }
        if group_title:
            groups = section.setdefault("groups", [])
            # Find the existing group with this title, or create it.
            group = next((g for g in groups if g["title"] == group_title), None)
            if group is None:
                group = {"title": group_title, "pages": []}
                groups.append(group)
            group["pages"].append(page_entry)
        else:
            section.setdefault("pages", []).append(page_entry)
    return {"sections": [sections_map[s] for s in sections_ordered]}
def build_search_index(pages: list, version_dir: Path) -> list:
    """Build the search index from each page's markdown file.

    For every page whose markdown file exists under *version_dir*, extract
    h2/h3 headings, strip "Sources:" footer lines, and cap the indexed body
    at 2000 characters to keep the index small. Missing files produce a
    stderr warning and are skipped.
    """
    heading_pat = re.compile(r"^#{2,3}\s+(.+)$", re.MULTILINE)
    sources_pat = re.compile(r"^Sources:.*$", re.MULTILINE)
    entries = []
    for page in pages:
        md_path = version_dir / page["file"]
        if not md_path.exists():
            print(f" Warning: {page['file']} not found, skipping", file=sys.stderr)
            continue
        content = md_path.read_text(encoding="utf-8")
        body = sources_pat.sub("", content).strip()
        entries.append({
            "slug": page["slug"],
            "title": page["title"],
            "section": page["section"],
            "headings": [m.group(1).strip() for m in heading_pat.finditer(content)],
            "content": body[:2000],  # truncate to control index size
        })
    return entries
def copy_content(pages: list, version_dir: Path, output_dir: Path):
    """Copy each page's markdown file into output_dir/content/.

    Fixes two defects in the original: destination parent directories are
    now created (page "file" paths may contain subdirectories, which made
    shutil.copy2 fail before), and the summary line reports the number of
    files actually copied rather than len(pages) (missing sources are
    skipped; build_search_index already warns about them).
    """
    content_dir = output_dir / "content"
    content_dir.mkdir(parents=True, exist_ok=True)
    copied = 0
    for page in pages:
        src = version_dir / page["file"]
        if not src.exists():
            continue
        dst = content_dir / page["file"]
        dst.parent.mkdir(parents=True, exist_ok=True)  # support nested page paths
        shutil.copy2(src, dst)
        copied += 1
    print(f" Copied {copied} markdown files")
def write_json(data, path: Path):
    """Serialize *data* as pretty-printed UTF-8 JSON at *path*.

    Parent directories are created as needed.
    """
    path.parent.mkdir(parents=True, exist_ok=True)
    text = json.dumps(data, ensure_ascii=False, indent=2)
    with open(path, "w", encoding="utf-8") as fh:
        fh.write(text)
def main():
    """CLI entry point: resolve version and output paths, then emit nav,
    search index, and copied markdown content."""
    import argparse

    cli = argparse.ArgumentParser(description="Build sunvpy docs site")
    cli.add_argument("--version", default=None, help="Version ID (default: latest)")
    cli.add_argument("--output", default=None, help="Output directory (default: .zread/docs/)")
    opts = cli.parse_args()

    version_id = opts.version or get_latest_version()
    version_dir = VERSIONS_DIR / version_id
    output_dir = Path(opts.output) if opts.output else BASE_DIR / "docs"

    print(f"Building docs from version: {version_id}")
    print(f"Source: {version_dir}")
    print(f"Output: {output_dir}")

    # Clean only generated subtrees; leave css/js/html assets untouched.
    for stale in ("data", "content"):
        target = output_dir / stale
        if target.exists():
            shutil.rmtree(target)

    # Manifest drives everything else.
    manifest = load_wiki_manifest(version_dir)
    pages = manifest["pages"]
    print(f"Found {len(pages)} pages")

    # Navigation tree.
    write_json(build_nav(pages), output_dir / "data" / "nav.json")
    print(" Generated data/nav.json")

    # Search index.
    search_index = build_search_index(pages, version_dir)
    write_json(search_index, output_dir / "data" / "search-index.json")
    print(f" Generated data/search-index.json ({len(search_index)} entries)")

    # Raw markdown for the site to fetch.
    copy_content(pages, version_dir, output_dir)
    print("Build complete!")


if __name__ == "__main__":
    main()