diff --git a/.github/workflows/architecture.yml b/.github/workflows/architecture.yml deleted file mode 100644 index 5e6cae3..0000000 --- a/.github/workflows/architecture.yml +++ /dev/null @@ -1,48 +0,0 @@ -name: Architecture - -on: - pull_request: - paths: - - 'internal/**' - - 'cmd/**' - - 'main.go' - - 'scripts/check-architecture/**' - push: - branches: [main] - paths: - - 'internal/**' - - 'cmd/**' - - 'main.go' - -jobs: - vertical-slice: - name: vertical slice check - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v6 - with: - fetch-depth: 0 - - - uses: actions/setup-go@v6 - with: - go-version-file: go.mod - cache: true - - - name: Check vertical slice architecture - env: - SUPERMODEL_API_KEY: ${{ secrets.SUPERMODEL_API_KEY }} - run: | - if [ -z "$SUPERMODEL_API_KEY" ]; then - echo "::warning::SUPERMODEL_API_KEY not set — skipping architecture check (fork PR?)" - exit 0 - fi - output=$(go run ./scripts/check-architecture 2>&1) - exit_code=$? - echo "$output" - if [ $exit_code -ne 0 ]; then - if echo "$output" | grep -q "HTTP 4"; then - echo "::warning::Architecture check skipped — API returned an error (transient or key issue)" - exit 0 - fi - exit $exit_code - fi diff --git a/.golangci.yaml b/.golangci.yaml index b5a8f01..4db62d0 100644 --- a/.golangci.yaml +++ b/.golangci.yaml @@ -40,6 +40,9 @@ linters: - G122 # filepath.Walk TOCTOU — acceptable for repo archiving - G117 # secret field in marshaled struct — intentional (config file) - G104 # unhandled errors — covered by errcheck with targeted exclusions + - G301 # directory permissions 0755 — intentional for static site output + - G306 # file write permissions 0644 — intentional for static site output + - G703 # path traversal — output paths come from user-supplied CLI flags exclusions: rules: # Deferred and best-effort closes are idiomatic Go — errors not actionable. @@ -48,9 +51,13 @@ linters: # fmt.Fprintf/Fprintln to stdout/stderr — write errors are not actionable. 
- text: 'Error return value of `fmt\.Fp?rint' linters: [errcheck] - # Best-effort temp file cleanup. - - text: 'Error return value of `os\.Remove` is not checked' + # Best-effort temp file/dir cleanup. + - text: 'Error return value of `os\.Remove(All)?` is not checked' linters: [errcheck] + # Ported packages from supermodeltools/arch-docs — style/complexity issues are + # acceptable in vendored/ported code; fixing them would cause needless upstream divergence. + - path: 'internal/archdocs/(graph2md|pssg)/' + linters: [gocritic, gocyclo, gosec, revive, ineffassign, staticcheck] # Test files get more latitude. - path: _test\.go linters: [errcheck, gosec, gocritic] diff --git a/cmd/archdocs.go b/cmd/archdocs.go new file mode 100644 index 0000000..3e893e9 --- /dev/null +++ b/cmd/archdocs.go @@ -0,0 +1,57 @@ +package cmd + +import ( + "github.com/spf13/cobra" + + "github.com/supermodeltools/cli/internal/archdocs" + "github.com/supermodeltools/cli/internal/config" +) + +func init() { + var opts archdocs.Options + + c := &cobra.Command{ + Use: "arch-docs [path]", + Short: "Generate static architecture documentation for a repository", + Long: `Generate a static HTML site documenting the architecture of a codebase. + +The command uploads the repository to the Supermodel API, converts the +returned code graph to markdown, and builds a browsable static site with +search, dependency graphs, taxonomy navigation, and SEO metadata. + +The output directory can be served locally or deployed to any static host +(GitHub Pages, Vercel, Netlify, Cloudflare Pages, etc.). 
+ +Examples: + supermodel arch-docs + supermodel arch-docs ./my-project --output ./docs-site + supermodel arch-docs --repo owner/repo --base-url https://owner.github.io/repo + supermodel arch-docs --site-name "My App Docs" --output /var/www/html`, + Args: cobra.MaximumNArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + cfg, err := config.Load() + if err != nil { + return err + } + if err := cfg.RequireAPIKey(); err != nil { + return err + } + dir := "." + if len(args) > 0 { + dir = args[0] + } + return archdocs.Run(cmd.Context(), cfg, dir, opts) + }, + } + + c.Flags().StringVar(&opts.SiteName, "site-name", "", "display title for the generated site (default: \" Architecture Docs\")") + c.Flags().StringVar(&opts.BaseURL, "base-url", "", "canonical base URL where the site will be hosted (default: https://example.com)") + c.Flags().StringVar(&opts.Repo, "repo", "", "GitHub repo slug owner/repo used to build source links") + c.Flags().StringVarP(&opts.Output, "output", "o", "", "output directory for the generated site (default: ./arch-docs-output)") + c.Flags().StringVar(&opts.TemplatesDir, "templates-dir", "", "override bundled HTML/CSS/JS templates with a custom directory") + c.Flags().IntVar(&opts.MaxSourceFiles, "max-source-files", 3000, "maximum source files to include in analysis (0 = unlimited)") + c.Flags().IntVar(&opts.MaxEntities, "max-entities", 12000, "maximum entity pages to generate (0 = unlimited)") + c.Flags().BoolVar(&opts.Force, "force", false, "bypass cache and re-upload even if a cached result exists") + + rootCmd.AddCommand(c) +} diff --git a/internal/api/client.go b/internal/api/client.go index cb96a01..6cda841 100644 --- a/internal/api/client.go +++ b/internal/api/client.go @@ -50,6 +50,17 @@ func (c *Client) Analyze(ctx context.Context, zipPath, idempotencyKey string) (* return &result.Graph, nil } +// AnalyzeRaw uploads a repository ZIP and runs the full analysis pipeline, +// returning the raw result JSON from the completed job. 
Use this when you need +// the full response payload (e.g. for graph2md / arch-docs generation). +func (c *Client) AnalyzeRaw(ctx context.Context, zipPath, idempotencyKey string) (json.RawMessage, error) { + job, err := c.pollUntilComplete(ctx, zipPath, idempotencyKey) + if err != nil { + return nil, err + } + return job.Result, nil +} + // AnalyzeDomains uploads a repository ZIP and runs the full analysis pipeline, // returning the complete SupermodelIR response (domains, summary, metadata, graph). // Use this instead of Analyze when you need high-level domain information. diff --git a/internal/archdocs/graph2md/graph2md.go b/internal/archdocs/graph2md/graph2md.go new file mode 100644 index 0000000..9c041c8 --- /dev/null +++ b/internal/archdocs/graph2md/graph2md.go @@ -0,0 +1,2248 @@ +package graph2md + +import ( + "encoding/json" + "fmt" + "html" + "log" + "os" + "path/filepath" + "regexp" + "sort" + "strings" +) + +var nonAlnum = regexp.MustCompile(`[^a-z0-9]+`) + +// toSlug converts a string to a URL-safe slug. 
+func toSlug(s string) string { + s = strings.ToLower(s) + s = nonAlnum.ReplaceAllString(s, "-") + s = strings.Trim(s, "-") + return s +} + +// Graph JSON structures matching Supermodel API response + +type APIResponse struct { + Status string `json:"status"` + JobID string `json:"jobId"` + Error json.RawMessage `json:"error"` + Result *GraphResult `json:"result"` +} + +type GraphResult struct { + GeneratedAt string `json:"generatedAt"` + Message string `json:"message"` + Stats GraphStats `json:"stats"` + Graph Graph `json:"graph"` + Metadata json.RawMessage `json:"metadata"` + Domains []DomainSummary `json:"domains"` + Artifacts []Artifact `json:"artifacts"` +} + +type GraphStats struct { + NodeCount int `json:"nodeCount"` + RelationshipCount int `json:"relationshipCount"` + NodeTypes map[string]int `json:"nodeTypes"` + RelationshipTypes map[string]int `json:"relationshipTypes"` +} + +type Graph struct { + Nodes []Node `json:"nodes"` + Relationships []Relationship `json:"relationships"` +} + +type Node struct { + ID string `json:"id"` + Labels []string `json:"labels"` + Properties map[string]interface{} `json:"properties"` +} + +type Relationship struct { + ID string `json:"id"` + Type string `json:"type"` + StartNode string `json:"startNode"` + EndNode string `json:"endNode"` + Properties map[string]interface{} `json:"properties"` +} + +type DomainSummary struct { + Name string `json:"name"` + Subdomains []SubdomainSummary `json:"subdomains"` + Files []string `json:"files"` +} + +type SubdomainSummary struct { + Name string `json:"name"` + DescriptionSummary string `json:"descriptionSummary"` + Files []string `json:"files"` + Functions []string `json:"functions"` + Classes []string `json:"classes"` +} + +type Artifact struct { + Type string `json:"type"` + Name string `json:"name"` + Graph *Graph `json:"graph"` + Stats json.RawMessage `json:"stats"` +} + +// Run executes graph2md conversion. inputFiles is a comma-separated list of +// paths to graph JSON files. 
outputDir is the directory for markdown output. +func Run(inputFiles, outputDir, repoName, repoURL string, maxEntities int) error { + if inputFiles == "" { + return fmt.Errorf("input is required (comma-separated paths to graph JSON files)") + } + + if err := os.MkdirAll(outputDir, 0755); err != nil { + return fmt.Errorf("creating output dir: %w", err) + } + + // Load and merge all graphs + var allNodes []Node + var allRels []Relationship + nodeMap := make(map[string]bool) + + for _, path := range strings.Split(inputFiles, ",") { + path = strings.TrimSpace(path) + if path == "" { + continue + } + log.Printf("Loading graph from %s...", path) + nodes, rels, err := loadGraph(path) + if err != nil { + log.Printf("Warning: failed to load %s: %v", path, err) + continue + } + for _, n := range nodes { + if !nodeMap[n.ID] { + nodeMap[n.ID] = true + allNodes = append(allNodes, n) + } + } + allRels = append(allRels, rels...) + log.Printf(" Loaded %d nodes, %d relationships", len(nodes), len(rels)) + } + + log.Printf("Total: %d unique nodes, %d relationships", len(allNodes), len(allRels)) + + // Build node lookup: id -> node + nodeLookup := make(map[string]*Node) + for i := range allNodes { + nodeLookup[allNodes[i].ID] = &allNodes[i] + } + + // Build relationship indices + imports := make(map[string][]string) + importedBy := make(map[string][]string) + callsRel := make(map[string][]string) + calledByRel := make(map[string][]string) + containsFile := make(map[string][]string) // directory -> files + definesFunc := make(map[string][]string) // file -> functions + declaresClass := make(map[string][]string) // file -> classes + definesType := make(map[string][]string) // file -> types + childDir := make(map[string][]string) // directory -> subdirectories + belongsToDomain := make(map[string]string) // node -> domain name + belongsToSubdomain := make(map[string]string) // node -> subdomain name + partOfDomain := make(map[string]string) // subdomain node ID -> domain name + 
extendsRel := make(map[string][]string) // class -> parent classes + + // Reverse lookups for "Defined In" + fileOfFunc := make(map[string]string) // function nodeID -> file nodeID + fileOfClass := make(map[string]string) // class nodeID -> file nodeID + fileOfType := make(map[string]string) // type nodeID -> file nodeID + + // Domain/subdomain node lookups by name + domainNodeByName := make(map[string]string) // domain name -> domain node ID + subdomainNodeByName := make(map[string]string) // subdomain name -> subdomain node ID + + // Domain -> subdomain mappings + domainSubdomains := make(map[string][]string) // domain name -> subdomain node IDs + + // Subdomain -> functions/classes + subdomainFuncs := make(map[string][]string) // subdomain name -> function node IDs + subdomainClasses := make(map[string][]string) // subdomain name -> class node IDs + + for _, rel := range allRels { + switch rel.Type { + case "IMPORTS": + imports[rel.StartNode] = append(imports[rel.StartNode], rel.EndNode) + importedBy[rel.EndNode] = append(importedBy[rel.EndNode], rel.StartNode) + case "calls": + callsRel[rel.StartNode] = append(callsRel[rel.StartNode], rel.EndNode) + calledByRel[rel.EndNode] = append(calledByRel[rel.EndNode], rel.StartNode) + case "CONTAINS_FILE": + containsFile[rel.StartNode] = append(containsFile[rel.StartNode], rel.EndNode) + case "DEFINES_FUNCTION": + definesFunc[rel.StartNode] = append(definesFunc[rel.StartNode], rel.EndNode) + fileOfFunc[rel.EndNode] = rel.StartNode + case "DECLARES_CLASS": + declaresClass[rel.StartNode] = append(declaresClass[rel.StartNode], rel.EndNode) + fileOfClass[rel.EndNode] = rel.StartNode + case "DEFINES": + definesType[rel.StartNode] = append(definesType[rel.StartNode], rel.EndNode) + fileOfType[rel.EndNode] = rel.StartNode + case "CHILD_DIRECTORY": + childDir[rel.StartNode] = append(childDir[rel.StartNode], rel.EndNode) + case "EXTENDS": + extendsRel[rel.StartNode] = append(extendsRel[rel.StartNode], rel.EndNode) + case 
"belongsTo": + endNode := nodeLookup[rel.EndNode] + if endNode == nil { + continue + } + name := getStr(endNode.Properties, "name") + if hasLabel(endNode, "Domain") { + belongsToDomain[rel.StartNode] = name + } else if hasLabel(endNode, "Subdomain") { + belongsToSubdomain[rel.StartNode] = name + } + case "partOf": + endNode := nodeLookup[rel.EndNode] + if endNode != nil { + partOfDomain[rel.StartNode] = getStr(endNode.Properties, "name") + } + } + } + + // Build domain/subdomain node-by-name lookups + for _, node := range allNodes { + if hasLabel(&node, "Domain") { + name := getStr(node.Properties, "name") + if name != "" { + domainNodeByName[name] = node.ID + } + } else if hasLabel(&node, "Subdomain") { + name := getStr(node.Properties, "name") + if name != "" { + subdomainNodeByName[name] = node.ID + } + } + } + + // Build domain -> subdomain mapping from partOf relationships + for subNodeID, domName := range partOfDomain { + domainSubdomains[domName] = append(domainSubdomains[domName], subNodeID) + } + + // Build subdomain -> functions/classes from belongsToSubdomain + for nodeID, subName := range belongsToSubdomain { + n := nodeLookup[nodeID] + if n == nil { + continue + } + if hasLabel(n, "Function") { + subdomainFuncs[subName] = append(subdomainFuncs[subName], nodeID) + } else if hasLabel(n, "Class") { + subdomainClasses[subName] = append(subdomainClasses[subName], nodeID) + } + } + + // Resolve domain for files via belongsTo on their functions/classes + for _, node := range allNodes { + if !hasLabel(&node, "File") { + continue + } + if _, ok := belongsToDomain[node.ID]; ok { + continue + } + for _, fnID := range definesFunc[node.ID] { + if d, ok := belongsToDomain[fnID]; ok { + belongsToDomain[node.ID] = d + break + } + } + if _, ok := belongsToDomain[node.ID]; ok { + continue + } + for _, clsID := range declaresClass[node.ID] { + if d, ok := belongsToDomain[clsID]; ok { + belongsToDomain[node.ID] = d + break + } + for _, fnID := range definesFunc[clsID] { + 
if d, ok := belongsToDomain[fnID]; ok { + belongsToDomain[node.ID] = d + break + } + } + if _, ok := belongsToDomain[node.ID]; ok { + break + } + } + } + + // Similarly resolve subdomain for files + for _, node := range allNodes { + if !hasLabel(&node, "File") { + continue + } + if _, ok := belongsToSubdomain[node.ID]; ok { + continue + } + for _, fnID := range definesFunc[node.ID] { + if s, ok := belongsToSubdomain[fnID]; ok { + belongsToSubdomain[node.ID] = s + break + } + } + if _, ok := belongsToSubdomain[node.ID]; ok { + continue + } + for _, clsID := range declaresClass[node.ID] { + if s, ok := belongsToSubdomain[clsID]; ok { + belongsToSubdomain[node.ID] = s + break + } + for _, fnID := range definesFunc[clsID] { + if s, ok := belongsToSubdomain[fnID]; ok { + belongsToSubdomain[node.ID] = s + break + } + } + if _, ok := belongsToSubdomain[node.ID]; ok { + break + } + } + } + + // Propagate domain from subdomain's partOf for any node that has a + // subdomain but no direct domain assignment. 
+ for nodeID, subName := range belongsToSubdomain { + if _, ok := belongsToDomain[nodeID]; ok { + continue + } + subNodeID, ok := subdomainNodeByName[subName] + if !ok { + continue + } + if domName, ok := partOfDomain[subNodeID]; ok && domName != "" { + belongsToDomain[nodeID] = domName + } + } + + // Collect all domain members for Domain/Subdomain body sections + domainFiles := make(map[string][]string) // domain name -> file node IDs + subdomainFiles := make(map[string][]string) // subdomain name -> file node IDs + for nodeID, domName := range belongsToDomain { + n := nodeLookup[nodeID] + if n != nil && hasLabel(n, "File") { + domainFiles[domName] = append(domainFiles[domName], nodeID) + } + } + for nodeID, subName := range belongsToSubdomain { + n := nodeLookup[nodeID] + if n != nil && hasLabel(n, "File") { + subdomainFiles[subName] = append(subdomainFiles[subName], nodeID) + } + } + + // Which node types to generate pages for + generateLabels := map[string]bool{ + "File": true, "Function": true, "Class": true, "Type": true, + "Domain": true, "Subdomain": true, "Directory": true, + } + + // --- Pass 1: Generate all slugs and build nodeID -> slug lookup --- + slugLookup := make(map[string]string) + usedSlugs := make(map[string]int) + + type nodeEntry struct { + node Node + label string + slug string + } + var entries []nodeEntry + + for _, node := range allNodes { + if len(node.Labels) == 0 { + continue + } + primaryLabel := node.Labels[0] + if !generateLabels[primaryLabel] { + continue + } + + slug := generateSlug(node, primaryLabel) + if slug == "" { + continue + } + + // Handle slug collisions + if n, ok := usedSlugs[slug]; ok { + usedSlugs[slug] = n + 1 + slug = fmt.Sprintf("%s-%d", slug, n+1) + } else { + usedSlugs[slug] = 1 + } + + slugLookup[node.ID] = slug + entries = append(entries, nodeEntry{node: node, label: primaryLabel, slug: slug}) + } + + log.Printf("Pass 1 complete: %d slugs generated", len(entries)) + + // Cap entities by priority + connectivity 
to stay within CF Pages 20k file limit. + if maxEntities > 0 && len(entries) > maxEntities { + // Score each node by relationship degree (higher = more architecturally central) + degree := make(map[string]int) + for _, rel := range allRels { + degree[rel.StartNode]++ + degree[rel.EndNode]++ + } + // Label priority: structural first, then files, then symbols + labelPriority := map[string]int{ + "Domain": 0, "Subdomain": 1, "Directory": 2, + "File": 3, "Class": 4, "Type": 5, "Function": 6, + } + sort.SliceStable(entries, func(i, j int) bool { + pi := labelPriority[entries[i].label] + pj := labelPriority[entries[j].label] + if pi != pj { + return pi < pj + } + return degree[entries[i].node.ID] > degree[entries[j].node.ID] + }) + log.Printf("Capping entities from %d to %d (max-entities limit)", len(entries), maxEntities) + entries = entries[:maxEntities] + // Rebuild slugLookup to only reference kept entries; dropped entities + // will degrade to plain text in internal links. + newSlugLookup := make(map[string]string, len(entries)) + for _, e := range entries { + newSlugLookup[e.node.ID] = e.slug + } + slugLookup = newSlugLookup + } + + // --- Pass 2: Generate markdown with internal links --- + var count int + for _, e := range entries { + ctx := &renderContext{ + node: &e.node, + label: e.label, + slug: e.slug, + repoName: repoName, + repoURL: repoURL, + nodeLookup: nodeLookup, + slugLookup: slugLookup, + imports: imports, + importedBy: importedBy, + calls: callsRel, + calledBy: calledByRel, + containsFile: containsFile, + definesFunc: definesFunc, + declaresClass: declaresClass, + definesType: definesType, + childDir: childDir, + extendsRel: extendsRel, + belongsToDomain: belongsToDomain, + belongsToSubdomain: belongsToSubdomain, + partOfDomain: partOfDomain, + domainFiles: domainFiles, + subdomainFiles: subdomainFiles, + fileOfFunc: fileOfFunc, + fileOfClass: fileOfClass, + fileOfType: fileOfType, + domainNodeByName: domainNodeByName, + subdomainNodeByName: 
subdomainNodeByName, + domainSubdomains: domainSubdomains, + subdomainFuncs: subdomainFuncs, + subdomainClasses: subdomainClasses, + } + + md := ctx.generateMarkdown() + outPath := filepath.Join(outputDir, e.slug+".md") + if err := os.WriteFile(outPath, []byte(md), 0644); err != nil { + log.Printf("Warning: failed to write %s: %v", outPath, err) + continue + } + count++ + } + + log.Printf("Generated %d entity files in %s", count, outputDir) + return nil +} + +type renderContext struct { + node *Node + label, slug, repoName, repoURL string + nodeLookup map[string]*Node + slugLookup map[string]string + imports, importedBy map[string][]string + calls, calledBy map[string][]string + containsFile, definesFunc, declaresClass map[string][]string + definesType, childDir, extendsRel map[string][]string + belongsToDomain, belongsToSubdomain map[string]string + partOfDomain map[string]string + domainFiles, subdomainFiles map[string][]string + fileOfFunc, fileOfClass, fileOfType map[string]string + domainNodeByName, subdomainNodeByName map[string]string + domainSubdomains map[string][]string + subdomainFuncs, subdomainClasses map[string][]string +} + +// internalLink returns an HTML tag linking to the entity page for nodeID, +// or plain-text label if no slug is found. +func (c *renderContext) internalLink(nodeID, label string) string { + slug, ok := c.slugLookup[nodeID] + if !ok { + return html.EscapeString(label) + } + return fmt.Sprintf(`%s`, slug, html.EscapeString(label)) +} + +// internalLinkByName looks up a domain/subdomain node by name, then links to it. 
+func (c *renderContext) domainLink(domainName string) string { + nodeID, ok := c.domainNodeByName[domainName] + if !ok { + return html.EscapeString(domainName) + } + return c.internalLink(nodeID, domainName) +} + +func (c *renderContext) subdomainLink(subdomainName string) string { + nodeID, ok := c.subdomainNodeByName[subdomainName] + if !ok { + return html.EscapeString(subdomainName) + } + return c.internalLink(nodeID, subdomainName) +} + +func (c *renderContext) generateMarkdown() string { + var sb strings.Builder + + sb.WriteString("---\n") + + switch c.label { + case "File": + c.writeFileFrontmatter(&sb) + case "Function": + c.writeFunctionFrontmatter(&sb) + case "Class": + c.writeClassFrontmatter(&sb) + case "Type": + c.writeTypeFrontmatter(&sb) + case "Domain": + c.writeDomainFrontmatter(&sb) + case "Subdomain": + c.writeSubdomainFrontmatter(&sb) + case "Directory": + c.writeDirectoryFrontmatter(&sb) + } + + // Write graph_data, mermaid_diagram, arch_map frontmatter fields + c.writeGraphData(&sb) + c.writeMermaidDiagram(&sb) + c.writeArchMap(&sb) + + sb.WriteString("---\n\n") + + switch c.label { + case "File": + c.writeFileBody(&sb) + case "Function": + c.writeFunctionBody(&sb) + case "Class": + c.writeClassBody(&sb) + case "Type": + c.writeTypeBody(&sb) + case "Domain": + c.writeDomainBody(&sb) + case "Subdomain": + c.writeSubdomainBody(&sb) + case "Directory": + c.writeDirectoryBody(&sb) + } + + // FAQ section at the end of body + c.writeFAQSection(&sb) + + return sb.String() +} + +// --- Frontmatter writers --- + +func (c *renderContext) writeFileFrontmatter(sb *strings.Builder) { + props := c.node.Properties + path := getStr(props, "path") + name := getStr(props, "name") + lang := getStr(props, "language") + if name == "" { + name = filepath.Base(path) + } + + title := fmt.Sprintf("%s — %s Source File", name, c.repoName) + desc := fmt.Sprintf("Architecture documentation for %s", name) + if lang != "" { + desc += fmt.Sprintf(", a %s file", lang) + } + 
desc += fmt.Sprintf(" in the %s codebase.", c.repoName) + + depCount := len(c.imports[c.node.ID]) + ibCount := len(c.importedBy[c.node.ID]) + if depCount > 0 || ibCount > 0 { + desc += fmt.Sprintf(" %d imports, %d dependents.", depCount, ibCount) + } + + fmt.Fprintf(sb, "title: %q\n", title) + fmt.Fprintf(sb, "description: %q\n", desc) + sb.WriteString("node_type: \"File\"\n") + fmt.Fprintf(sb, "file_path: %q\n", path) + fmt.Fprintf(sb, "file_name: %q\n", name) + if lang != "" { + fmt.Fprintf(sb, "language: %q\n", lang) + } + fmt.Fprintf(sb, "repo: %q\n", c.repoName) + fmt.Fprintf(sb, "repo_url: %q\n", c.repoURL) + + dir := filepath.Dir(path) + if dir != "" && dir != "." { + fmt.Fprintf(sb, "directory: %q\n", dir) + parts := strings.Split(dir, "/") + if len(parts) > 0 { + fmt.Fprintf(sb, "top_directory: %q\n", parts[0]) + } + } + + ext := filepath.Ext(name) + if ext != "" { + fmt.Fprintf(sb, "extension: %q\n", ext) + } + + if d, ok := c.belongsToDomain[c.node.ID]; ok { + fmt.Fprintf(sb, "domain: %q\n", d) + } + if s, ok := c.belongsToSubdomain[c.node.ID]; ok { + fmt.Fprintf(sb, "subdomain: %q\n", s) + } + + // File line count: use lineCount property, or compute from endLine + if lc := getNum(props, "lineCount"); lc > 0 { + fmt.Fprintf(sb, "line_count: %d\n", lc) + } else if endLine := getNum(props, "endLine"); endLine > 0 { + startLine := getNum(props, "startLine") + if startLine <= 0 { + startLine = 1 + } + fmt.Fprintf(sb, "start_line: %d\n", startLine) + fmt.Fprintf(sb, "end_line: %d\n", endLine) + fmt.Fprintf(sb, "line_count: %d\n", endLine-startLine+1) + } + + fmt.Fprintf(sb, "import_count: %d\n", depCount) + fmt.Fprintf(sb, "imported_by_count: %d\n", ibCount) + + funcCount := len(c.definesFunc[c.node.ID]) + classCount := len(c.declaresClass[c.node.ID]) + typeCount := len(c.definesType[c.node.ID]) + fmt.Fprintf(sb, "function_count: %d\n", funcCount) + fmt.Fprintf(sb, "class_count: %d\n", classCount) + fmt.Fprintf(sb, "type_count: %d\n", typeCount) + + 
c.writeTags(sb) +} + +func (c *renderContext) writeFunctionFrontmatter(sb *strings.Builder) { + props := c.node.Properties + name := getStr(props, "name") + filePath := getStr(props, "filePath") + lang := getStr(props, "language") + startLine := getNum(props, "startLine") + endLine := getNum(props, "endLine") + + title := fmt.Sprintf("%s() — %s Function Reference", name, c.repoName) + desc := fmt.Sprintf("Architecture documentation for the %s() function", name) + if filePath != "" { + desc += fmt.Sprintf(" in %s", filepath.Base(filePath)) + } + desc += fmt.Sprintf(" from the %s codebase.", c.repoName) + + fmt.Fprintf(sb, "title: %q\n", title) + fmt.Fprintf(sb, "description: %q\n", desc) + sb.WriteString("node_type: \"Function\"\n") + fmt.Fprintf(sb, "function_name: %q\n", name) + if filePath != "" { + fmt.Fprintf(sb, "file_path: %q\n", filePath) + dir := filepath.Dir(filePath) + if dir != "" && dir != "." { + fmt.Fprintf(sb, "directory: %q\n", dir) + } + } + if lang != "" { + fmt.Fprintf(sb, "language: %q\n", lang) + } + if startLine > 0 { + fmt.Fprintf(sb, "start_line: %d\n", startLine) + } + if endLine > 0 { + fmt.Fprintf(sb, "end_line: %d\n", endLine) + fmt.Fprintf(sb, "line_count: %d\n", endLine-startLine+1) + } + fmt.Fprintf(sb, "repo: %q\n", c.repoName) + fmt.Fprintf(sb, "call_count: %d\n", len(c.calls[c.node.ID])) + fmt.Fprintf(sb, "called_by_count: %d\n", len(c.calledBy[c.node.ID])) + + if d, ok := c.belongsToDomain[c.node.ID]; ok { + fmt.Fprintf(sb, "domain: %q\n", d) + } + if s, ok := c.belongsToSubdomain[c.node.ID]; ok { + fmt.Fprintf(sb, "subdomain: %q\n", s) + } + + c.writeTags(sb) +} + +func (c *renderContext) writeClassFrontmatter(sb *strings.Builder) { + props := c.node.Properties + name := getStr(props, "name") + filePath := getStr(props, "filePath") + lang := getStr(props, "language") + startLine := getNum(props, "startLine") + endLine := getNum(props, "endLine") + + title := fmt.Sprintf("%s Class — %s Architecture", name, c.repoName) + desc := 
fmt.Sprintf("Architecture documentation for the %s class", name) + if filePath != "" { + desc += fmt.Sprintf(" in %s", filepath.Base(filePath)) + } + desc += fmt.Sprintf(" from the %s codebase.", c.repoName) + + fmt.Fprintf(sb, "title: %q\n", title) + fmt.Fprintf(sb, "description: %q\n", desc) + sb.WriteString("node_type: \"Class\"\n") + fmt.Fprintf(sb, "class_name: %q\n", name) + if filePath != "" { + fmt.Fprintf(sb, "file_path: %q\n", filePath) + dir := filepath.Dir(filePath) + if dir != "" && dir != "." { + fmt.Fprintf(sb, "directory: %q\n", dir) + } + } + if lang != "" { + fmt.Fprintf(sb, "language: %q\n", lang) + } + if startLine > 0 { + fmt.Fprintf(sb, "start_line: %d\n", startLine) + } + if endLine > 0 { + fmt.Fprintf(sb, "end_line: %d\n", endLine) + fmt.Fprintf(sb, "line_count: %d\n", endLine-startLine+1) + } + fmt.Fprintf(sb, "repo: %q\n", c.repoName) + + if d, ok := c.belongsToDomain[c.node.ID]; ok { + fmt.Fprintf(sb, "domain: %q\n", d) + } + if s, ok := c.belongsToSubdomain[c.node.ID]; ok { + fmt.Fprintf(sb, "subdomain: %q\n", s) + } + + extends := c.extendsRel[c.node.ID] + if len(extends) > 0 { + names := c.resolveNames(extends) + fmt.Fprintf(sb, "extends: %q\n", strings.Join(names, ", ")) + } + + c.writeTags(sb) +} + +func (c *renderContext) writeTypeFrontmatter(sb *strings.Builder) { + props := c.node.Properties + name := getStr(props, "name") + filePath := getStr(props, "filePath") + lang := getStr(props, "language") + startLine := getNum(props, "startLine") + endLine := getNum(props, "endLine") + + title := fmt.Sprintf("%s Type — %s Architecture", name, c.repoName) + desc := fmt.Sprintf("Architecture documentation for the %s type/interface", name) + if filePath != "" { + desc += fmt.Sprintf(" in %s", filepath.Base(filePath)) + } + desc += fmt.Sprintf(" from the %s codebase.", c.repoName) + + fmt.Fprintf(sb, "title: %q\n", title) + fmt.Fprintf(sb, "description: %q\n", desc) + sb.WriteString("node_type: \"Type\"\n") + fmt.Fprintf(sb, "type_name: 
%q\n", name) + if filePath != "" { + fmt.Fprintf(sb, "file_path: %q\n", filePath) + dir := filepath.Dir(filePath) + if dir != "" && dir != "." { + fmt.Fprintf(sb, "directory: %q\n", dir) + } + } + if lang != "" { + fmt.Fprintf(sb, "language: %q\n", lang) + } + if startLine > 0 { + fmt.Fprintf(sb, "start_line: %d\n", startLine) + } + if endLine > 0 { + fmt.Fprintf(sb, "end_line: %d\n", endLine) + fmt.Fprintf(sb, "line_count: %d\n", endLine-startLine+1) + } + fmt.Fprintf(sb, "repo: %q\n", c.repoName) + + if d, ok := c.belongsToDomain[c.node.ID]; ok { + fmt.Fprintf(sb, "domain: %q\n", d) + } + if s, ok := c.belongsToSubdomain[c.node.ID]; ok { + fmt.Fprintf(sb, "subdomain: %q\n", s) + } + + c.writeTags(sb) +} + +func (c *renderContext) writeDomainFrontmatter(sb *strings.Builder) { + name := getStr(c.node.Properties, "name") + if name == "" { + name = c.node.ID + } + + nodeDesc := getStr(c.node.Properties, "description") + fileCount := len(c.domainFiles[name]) + title := fmt.Sprintf("%s Domain — %s Architecture", name, c.repoName) + desc := "" + if nodeDesc != "" { + desc = nodeDesc + " " + } + desc += fmt.Sprintf("Architectural overview of the %s domain in the %s codebase. 
Contains %d source files.", name, c.repoName, fileCount) + + fmt.Fprintf(sb, "title: %q\n", title) + fmt.Fprintf(sb, "description: %q\n", desc) + sb.WriteString("node_type: \"Domain\"\n") + fmt.Fprintf(sb, "domain: %q\n", name) + fmt.Fprintf(sb, "repo: %q\n", c.repoName) + fmt.Fprintf(sb, "file_count: %d\n", fileCount) + if nodeDesc != "" { + fmt.Fprintf(sb, "summary: %q\n", nodeDesc) + } + + c.writeTags(sb) +} + +func (c *renderContext) writeSubdomainFrontmatter(sb *strings.Builder) { + name := getStr(c.node.Properties, "name") + if name == "" { + name = c.node.ID + } + + nodeDesc := getStr(c.node.Properties, "description") + parentDomain := c.partOfDomain[c.node.ID] + fileCount := len(c.subdomainFiles[name]) + + title := fmt.Sprintf("%s — %s Architecture", name, c.repoName) + desc := "" + if nodeDesc != "" { + desc = nodeDesc + " " + } + desc += fmt.Sprintf("Architecture documentation for the %s subdomain", name) + if parentDomain != "" { + desc += fmt.Sprintf(" (part of %s domain)", parentDomain) + } + desc += fmt.Sprintf(" in the %s codebase. 
Contains %d source files.", c.repoName, fileCount) + + fmt.Fprintf(sb, "title: %q\n", title) + fmt.Fprintf(sb, "description: %q\n", desc) + sb.WriteString("node_type: \"Subdomain\"\n") + fmt.Fprintf(sb, "subdomain: %q\n", name) + if parentDomain != "" { + fmt.Fprintf(sb, "domain: %q\n", parentDomain) + } + fmt.Fprintf(sb, "repo: %q\n", c.repoName) + fmt.Fprintf(sb, "file_count: %d\n", fileCount) + if nodeDesc != "" { + fmt.Fprintf(sb, "summary: %q\n", nodeDesc) + } + + c.writeTags(sb) +} + +func (c *renderContext) writeDirectoryFrontmatter(sb *strings.Builder) { + props := c.node.Properties + name := getStr(props, "name") + path := getStr(props, "path") + if name == "" { + name = filepath.Base(path) + } + if path == "" { + path = name + } + + // Skip root directory + if strings.Contains(path, "/app/repo-root/") { + return + } + + fileCount := len(c.containsFile[c.node.ID]) + subdirCount := len(c.childDir[c.node.ID]) + + // Aggregate function/class/type counts from contained files + funcCount := 0 + classCount := 0 + typeCount := 0 + for _, fileID := range c.containsFile[c.node.ID] { + funcCount += len(c.definesFunc[fileID]) + classCount += len(c.declaresClass[fileID]) + typeCount += len(c.definesType[fileID]) + } + + title := fmt.Sprintf("%s/ — %s Directory Structure", path, c.repoName) + desc := fmt.Sprintf("Directory listing for %s/ in the %s codebase. 
Contains %d files and %d subdirectories.", path, c.repoName, fileCount, subdirCount) + + fmt.Fprintf(sb, "title: %q\n", title) + fmt.Fprintf(sb, "description: %q\n", desc) + sb.WriteString("node_type: \"Directory\"\n") + fmt.Fprintf(sb, "dir_name: %q\n", name) + fmt.Fprintf(sb, "dir_path: %q\n", path) + fmt.Fprintf(sb, "repo: %q\n", c.repoName) + fmt.Fprintf(sb, "file_count: %d\n", fileCount) + fmt.Fprintf(sb, "subdir_count: %d\n", subdirCount) + fmt.Fprintf(sb, "function_count: %d\n", funcCount) + fmt.Fprintf(sb, "class_count: %d\n", classCount) + fmt.Fprintf(sb, "type_count: %d\n", typeCount) + + parts := strings.Split(path, "/") + if len(parts) > 0 { + fmt.Fprintf(sb, "top_directory: %q\n", parts[0]) + } + + c.writeTags(sb) +} + +// --- Body writers --- + +func (c *renderContext) writeFileBody(sb *strings.Builder) { + props := c.node.Properties + path := getStr(props, "path") + + // Domain link + if d, ok := c.belongsToDomain[c.node.ID]; ok { + sb.WriteString("## Domain\n\n") + fmt.Fprintf(sb, "- %s\n", c.domainLink(d)) + sb.WriteString("\n") + + // Subdomain link (only show if domain exists) + if s, ok := c.belongsToSubdomain[c.node.ID]; ok { + sb.WriteString("## Subdomains\n\n") + fmt.Fprintf(sb, "- %s\n", c.subdomainLink(s)) + sb.WriteString("\n") + } + } + + // Functions defined in this file + funcs := c.definesFunc[c.node.ID] + if len(funcs) > 0 { + sb.WriteString("## Functions\n\n") + c.writeLinkedList(sb, funcs, func(id string) string { + name := c.resolveName(id) + return c.internalLink(id, name+"()") + }) + } + + // Classes defined in this file + classes := c.declaresClass[c.node.ID] + if len(classes) > 0 { + sb.WriteString("## Classes\n\n") + c.writeLinkedList(sb, classes, func(id string) string { + return c.internalLink(id, c.resolveName(id)) + }) + } + + // Types defined in this file + types := c.definesType[c.node.ID] + if len(types) > 0 { + sb.WriteString("## Types\n\n") + c.writeLinkedList(sb, types, func(id string) string { + return 
c.internalLink(id, c.resolveName(id)) + }) + } + + // Dependencies + deps := c.imports[c.node.ID] + if len(deps) > 0 { + sb.WriteString("## Dependencies\n\n") + c.writeLinkedList(sb, deps, func(id string) string { + return c.internalLink(id, c.resolveName(id)) + }) + } + + // Imported By + ib := c.importedBy[c.node.ID] + if len(ib) > 0 { + sb.WriteString("## Imported By\n\n") + c.writeLinkedList(sb, ib, func(id string) string { + return c.internalLink(id, c.resolveNameWithPath(id)) + }) + } + + // Source link + if path != "" && c.repoURL != "" { + sb.WriteString("## Source\n\n") + fmt.Fprintf(sb, "- View on GitHub\n\n", c.repoURL, path) + } +} + +func (c *renderContext) writeFunctionBody(sb *strings.Builder) { + props := c.node.Properties + filePath := getStr(props, "filePath") + startLine := getNum(props, "startLine") + + // Defined In + if fileID, ok := c.fileOfFunc[c.node.ID]; ok { + sb.WriteString("## Defined In\n\n") + fmt.Fprintf(sb, "- %s\n", c.internalLink(fileID, c.resolveNameWithPath(fileID))) + sb.WriteString("\n") + } + + // Domain link + if d, ok := c.belongsToDomain[c.node.ID]; ok { + sb.WriteString("## Domain\n\n") + fmt.Fprintf(sb, "- %s\n", c.domainLink(d)) + sb.WriteString("\n") + + if s, ok := c.belongsToSubdomain[c.node.ID]; ok { + sb.WriteString("## Subdomains\n\n") + fmt.Fprintf(sb, "- %s\n", c.subdomainLink(s)) + sb.WriteString("\n") + } + } + + // Calls + called := c.calls[c.node.ID] + if len(called) > 0 { + sb.WriteString("## Calls\n\n") + c.writeLinkedList(sb, called, func(id string) string { + name := c.resolveName(id) + return c.internalLink(id, name+"()") + }) + } + + // Called By + callers := c.calledBy[c.node.ID] + if len(callers) > 0 { + sb.WriteString("## Called By\n\n") + c.writeLinkedList(sb, callers, func(id string) string { + name := c.resolveName(id) + return c.internalLink(id, name+"()") + }) + } + + // Source + if filePath != "" && c.repoURL != "" { + sb.WriteString("## Source\n\n") + link := fmt.Sprintf("%s/blob/main/%s", 
c.repoURL, filePath) + if startLine > 0 { + link += fmt.Sprintf("#L%d", startLine) + } + fmt.Fprintf(sb, "- View on GitHub\n\n", link) + } +} + +func (c *renderContext) writeClassBody(sb *strings.Builder) { + props := c.node.Properties + filePath := getStr(props, "filePath") + startLine := getNum(props, "startLine") + + // Defined In + if fileID, ok := c.fileOfClass[c.node.ID]; ok { + sb.WriteString("## Defined In\n\n") + fmt.Fprintf(sb, "- %s\n", c.internalLink(fileID, c.resolveNameWithPath(fileID))) + sb.WriteString("\n") + } + + // Domain link + if d, ok := c.belongsToDomain[c.node.ID]; ok { + sb.WriteString("## Domain\n\n") + fmt.Fprintf(sb, "- %s\n", c.domainLink(d)) + sb.WriteString("\n") + + if s, ok := c.belongsToSubdomain[c.node.ID]; ok { + sb.WriteString("## Subdomains\n\n") + fmt.Fprintf(sb, "- %s\n", c.subdomainLink(s)) + sb.WriteString("\n") + } + } + + // Extends + extends := c.extendsRel[c.node.ID] + if len(extends) > 0 { + sb.WriteString("## Extends\n\n") + for _, id := range extends { + fmt.Fprintf(sb, "- %s\n", c.internalLink(id, c.resolveName(id))) + } + sb.WriteString("\n") + } + + // Source + if filePath != "" && c.repoURL != "" { + sb.WriteString("## Source\n\n") + link := fmt.Sprintf("%s/blob/main/%s", c.repoURL, filePath) + if startLine > 0 { + link += fmt.Sprintf("#L%d", startLine) + } + fmt.Fprintf(sb, "- View on GitHub\n\n", link) + } +} + +func (c *renderContext) writeTypeBody(sb *strings.Builder) { + props := c.node.Properties + filePath := getStr(props, "filePath") + startLine := getNum(props, "startLine") + + // Defined In + if fileID, ok := c.fileOfType[c.node.ID]; ok { + sb.WriteString("## Defined In\n\n") + fmt.Fprintf(sb, "- %s\n", c.internalLink(fileID, c.resolveNameWithPath(fileID))) + sb.WriteString("\n") + } + + // Domain link + if d, ok := c.belongsToDomain[c.node.ID]; ok { + sb.WriteString("## Domain\n\n") + fmt.Fprintf(sb, "- %s\n", c.domainLink(d)) + sb.WriteString("\n") + + if s, ok := c.belongsToSubdomain[c.node.ID]; ok 
{ + sb.WriteString("## Subdomains\n\n") + fmt.Fprintf(sb, "- %s\n", c.subdomainLink(s)) + sb.WriteString("\n") + } + } + + if filePath != "" && c.repoURL != "" { + sb.WriteString("## Source\n\n") + link := fmt.Sprintf("%s/blob/main/%s", c.repoURL, filePath) + if startLine > 0 { + link += fmt.Sprintf("#L%d", startLine) + } + fmt.Fprintf(sb, "- View on GitHub\n\n", link) + } +} + +func (c *renderContext) writeDomainBody(sb *strings.Builder) { + name := getStr(c.node.Properties, "name") + + // Subdomains + subs := c.domainSubdomains[name] + if len(subs) > 0 { + sb.WriteString("## Subdomains\n\n") + c.writeLinkedList(sb, subs, func(id string) string { + return c.internalLink(id, c.resolveName(id)) + }) + } + + // Source Files + files := c.domainFiles[name] + if len(files) > 0 { + sb.WriteString("## Source Files\n\n") + c.writeLinkedList(sb, files, func(id string) string { + return c.internalLink(id, c.resolveNameWithPath(id)) + }) + } +} + +func (c *renderContext) writeSubdomainBody(sb *strings.Builder) { + name := getStr(c.node.Properties, "name") + + // Domain link + if parentDomain := c.partOfDomain[c.node.ID]; parentDomain != "" { + sb.WriteString("## Domain\n\n") + fmt.Fprintf(sb, "- %s\n", c.domainLink(parentDomain)) + sb.WriteString("\n") + } + + // Functions in this subdomain + funcs := c.subdomainFuncs[name] + if len(funcs) > 0 { + sb.WriteString("## Functions\n\n") + c.writeLinkedList(sb, funcs, func(id string) string { + fnName := c.resolveName(id) + return c.internalLink(id, fnName+"()") + }) + } + + // Classes in this subdomain + classes := c.subdomainClasses[name] + if len(classes) > 0 { + sb.WriteString("## Classes\n\n") + c.writeLinkedList(sb, classes, func(id string) string { + return c.internalLink(id, c.resolveName(id)) + }) + } + + // Source Files + files := c.subdomainFiles[name] + if len(files) > 0 { + sb.WriteString("## Source Files\n\n") + c.writeLinkedList(sb, files, func(id string) string { + return c.internalLink(id, 
c.resolveNameWithPath(id)) + }) + } +} + +func (c *renderContext) writeDirectoryBody(sb *strings.Builder) { + // Subdirectories + subdirs := c.childDir[c.node.ID] + if len(subdirs) > 0 { + sb.WriteString("## Subdirectories\n\n") + c.writeLinkedList(sb, subdirs, func(id string) string { + label := c.resolveNameWithPath(id) + "/" + return c.internalLink(id, label) + }) + } + + // Files + files := c.containsFile[c.node.ID] + if len(files) > 0 { + sb.WriteString("## Files\n\n") + c.writeLinkedList(sb, files, func(id string) string { + return c.internalLink(id, c.resolveName(id)) + }) + } +} + +// --- FAQ Section --- + +func (c *renderContext) writeFAQSection(sb *strings.Builder) { + type faqEntry struct{ q, a string } + var faqs []faqEntry + + name := getStr(c.node.Properties, "name") + if name == "" { + name = c.node.ID + } + + switch c.label { + case "File": + path := getStr(c.node.Properties, "path") + fileName := getStr(c.node.Properties, "name") + if fileName == "" { + fileName = filepath.Base(path) + } + lang := getStr(c.node.Properties, "language") + + // What does file do? + desc := fmt.Sprintf("%s is a source file in the %s codebase", fileName, c.repoName) + if lang != "" { + desc += fmt.Sprintf(", written in %s", lang) + } + desc += "." + if d, ok := c.belongsToDomain[c.node.ID]; ok { + desc += fmt.Sprintf(" It belongs to the %s domain", d) + if s, ok := c.belongsToSubdomain[c.node.ID]; ok { + desc += fmt.Sprintf(", %s subdomain", s) + } + desc += "." + } + faqs = append(faqs, faqEntry{fmt.Sprintf("What does %s do?", fileName), desc}) + + // Functions defined + funcs := c.definesFunc[c.node.ID] + if len(funcs) > 0 { + names := c.resolveNames(funcs) + sort.Strings(names) + listed := names + if len(listed) > 10 { + listed = listed[:10] + } + a := fmt.Sprintf("%s defines %d function(s): %s", fileName, len(funcs), strings.Join(listed, ", ")) + if len(funcs) > 10 { + a += fmt.Sprintf(", and %d more", len(funcs)-10) + } + a += "." 
+ faqs = append(faqs, faqEntry{fmt.Sprintf("What functions are defined in %s?", fileName), a}) + } + + // Dependencies + deps := c.imports[c.node.ID] + if len(deps) > 0 { + names := c.resolveNames(deps) + sort.Strings(names) + listed := names + if len(listed) > 8 { + listed = listed[:8] + } + a := fmt.Sprintf("%s imports %d module(s): %s", fileName, len(deps), strings.Join(listed, ", ")) + if len(deps) > 8 { + a += fmt.Sprintf(", and %d more", len(deps)-8) + } + a += "." + faqs = append(faqs, faqEntry{fmt.Sprintf("What does %s depend on?", fileName), a}) + } + + // Imported by + ib := c.importedBy[c.node.ID] + if len(ib) > 0 { + names := c.resolveNames(ib) + sort.Strings(names) + listed := names + if len(listed) > 8 { + listed = listed[:8] + } + a := fmt.Sprintf("%s is imported by %d file(s): %s", fileName, len(ib), strings.Join(listed, ", ")) + if len(ib) > 8 { + a += fmt.Sprintf(", and %d more", len(ib)-8) + } + a += "." + faqs = append(faqs, faqEntry{fmt.Sprintf("What files import %s?", fileName), a}) + } + + // Architecture position + archParts := []string{} + if d, ok := c.belongsToDomain[c.node.ID]; ok { + archParts = append(archParts, fmt.Sprintf("domain: %s", d)) + } + if s, ok := c.belongsToSubdomain[c.node.ID]; ok { + archParts = append(archParts, fmt.Sprintf("subdomain: %s", s)) + } + dir := filepath.Dir(path) + if dir != "" && dir != "." { + archParts = append(archParts, fmt.Sprintf("directory: %s", dir)) + } + if len(archParts) > 0 { + faqs = append(faqs, faqEntry{ + fmt.Sprintf("Where is %s in the architecture?", fileName), + fmt.Sprintf("%s is located at %s (%s).", fileName, path, strings.Join(archParts, ", ")), + }) + } + + case "Function": + funcName := name + "()" + + // What does it do? + desc := fmt.Sprintf("%s is a function in the %s codebase", funcName, c.repoName) + if fileID, ok := c.fileOfFunc[c.node.ID]; ok { + desc += fmt.Sprintf(", defined in %s", c.resolveNameWithPath(fileID)) + } + desc += "." 
+ faqs = append(faqs, faqEntry{fmt.Sprintf("What does %s do?", funcName), desc}) + + // Where defined + if fileID, ok := c.fileOfFunc[c.node.ID]; ok { + filePath := c.resolveNameWithPath(fileID) + startLine := getNum(c.node.Properties, "startLine") + a := fmt.Sprintf("%s is defined in %s", funcName, filePath) + if startLine > 0 { + a += fmt.Sprintf(" at line %d", startLine) + } + a += "." + faqs = append(faqs, faqEntry{fmt.Sprintf("Where is %s defined?", funcName), a}) + } + + // What does it call? + called := c.calls[c.node.ID] + if len(called) > 0 { + names := c.resolveNames(called) + sort.Strings(names) + listed := names + if len(listed) > 8 { + listed = listed[:8] + } + a := fmt.Sprintf("%s calls %d function(s): %s", funcName, len(called), strings.Join(listed, ", ")) + if len(called) > 8 { + a += fmt.Sprintf(", and %d more", len(called)-8) + } + a += "." + faqs = append(faqs, faqEntry{fmt.Sprintf("What does %s call?", funcName), a}) + } + + // What calls it? + callers := c.calledBy[c.node.ID] + if len(callers) > 0 { + names := c.resolveNames(callers) + sort.Strings(names) + listed := names + if len(listed) > 8 { + listed = listed[:8] + } + a := fmt.Sprintf("%s is called by %d function(s): %s", funcName, len(callers), strings.Join(listed, ", ")) + if len(callers) > 8 { + a += fmt.Sprintf(", and %d more", len(callers)-8) + } + a += "." + faqs = append(faqs, faqEntry{fmt.Sprintf("What calls %s?", funcName), a}) + } + + case "Class": + className := name + + desc := fmt.Sprintf("%s is a class in the %s codebase", className, c.repoName) + if fileID, ok := c.fileOfClass[c.node.ID]; ok { + desc += fmt.Sprintf(", defined in %s", c.resolveNameWithPath(fileID)) + } + desc += "." 
+ faqs = append(faqs, faqEntry{fmt.Sprintf("What is the %s class?", className), desc}) + + if fileID, ok := c.fileOfClass[c.node.ID]; ok { + filePath := c.resolveNameWithPath(fileID) + startLine := getNum(c.node.Properties, "startLine") + a := fmt.Sprintf("%s is defined in %s", className, filePath) + if startLine > 0 { + a += fmt.Sprintf(" at line %d", startLine) + } + a += "." + faqs = append(faqs, faqEntry{fmt.Sprintf("Where is %s defined?", className), a}) + } + + extends := c.extendsRel[c.node.ID] + if len(extends) > 0 { + names := c.resolveNames(extends) + faqs = append(faqs, faqEntry{ + fmt.Sprintf("What does %s extend?", className), + fmt.Sprintf("%s extends %s.", className, strings.Join(names, ", ")), + }) + } + + case "Type": + typeName := name + + desc := fmt.Sprintf("%s is a type/interface in the %s codebase", typeName, c.repoName) + if fileID, ok := c.fileOfType[c.node.ID]; ok { + desc += fmt.Sprintf(", defined in %s", c.resolveNameWithPath(fileID)) + } + desc += "." + faqs = append(faqs, faqEntry{fmt.Sprintf("What is the %s type?", typeName), desc}) + + if fileID, ok := c.fileOfType[c.node.ID]; ok { + filePath := c.resolveNameWithPath(fileID) + startLine := getNum(c.node.Properties, "startLine") + a := fmt.Sprintf("%s is defined in %s", typeName, filePath) + if startLine > 0 { + a += fmt.Sprintf(" at line %d", startLine) + } + a += "." + faqs = append(faqs, faqEntry{fmt.Sprintf("Where is %s defined?", typeName), a}) + } + + case "Domain": + domainName := name + fileCount := len(c.domainFiles[domainName]) + subs := c.domainSubdomains[domainName] + + nodeDesc := getStr(c.node.Properties, "description") + desc := fmt.Sprintf("The %s domain is an architectural grouping in the %s codebase", domainName, c.repoName) + if nodeDesc != "" { + desc += ". 
" + nodeDesc + } + desc += fmt.Sprintf(" It contains %d source files.", fileCount) + faqs = append(faqs, faqEntry{fmt.Sprintf("What is the %s domain?", domainName), desc}) + + if len(subs) > 0 { + names := c.resolveNames(subs) + sort.Strings(names) + faqs = append(faqs, faqEntry{ + fmt.Sprintf("What subdomains are in %s?", domainName), + fmt.Sprintf("The %s domain contains %d subdomain(s): %s.", domainName, len(subs), strings.Join(names, ", ")), + }) + } + + faqs = append(faqs, faqEntry{ + fmt.Sprintf("How many files are in %s?", domainName), + fmt.Sprintf("The %s domain contains %d source files.", domainName, fileCount), + }) + + case "Subdomain": + subName := name + parentDomain := c.partOfDomain[c.node.ID] + fileCount := len(c.subdomainFiles[subName]) + funcs := c.subdomainFuncs[subName] + + nodeDesc := getStr(c.node.Properties, "description") + desc := fmt.Sprintf("%s is a subdomain in the %s codebase", subName, c.repoName) + if parentDomain != "" { + desc += fmt.Sprintf(", part of the %s domain", parentDomain) + } + if nodeDesc != "" { + desc += ". " + nodeDesc + } + desc += fmt.Sprintf(" It contains %d source files.", fileCount) + faqs = append(faqs, faqEntry{fmt.Sprintf("What is the %s subdomain?", subName), desc}) + + if parentDomain != "" { + faqs = append(faqs, faqEntry{ + fmt.Sprintf("Which domain does %s belong to?", subName), + fmt.Sprintf("%s belongs to the %s domain.", subName, parentDomain), + }) + } + + if len(funcs) > 0 { + names := c.resolveNames(funcs) + sort.Strings(names) + listed := names + if len(listed) > 8 { + listed = listed[:8] + } + a := fmt.Sprintf("The %s subdomain contains %d function(s): %s", subName, len(funcs), strings.Join(listed, ", ")) + if len(funcs) > 8 { + a += fmt.Sprintf(", and %d more", len(funcs)-8) + } + a += "." 
+ faqs = append(faqs, faqEntry{fmt.Sprintf("What functions are in %s?", subName), a}) + } + + case "Directory": + dirName := getStr(c.node.Properties, "name") + if dirName == "" { + dirName = filepath.Base(getStr(c.node.Properties, "path")) + } + files := c.containsFile[c.node.ID] + subdirs := c.childDir[c.node.ID] + + desc := fmt.Sprintf("The %s/ directory contains %d files and %d subdirectories in the %s codebase.", dirName, len(files), len(subdirs), c.repoName) + faqs = append(faqs, faqEntry{fmt.Sprintf("What's in the %s/ directory?", dirName), desc}) + + if len(subdirs) > 0 { + names := c.resolveNames(subdirs) + sort.Strings(names) + faqs = append(faqs, faqEntry{ + fmt.Sprintf("What subdirectories does %s/ contain?", dirName), + fmt.Sprintf("%s/ contains %d subdirectory(ies): %s.", dirName, len(subdirs), strings.Join(names, ", ")), + }) + } + } + + // Require minimum 2 FAQs + if len(faqs) < 2 { + return + } + + sb.WriteString("## FAQs\n\n") + for _, faq := range faqs { + fmt.Fprintf(sb, "### %s\n\n%s\n\n", faq.q, faq.a) + } +} + +// --- Graph Data (frontmatter) --- + +type graphNode struct { + ID string `json:"id"` + Label string `json:"label"` + Type string `json:"type"` + Slug string `json:"slug"` + LC int `json:"lc,omitempty"` // line count + Lang string `json:"lang,omitempty"` // language + CC int `json:"cc,omitempty"` // call count (calls out) + CBC int `json:"cbc,omitempty"` // called by count +} + +type graphEdge struct { + Source string `json:"source"` + Target string `json:"target"` + Type string `json:"type"` +} + +type graphData struct { + Nodes []graphNode `json:"nodes"` + Edges []graphEdge `json:"edges"` +} + +func (c *renderContext) writeGraphData(sb *strings.Builder) { + var nodes []graphNode + var edges []graphEdge + seen := make(map[string]bool) + + addNode := func(nodeID string) { + if seen[nodeID] || len(seen) >= 31 { // center + 30 neighbors + return + } + n := c.nodeLookup[nodeID] + if n == nil { + return + } + seen[nodeID] = true + label 
:= getStr(n.Properties, "name") + if label == "" { + label = nodeID + } + nodeType := "" + if len(n.Labels) > 0 { + nodeType = n.Labels[0] + } + // Enrichment data + lineCount := 0 + startLine := getNum(n.Properties, "startLine") + endLine := getNum(n.Properties, "endLine") + if startLine > 0 && endLine > 0 { + lineCount = endLine - startLine + 1 + } + lang := getStr(n.Properties, "language") + callCount := len(c.calls[nodeID]) + calledByCount := len(c.calledBy[nodeID]) + + nodes = append(nodes, graphNode{ + ID: nodeID, + Label: label, + Type: nodeType, + Slug: c.slugLookup[nodeID], + LC: lineCount, + Lang: lang, + CC: callCount, + CBC: calledByCount, + }) + } + + addEdge := func(from, to, relType string) { + edges = append(edges, graphEdge{Source: from, Target: to, Type: relType}) + } + + // Add center node + addNode(c.node.ID) + + // Collect neighbor relationships + relSets := []struct { + ids []string + relType string + reverse bool // if true, edge goes neighbor -> center + }{ + {c.imports[c.node.ID], "imports", false}, + {c.importedBy[c.node.ID], "imports", true}, + {c.calls[c.node.ID], "calls", false}, + {c.calledBy[c.node.ID], "calls", true}, + {c.definesFunc[c.node.ID], "defines", false}, + {c.declaresClass[c.node.ID], "defines", false}, + {c.definesType[c.node.ID], "defines", false}, + {c.extendsRel[c.node.ID], "extends", false}, + {c.containsFile[c.node.ID], "contains", false}, + {c.childDir[c.node.ID], "contains", false}, + } + + // Add file-of reverse lookups + if fileID, ok := c.fileOfFunc[c.node.ID]; ok { + relSets = append(relSets, struct { + ids []string + relType string + reverse bool + }{[]string{fileID}, "defines", true}) + } + if fileID, ok := c.fileOfClass[c.node.ID]; ok { + relSets = append(relSets, struct { + ids []string + relType string + reverse bool + }{[]string{fileID}, "defines", true}) + } + if fileID, ok := c.fileOfType[c.node.ID]; ok { + relSets = append(relSets, struct { + ids []string + relType string + reverse bool + 
}{[]string{fileID}, "defines", true}) + } + + // Domain/subdomain neighbors + if domName, ok := c.belongsToDomain[c.node.ID]; ok { + if domNodeID, ok := c.domainNodeByName[domName]; ok { + relSets = append(relSets, struct { + ids []string + relType string + reverse bool + }{[]string{domNodeID}, "belongsTo", false}) + } + } + if subName, ok := c.belongsToSubdomain[c.node.ID]; ok { + if subNodeID, ok := c.subdomainNodeByName[subName]; ok { + relSets = append(relSets, struct { + ids []string + relType string + reverse bool + }{[]string{subNodeID}, "belongsTo", false}) + } + } + + // For domains: add subdomain children + if c.label == "Domain" { + domName := getStr(c.node.Properties, "name") + relSets = append(relSets, struct { + ids []string + relType string + reverse bool + }{c.domainSubdomains[domName], "contains", false}) + } + // For subdomains: add domain parent + if c.label == "Subdomain" { + if parentDom := c.partOfDomain[c.node.ID]; parentDom != "" { + if domNodeID, ok := c.domainNodeByName[parentDom]; ok { + relSets = append(relSets, struct { + ids []string + relType string + reverse bool + }{[]string{domNodeID}, "partOf", false}) + } + } + } + + for _, rs := range relSets { + for _, id := range rs.ids { + if len(seen) >= 31 { + break + } + addNode(id) + if !seen[id] { + continue // node wasn't added (cap reached before) + } + if rs.reverse { + addEdge(id, c.node.ID, rs.relType) + } else { + addEdge(c.node.ID, id, rs.relType) + } + } + } + + if len(nodes) < 2 { + return // no neighbors, skip + } + + gd := graphData{Nodes: nodes, Edges: edges} + data, err := json.Marshal(gd) + if err != nil { + return + } + fmt.Fprintf(sb, "graph_data: %q\n", string(data)) +} + +// --- Mermaid Diagram (frontmatter) --- + +func mermaidEscape(s string) string { + // Escape special chars for Mermaid node labels + s = strings.ReplaceAll(s, `"`, "#quot;") + s = strings.ReplaceAll(s, `<`, "#lt;") + s = strings.ReplaceAll(s, `>`, "#gt;") + return s +} + +func mermaidID(nodeID string) 
string { + // Create valid Mermaid node ID from arbitrary string + id := strings.Map(func(r rune) rune { + if (r >= 'a' && r <= 'z') || (r >= 'A' && r <= 'Z') || (r >= '0' && r <= '9') || r == '_' { + return r + } + return '_' + }, nodeID) + if id == "" { + id = "node" + } + return id +} + +func (c *renderContext) writeMermaidDiagram(sb *strings.Builder) { + var lines []string + centerID := mermaidID(c.node.ID) + centerLabel := mermaidEscape(getStr(c.node.Properties, "name")) + if centerLabel == "" { + centerLabel = mermaidEscape(c.node.ID) + } + nodeCount := 0 + maxNodes := 15 + + addedNodes := make(map[string]bool) + + addNode := func(nodeID, label string) string { + mid := mermaidID(nodeID) + if !addedNodes[mid] { + addedNodes[mid] = true + nodeCount++ + } + return mid + } + + switch c.label { + case "File": + lines = append(lines, "graph LR") + lines = append(lines, fmt.Sprintf(" %s[\"%s\"]", centerID, centerLabel)) + addedNodes[centerID] = true + nodeCount++ + + // Imports + for _, id := range c.imports[c.node.ID] { + if nodeCount >= maxNodes { + break + } + label := mermaidEscape(c.resolveName(id)) + mid := addNode(id, label) + lines = append(lines, fmt.Sprintf(" %s[\"%s\"]", mid, label)) + lines = append(lines, fmt.Sprintf(" %s --> %s", centerID, mid)) + } + // ImportedBy + for _, id := range c.importedBy[c.node.ID] { + if nodeCount >= maxNodes { + break + } + label := mermaidEscape(c.resolveName(id)) + mid := addNode(id, label) + lines = append(lines, fmt.Sprintf(" %s[\"%s\"]", mid, label)) + lines = append(lines, fmt.Sprintf(" %s --> %s", mid, centerID)) + } + + case "Function": + lines = append(lines, "graph TD") + lines = append(lines, fmt.Sprintf(" %s[\"%s()\"]", centerID, centerLabel)) + addedNodes[centerID] = true + nodeCount++ + + // File it's defined in + if fileID, ok := c.fileOfFunc[c.node.ID]; ok { + if nodeCount < maxNodes { + label := mermaidEscape(c.resolveName(fileID)) + mid := addNode(fileID, label) + lines = append(lines, fmt.Sprintf(" 
%s[\"%s\"]", mid, label)) + lines = append(lines, fmt.Sprintf(" %s -->|defined in| %s", centerID, mid)) + } + } + + for _, id := range c.calledBy[c.node.ID] { + if nodeCount >= maxNodes { + break + } + label := mermaidEscape(c.resolveName(id)) + mid := addNode(id, label) + lines = append(lines, fmt.Sprintf(" %s[\"%s()\"]", mid, label)) + lines = append(lines, fmt.Sprintf(" %s -->|calls| %s", mid, centerID)) + } + for _, id := range c.calls[c.node.ID] { + if nodeCount >= maxNodes { + break + } + label := mermaidEscape(c.resolveName(id)) + mid := addNode(id, label) + lines = append(lines, fmt.Sprintf(" %s[\"%s()\"]", mid, label)) + lines = append(lines, fmt.Sprintf(" %s -->|calls| %s", centerID, mid)) + } + + case "Type": + lines = append(lines, "graph TD") + lines = append(lines, fmt.Sprintf(" %s[\"%s\"]", centerID, centerLabel)) + addedNodes[centerID] = true + nodeCount++ + + // File it's defined in + if fileID, ok := c.fileOfType[c.node.ID]; ok { + if nodeCount < maxNodes { + label := mermaidEscape(c.resolveName(fileID)) + mid := addNode(fileID, label) + lines = append(lines, fmt.Sprintf(" %s[\"%s\"]", mid, label)) + lines = append(lines, fmt.Sprintf(" %s -->|defined in| %s", centerID, mid)) + } + } + + case "Class": + lines = append(lines, "graph TD") + lines = append(lines, fmt.Sprintf(" %s[\"%s\"]", centerID, centerLabel)) + addedNodes[centerID] = true + nodeCount++ + + // Parent classes (extends) + for _, id := range c.extendsRel[c.node.ID] { + if nodeCount >= maxNodes { + break + } + label := mermaidEscape(c.resolveName(id)) + mid := addNode(id, label) + lines = append(lines, fmt.Sprintf(" %s[\"%s\"]", mid, label)) + lines = append(lines, fmt.Sprintf(" %s -->|extends| %s", centerID, mid)) + } + + // File it's defined in + if fileID, ok := c.fileOfClass[c.node.ID]; ok { + if nodeCount < maxNodes { + label := mermaidEscape(c.resolveName(fileID)) + mid := addNode(fileID, label) + lines = append(lines, fmt.Sprintf(" %s[\"%s\"]", mid, label)) + lines = 
append(lines, fmt.Sprintf(" %s -->|defined in| %s", centerID, mid)) + } + } + + // Methods defined on this class + for _, id := range c.definesFunc[c.node.ID] { + if nodeCount >= maxNodes { + break + } + label := mermaidEscape(c.resolveName(id)) + mid := addNode(id, label) + lines = append(lines, fmt.Sprintf(" %s[\"%s()\"]", mid, label)) + lines = append(lines, fmt.Sprintf(" %s -->|method| %s", centerID, mid)) + } + + case "Domain": + lines = append(lines, "graph TD") + domName := getStr(c.node.Properties, "name") + lines = append(lines, fmt.Sprintf(" %s[\"%s\"]", centerID, mermaidEscape(domName))) + addedNodes[centerID] = true + nodeCount++ + + for _, subID := range c.domainSubdomains[domName] { + if nodeCount >= maxNodes { + break + } + label := mermaidEscape(c.resolveName(subID)) + mid := addNode(subID, label) + lines = append(lines, fmt.Sprintf(" %s[\"%s\"]", mid, label)) + lines = append(lines, fmt.Sprintf(" %s --> %s", centerID, mid)) + } + + case "Subdomain": + lines = append(lines, "graph TD") + subName := getStr(c.node.Properties, "name") + lines = append(lines, fmt.Sprintf(" %s[\"%s\"]", centerID, mermaidEscape(subName))) + addedNodes[centerID] = true + nodeCount++ + + files := c.subdomainFiles[subName] + for _, fID := range files { + if nodeCount >= maxNodes { + break + } + label := mermaidEscape(c.resolveName(fID)) + mid := addNode(fID, label) + lines = append(lines, fmt.Sprintf(" %s[\"%s\"]", mid, label)) + lines = append(lines, fmt.Sprintf(" %s --> %s", centerID, mid)) + } + + case "Directory": + lines = append(lines, "graph TD") + dirName := getStr(c.node.Properties, "name") + if dirName == "" { + dirName = filepath.Base(getStr(c.node.Properties, "path")) + } + lines = append(lines, fmt.Sprintf(" %s[\"%s/\"]", centerID, mermaidEscape(dirName))) + addedNodes[centerID] = true + nodeCount++ + + for _, id := range c.childDir[c.node.ID] { + if nodeCount >= maxNodes { + break + } + label := mermaidEscape(c.resolveName(id)) + mid := addNode(id, label) + 
lines = append(lines, fmt.Sprintf(" %s[\"%s/\"]", mid, label)) + lines = append(lines, fmt.Sprintf(" %s --> %s", centerID, mid)) + } + for _, id := range c.containsFile[c.node.ID] { + if nodeCount >= maxNodes { + break + } + label := mermaidEscape(c.resolveName(id)) + mid := addNode(id, label) + lines = append(lines, fmt.Sprintf(" %s[\"%s\"]", mid, label)) + lines = append(lines, fmt.Sprintf(" %s --> %s", centerID, mid)) + } + + default: + return + } + + // Style the center node + if len(lines) > 1 && c.label != "Class" { + lines = append(lines, fmt.Sprintf(" style %s fill:#6366f1,stroke:#818cf8,color:#fff", centerID)) + } + + if nodeCount < 2 { + return + } + + diagram := strings.Join(lines, "\n") + fmt.Fprintf(sb, "mermaid_diagram: %q\n", diagram) +} + +// --- Architecture Map (frontmatter) --- + +func (c *renderContext) writeArchMap(sb *strings.Builder) { + archMap := make(map[string]interface{}) + + // Domain + if domName, ok := c.belongsToDomain[c.node.ID]; ok && domName != "" { + entry := map[string]string{"name": domName} + if domNodeID, ok := c.domainNodeByName[domName]; ok { + entry["slug"] = c.slugLookup[domNodeID] + } + archMap["domain"] = entry + } + + // Subdomain + if subName, ok := c.belongsToSubdomain[c.node.ID]; ok && subName != "" { + entry := map[string]string{"name": subName} + if subNodeID, ok := c.subdomainNodeByName[subName]; ok { + entry["slug"] = c.slugLookup[subNodeID] + } + archMap["subdomain"] = entry + } + + // File (for functions/classes/types) + switch c.label { + case "Function": + if fileID, ok := c.fileOfFunc[c.node.ID]; ok { + archMap["file"] = map[string]string{ + "name": c.resolveName(fileID), + "slug": c.slugLookup[fileID], + } + } + case "Class": + if fileID, ok := c.fileOfClass[c.node.ID]; ok { + archMap["file"] = map[string]string{ + "name": c.resolveName(fileID), + "slug": c.slugLookup[fileID], + } + } + case "Type": + if fileID, ok := c.fileOfType[c.node.ID]; ok { + archMap["file"] = map[string]string{ + "name": 
c.resolveName(fileID),
				"slug": c.slugLookup[fileID],
			}
		}
	}

	// Entity itself
	name := getStr(c.node.Properties, "name")
	if name == "" {
		name = c.node.ID
	}
	archMap["entity"] = map[string]string{
		"name": name,
		"type": c.label,
		"slug": c.slug,
	}

	if len(archMap) < 2 {
		return // just the entity itself, not useful
	}

	// Emit as a single quoted JSON blob so the frontmatter stays one line.
	data, err := json.Marshal(archMap)
	if err != nil {
		return
	}
	fmt.Fprintf(sb, "arch_map: %q\n", string(data))
}

// writeLinkedList writes a sorted list of linked items.
// Items are sorted by their resolved display name; linkFn converts a node ID
// into the markdown link text that is emitted for it.
func (c *renderContext) writeLinkedList(sb *strings.Builder, nodeIDs []string, linkFn func(string) string) {
	type sortItem struct {
		label string
		id    string
	}
	items := make([]sortItem, 0, len(nodeIDs))
	for _, id := range nodeIDs {
		items = append(items, sortItem{label: c.resolveName(id), id: id})
	}
	sort.Slice(items, func(i, j int) bool {
		return items[i].label < items[j].label
	})
	for _, item := range items {
		fmt.Fprintf(sb, "- %s\n", linkFn(item.id))
	}
	sb.WriteString("\n")
}

// --- Tag generation ---

// writeTags emits a YAML `tags:` frontmatter list derived from the node's
// labels, its language property, and simple fan-in/fan-out heuristics
// (thresholds below are heuristic cut-offs, not API contracts).
func (c *renderContext) writeTags(sb *strings.Builder) {
	var tags []string

	for _, label := range c.node.Labels {
		tags = append(tags, label)
	}

	if lang := getStr(c.node.Properties, "language"); lang != "" {
		tags = append(tags, lang)
	}

	ibCount := len(c.importedBy[c.node.ID])
	impCount := len(c.imports[c.node.ID])
	cbCount := len(c.calledBy[c.node.ID])

	if ibCount >= 5 || cbCount >= 5 {
		tags = append(tags, "High-Dependency")
	}
	if impCount >= 5 {
		tags = append(tags, "Many-Imports")
	}

	funcCount := len(c.definesFunc[c.node.ID])
	classCount := len(c.declaresClass[c.node.ID])
	if funcCount >= 10 || classCount >= 5 {
		tags = append(tags, "Complex")
	}

	// "Isolated" only applies to File nodes with no visible relationships.
	if ibCount == 0 && impCount == 0 && cbCount == 0 && c.label == "File" {
		tags = append(tags, "Isolated")
	}

	if len(tags) > 0 {
		sb.WriteString("tags:\n")
		for _, t := range tags {
			fmt.Fprintf(sb, " - %q\n", t)
		}
	}
}

// --- Helpers ---

// resolveName returns the node's "name" property, falling back to its ID
// when the node is unknown or has no name.
func (c *renderContext) resolveName(nodeID string) string {
	n := c.nodeLookup[nodeID]
	if n == nil {
		return nodeID
	}
	name := getStr(n.Properties, "name")
	if name == "" {
		return nodeID
	}
	return name
}

// resolveNames maps resolveName over a slice of node IDs.
func (c *renderContext) resolveNames(nodeIDs []string) []string {
	result := make([]string, 0, len(nodeIDs))
	for _, id := range nodeIDs {
		result = append(result, c.resolveName(id))
	}
	return result
}

// resolveNameWithPath prefers a node's "path" (or "filePath") property over
// its "name", falling back to the raw node ID when neither is set.
func (c *renderContext) resolveNameWithPath(nodeID string) string {
	n := c.nodeLookup[nodeID]
	if n == nil {
		return nodeID
	}
	path := getStr(n.Properties, "path")
	if path == "" {
		path = getStr(n.Properties, "filePath")
	}
	name := getStr(n.Properties, "name")
	if path != "" {
		return path
	} else if name != "" {
		return name
	}
	return nodeID
}

// loadGraph reads a graph JSON file and tries three envelope formats in
// order: a full APIResponse wrapper, a bare GraphResult, and a bare Graph.
// The first format that parses with at least one node wins.
func loadGraph(path string) ([]Node, []Relationship, error) {
	data, err := os.ReadFile(path)
	if err != nil {
		return nil, nil, err
	}

	log.Printf(" File size: %d bytes", len(data))

	var resp APIResponse
	if err := json.Unmarshal(data, &resp); err != nil {
		log.Printf(" APIResponse unmarshal error: %v", err)
	} else if resp.Result == nil {
		log.Printf(" APIResponse parsed but Result is nil (status=%s)", resp.Status)
	} else {
		g := resp.Result.Graph
		log.Printf(" APIResponse parsed: %d nodes, %d rels", len(g.Nodes), len(g.Relationships))
		return g.Nodes, g.Relationships, nil
	}

	var result GraphResult
	if err := json.Unmarshal(data, &result); err == nil && len(result.Graph.Nodes) > 0 {
		return result.Graph.Nodes, result.Graph.Relationships, nil
	}

	var graph Graph
	if err := json.Unmarshal(data, &graph); err == nil && len(graph.Nodes) > 0 {
		return graph.Nodes, graph.Relationships, nil
	}

	return nil, nil, fmt.Errorf("unrecognized graph format")
}

// generateSlug builds a URL slug for a node based on its label. Returns ""
// when the node lacks the properties needed to form a stable slug (such
// nodes get no page). Function/Class/Type slugs include the base file name
// to disambiguate same-named symbols in different files.
func generateSlug(node Node, label string) string {
	props := node.Properties

	switch label {
	case "File":
		path := getStr(props, "path")
		if path == "" {
			return ""
		}
		return toSlug("file-" + path)
	case "Function":
		name := getStr(props, "name")
		filePath := getStr(props, "filePath")
		if name == "" {
			return ""
		}
		if filePath != "" {
			return toSlug("fn-" + filepath.Base(filePath) + "-" + name)
		}
		return toSlug("fn-" + name)
	case "Class":
		name := getStr(props, "name")
		filePath := getStr(props, "filePath")
		if name == "" {
			return ""
		}
		if filePath != "" {
			return toSlug("class-" + filepath.Base(filePath) + "-" + name)
		}
		return toSlug("class-" + name)
	case "Type":
		name := getStr(props, "name")
		filePath := getStr(props, "filePath")
		if name == "" {
			return ""
		}
		if filePath != "" {
			return toSlug("type-" + filepath.Base(filePath) + "-" + name)
		}
		return toSlug("type-" + name)
	case "Domain":
		name := getStr(props, "name")
		if name == "" {
			return ""
		}
		return toSlug("domain-" + name)
	case "Subdomain":
		name := getStr(props, "name")
		if name == "" {
			return ""
		}
		return toSlug("subdomain-" + name)
	case "Directory":
		path := getStr(props, "path")
		// Skip synthetic container paths — presumably an artifact of the
		// analysis sandbox; TODO confirm against API output.
		if path == "" || strings.Contains(path, "/app/repo-root/") {
			return ""
		}
		return toSlug("dir-" + path)
	default:
		return ""
	}
}

// hasLabel reports whether the node carries the given label.
func hasLabel(node *Node, label string) bool {
	for _, l := range node.Labels {
		if l == label {
			return true
		}
	}
	return false
}

// getStr returns m[key] as a string, or "" when absent or not a string.
func getStr(m map[string]interface{}, key string) string {
	v, ok := m[key]
	if !ok {
		return ""
	}
	s, ok := v.(string)
	if !ok {
		return ""
	}
	return s
}

// getNum returns m[key] as an int, handling the float64 values produced by
// encoding/json as well as native ints. Returns 0 when absent or non-numeric.
func getNum(m map[string]interface{}, key string) int {
	v, ok := m[key]
	if !ok {
		return 0
	}
	switch n := v.(type) {
	case float64:
		return int(n)
	case int:
		return n
	}
	return 0
}
diff --git a/internal/archdocs/handler.go b/internal/archdocs/handler.go
new file mode 100644
index 0000000..f57cd2b
--- /dev/null
+++ b/internal/archdocs/handler.go
@@ -0,0 +1,493 @@
// Package archdocs generates static architecture documentation for a repository
// by
uploading it to the Supermodel API, converting the returned graph to
// markdown via graph2md, and building a static HTML site with pssg.
package archdocs

import (
	"context"
	"embed"
	"fmt"
	"net/url"
	"os"
	"path/filepath"
	"strconv"
	"strings"

	"github.com/supermodeltools/cli/internal/api"
	"github.com/supermodeltools/cli/internal/archdocs/graph2md"
	pssgbuild "github.com/supermodeltools/cli/internal/archdocs/pssg/build"
	pssgconfig "github.com/supermodeltools/cli/internal/archdocs/pssg/config"
	"github.com/supermodeltools/cli/internal/cache"
	"github.com/supermodeltools/cli/internal/config"
	"github.com/supermodeltools/cli/internal/ui"
)

//go:embed templates/*
var bundledTemplates embed.FS

// Options configures the arch-docs command.
type Options struct {
	// SiteName is the display title for the generated site.
	// Defaults to " Architecture Docs".
	SiteName string

	// BaseURL is the canonical base URL where the site will be hosted.
	// Defaults to "https://example.com".
	BaseURL string

	// Repo is the "owner/repo" GitHub slug used to build a repo URL and
	// derive defaults. Optional – inferred from git remote when empty.
	Repo string

	// Output is the directory to write the generated site into.
	// Defaults to "./arch-docs-output".
	Output string

	// TemplatesDir overrides the bundled HTML/CSS/JS templates.
	TemplatesDir string

	// MaxSourceFiles caps how many source files are included in the archive
	// sent to the API. 0 means unlimited.
	MaxSourceFiles int

	// MaxEntities caps how many entity pages are generated. 0 means unlimited.
	MaxEntities int

	// Force bypasses the analysis cache and re-uploads even if cached.
	Force bool
}

// pssgConfigTemplate is the YAML configuration template for the pssg static
// site generator. Placeholders (in order): site.name, site.base_url,
// site.repo_url, site.description (repo name), paths.data, paths.templates,
// paths.output, paths.source_dir, rss.title, rss.description (repo name),
// llms_txt.title, llms_txt.description (repo name).
//
// NOTE: the %s verb order here must stay in sync with the argument order in
// writePssgConfig.
const pssgConfigTemplate = `site:
  name: "%s"
  base_url: "%s"
  repo_url: "%s"
  description: "Architecture documentation for the %s codebase. Explore files, functions, classes, domains, and dependencies."
  author: "Supermodel"
  language: "en"

paths:
  data: "%s"
  templates: "%s"
  output: "%s"
  source_dir: "%s"

data:
  format: "markdown"
  entity_type: "entity"
  entity_slug:
    source: "filename"
  body_sections:
    - name: "Functions"
      header: "Functions"
      type: "unordered_list"
    - name: "Classes"
      header: "Classes"
      type: "unordered_list"
    - name: "Types"
      header: "Types"
      type: "unordered_list"
    - name: "Dependencies"
      header: "Dependencies"
      type: "unordered_list"
    - name: "Imported By"
      header: "Imported By"
      type: "unordered_list"
    - name: "Calls"
      header: "Calls"
      type: "unordered_list"
    - name: "Called By"
      header: "Called By"
      type: "unordered_list"
    - name: "Source Files"
      header: "Source Files"
      type: "unordered_list"
    - name: "Subdirectories"
      header: "Subdirectories"
      type: "unordered_list"
    - name: "Files"
      header: "Files"
      type: "unordered_list"
    - name: "Source"
      header: "Source"
      type: "unordered_list"
    - name: "Extends"
      header: "Extends"
      type: "unordered_list"
    - name: "Defined In"
      header: "Defined In"
      type: "unordered_list"
    - name: "Subdomains"
      header: "Subdomains"
      type: "unordered_list"
    - name: "Domain"
      header: "Domain"
      type: "unordered_list"
    - name: "faqs"
      header: "FAQs"
      type: "faq"

taxonomies:
  - name: "node_type"
    label: "Node Types"
    label_singular: "Node Type"
    field: "node_type"
    multi_value: false
    min_entities: 1
    index_description: "Browse by entity type"

  - name: "language"
    label: "Languages"
    label_singular: "Language"
    field: "language"
    multi_value: false
    min_entities: 1
    index_description: "Browse by programming language"

  - name: "domain"
    label: "Domains"
    label_singular: "Domain"
    field: "domain"
    multi_value: false
    min_entities: 1
    index_description: "Browse by architectural domain"

  - name: "subdomain"
    label: "Subdomains"
    label_singular: "Subdomain"
    field: "subdomain"
    multi_value: false
    min_entities: 1
    index_description: "Browse by architectural subdomain"

  - name: "top_directory"
    label: "Top Directories"
    label_singular: "Directory"
    field: "top_directory"
    multi_value: false
    min_entities: 1
    index_description: "Browse by top-level directory"

  - name: "extension"
    label: "File Extensions"
    label_singular: "Extension"
    field: "extension"
    multi_value: false
    min_entities: 1
    index_description: "Browse by file extension"

  - name: "tags"
    label: "Tags"
    label_singular: "Tag"
    field: "tags"
    multi_value: true
    min_entities: 1
    index_description: "Browse by tag"

pagination:
  per_page: 48
  url_pattern: "/{taxonomy}/{entry}/{page}"

structured_data:
  entity_type: "SoftwareSourceCode"
  field_mappings:
    name: "title"
    description: "description"
    programmingLanguage: "language"
    codeRepository: "repo_url"

sitemap:
  enabled: true
  max_urls_per_file: 50000
  priorities:
    homepage: 1.0
    entity: 0.8
    taxonomy_index: 0.7
    hub_page_1: 0.6
    hub_page_n: 0.4

rss:
  enabled: true
  title: "%s"
  description: "Architecture documentation for the %s codebase"

robots:
  enabled: true

llms_txt:
  enabled: true
  title: "%s"
  description: "Architecture documentation for the %s codebase"

search:
  enabled: true

templates:
  entity: "entity.html"
  homepage: "index.html"
  hub: "hub.html"
  taxonomy_index: "taxonomy_index.html"
  all_entities: "all_entities.html"

output:
  clean_before_build: true
  extract_css: "styles.css"
  extract_js: "main.js"
  share_images: false

extra:
  cta:
    enabled: true
    heading: "Analyze Your Own Codebase"
    description: "Get architecture documentation, dependency graphs, and domain analysis for your codebase in minutes."
    button_text: "Try Supermodel Free"
    button_url: "https://dashboard.supermodeltools.com/billing/"
`

// Run generates architecture documentation for the repository at dir.
//
// Pipeline (each stage depends on the previous one): archive the repo,
// upload it for analysis, dump the returned graph JSON, convert it to
// markdown with graph2md, then build the static site with pssg and
// optionally rewrite root-relative paths for subdirectory hosting.
func Run(ctx context.Context, cfg *config.Config, dir string, opts Options) error { //nolint:gocyclo,gocritic // sequential pipeline; opts is a value-semantic config struct
	// Resolve absolute path
	absDir, err := filepath.Abs(dir)
	if err != nil {
		return fmt.Errorf("resolving path: %w", err)
	}

	// Derive repo name / URL from opts.Repo or directory name
	repoName, repoURL := deriveRepoInfo(opts.Repo, absDir)

	// Apply defaults
	if opts.SiteName == "" {
		if repoName != "" {
			opts.SiteName = repoName + " Architecture Docs"
		} else {
			opts.SiteName = "Architecture Docs"
		}
	}
	if opts.BaseURL == "" {
		opts.BaseURL = "https://example.com"
	}
	if opts.Output == "" {
		opts.Output = filepath.Join(absDir, "arch-docs-output")
	} else if !filepath.IsAbs(opts.Output) {
		opts.Output = filepath.Join(absDir, opts.Output)
	}
	if opts.MaxSourceFiles == 0 {
		opts.MaxSourceFiles = 3000
	}
	if opts.MaxEntities == 0 {
		opts.MaxEntities = 12000
	}

	ui.Step("Creating repository archive…")
	zipPath, err := createZip(absDir)
	if err != nil {
		return fmt.Errorf("create archive: %w", err)
	}
	defer os.Remove(zipPath)

	// Use zip hash as idempotency key (matches existing CLI cache key style)
	hash, err := cache.HashFile(zipPath)
	if err != nil {
		return fmt.Errorf("hash archive: %w", err)
	}

	client := api.New(cfg)
	spin := ui.Start("Uploading and analyzing repository…")
	rawResult, err := client.AnalyzeRaw(ctx, zipPath, "archdocs-"+hash[:16])
	spin.Stop()
	if err != nil {
		return fmt.Errorf("API analysis: %w", err)
	}
	ui.Success("Analysis complete")

	// Write raw graph JSON to a temp file for graph2md
	tmpDir, err := os.MkdirTemp("", "supermodel-archdocs-*")
	if err != nil {
		return fmt.Errorf("create temp dir: %w", err)
	}
	defer os.RemoveAll(tmpDir)

	graphPath := filepath.Join(tmpDir, "graph.json")
	if err := os.WriteFile(graphPath, rawResult, 0o600); err != nil {
		return fmt.Errorf("write graph JSON: %w", err)
	}

	// Convert graph → markdown
	ui.Step("Generating markdown from graph…")
	contentDir := filepath.Join(tmpDir, "content")
	if err := os.MkdirAll(contentDir, 0o755); err != nil {
		return fmt.Errorf("create content dir: %w", err)
	}
	if err := graph2md.Run(graphPath, contentDir, repoName, repoURL, opts.MaxEntities); err != nil {
		return fmt.Errorf("graph2md: %w", err)
	}
	entityCount := countFiles(contentDir, ".md")
	ui.Success("Generated %d entity pages", entityCount)

	// Resolve templates directory (bundled or user-supplied)
	tplDir, tplCleanup, err := resolveTemplates(opts.TemplatesDir)
	if err != nil {
		return fmt.Errorf("resolve templates: %w", err)
	}
	if tplCleanup != nil {
		defer tplCleanup()
	}

	// Write pssg.yaml and build the static site
	ui.Step("Building static site…")
	configPath := filepath.Join(tmpDir, "pssg.yaml")
	if err := writePssgConfig(configPath, opts.SiteName, opts.BaseURL, repoURL, repoName, contentDir, tplDir, opts.Output, absDir); err != nil {
		return fmt.Errorf("write pssg config: %w", err)
	}

	pssgCfg, err := pssgconfig.Load(configPath)
	if err != nil {
		return fmt.Errorf("load pssg config: %w", err)
	}
	builder := pssgbuild.NewBuilder(pssgCfg, false)
	if err := builder.Build(); err != nil {
		return fmt.Errorf("pssg build: %w", err)
	}

	// Rewrite root-relative paths if base URL has a path prefix
	if prefix := extractPathPrefix(opts.BaseURL); prefix != "" {
		if err := rewritePathPrefix(opts.Output, prefix); err != nil {
			return fmt.Errorf("rewrite paths: %w", err)
		}
	}

	pageCount := countFiles(opts.Output, ".html")
	ui.Success("Site built: %d pages → %s", pageCount, opts.Output)
	fmt.Fprintf(os.Stdout, "\n entities : %s\n pages : %s\n output : %s\n\n",
		strconv.Itoa(entityCount), strconv.Itoa(pageCount), opts.Output)
	return nil
}

// deriveRepoInfo extracts a short repo name and GitHub URL from a
// "owner/repo" slug. Falls back to the directory base name.
func deriveRepoInfo(repoSlug, dir string) (name, repoURL string) {
	if repoSlug != "" {
		parts := strings.SplitN(repoSlug, "/", 2)
		if len(parts) == 2 {
			return parts[1], "https://github.com/" + repoSlug
		}
		// Slug without a "/" — treat it as a bare name with no URL.
		return repoSlug, ""
	}
	return filepath.Base(dir), ""
}

// resolveTemplates returns the path to the templates directory. If override is
// non-empty it is used directly. Otherwise the bundled templates are extracted
// to a temporary directory and a cleanup function is returned.
func resolveTemplates(override string) (dir string, cleanup func(), err error) {
	if override != "" {
		// User-supplied directory: caller owns it, nothing to clean up.
		return override, nil, nil
	}

	tmp, err := os.MkdirTemp("", "supermodel-templates-*")
	if err != nil {
		return "", nil, err
	}

	entries, err := bundledTemplates.ReadDir("templates")
	if err != nil {
		os.RemoveAll(tmp)
		return "", nil, err
	}
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		data, err := bundledTemplates.ReadFile("templates/" + e.Name())
		if err != nil {
			os.RemoveAll(tmp)
			return "", nil, err
		}
		if err := os.WriteFile(filepath.Join(tmp, e.Name()), data, 0o600); err != nil {
			os.RemoveAll(tmp)
			return "", nil, err
		}
	}

	return tmp, func() { os.RemoveAll(tmp) }, nil
}

// writePssgConfig generates a pssg.yaml configuration file from the template.
+func writePssgConfig(path, siteName, baseURL, repoURL, repoName, contentDir, tplDir, outputDir, sourceDir string) error { + content := fmt.Sprintf(pssgConfigTemplate, + siteName, // site.name + baseURL, // site.base_url + repoURL, // site.repo_url + repoName, // site.description + contentDir, // paths.data + tplDir, // paths.templates + outputDir, // paths.output + sourceDir, // paths.source_dir + siteName, // rss.title + repoName, // rss.description + siteName, // llms_txt.title + repoName, // llms_txt.description + ) + return os.WriteFile(path, []byte(content), 0o600) +} + +// extractPathPrefix returns the path component of a URL (e.g. "/myrepo" from +// "https://org.github.io/myrepo"). Returns "" if there is no path prefix. +func extractPathPrefix(baseURL string) string { + u, err := url.Parse(baseURL) + if err != nil { + return "" + } + p := strings.TrimRight(u.Path, "/") + if p == "" || p == "/" { + return "" + } + return p +} + +// rewritePathPrefix rewrites root-relative paths in HTML and JS files to +// include prefix. Required for subdirectory deployments (e.g. GitHub Pages). 
+func rewritePathPrefix(dir, prefix string) error { + return filepath.Walk(dir, func(path string, info os.FileInfo, err error) error { + if err != nil || info.IsDir() { + return err + } + ext := strings.ToLower(filepath.Ext(path)) + if ext != ".html" && ext != ".js" { + return nil + } + data, err := os.ReadFile(path) + if err != nil { + return nil + } + content := string(data) + original := content + content = strings.ReplaceAll(content, `href="/`, `href="`+prefix+`/`) + content = strings.ReplaceAll(content, `src="/`, `src="`+prefix+`/`) + content = strings.ReplaceAll(content, `fetch("/`, `fetch("`+prefix+`/`) + content = strings.ReplaceAll(content, `window.location.href = "/"`, `window.location.href = "`+prefix+`/"`) + content = strings.ReplaceAll(content, `window.location.href = "/" + `, `window.location.href = "`+prefix+`/" + `) + if content != original { + if err := os.WriteFile(path, []byte(content), info.Mode()); err != nil { + return fmt.Errorf("writing %s: %w", path, err) + } + } + return nil + }) +} + +// countFiles counts files with the given extension under dir. +func countFiles(dir, ext string) int { + count := 0 + _ = filepath.Walk(dir, func(path string, info os.FileInfo, err error) error { + if err == nil && !info.IsDir() && strings.HasSuffix(path, ext) { + count++ + } + return nil + }) + return count +} diff --git a/internal/archdocs/pssg/affiliate/affiliate.go b/internal/archdocs/pssg/affiliate/affiliate.go new file mode 100644 index 0000000..e7d9860 --- /dev/null +++ b/internal/archdocs/pssg/affiliate/affiliate.go @@ -0,0 +1,127 @@ +package affiliate + +import ( + "net/url" + "os" + "strings" + + "github.com/supermodeltools/cli/internal/archdocs/pssg/config" +) + +// Link represents a single affiliate link. +type Link struct { + Provider string + Term string + URL string +} + +// Provider generates affiliate links for a given search term. 
+type Provider struct { + Name string + URLTemplate string // e.g., "https://www.amazon.com/s?k={{term}}&tag={{tag}}" + Tag string +} + +// GenerateLink creates an affiliate URL for the given search term. +func (p *Provider) GenerateLink(term string) string { + encoded := url.QueryEscape(term) + encoded = strings.ReplaceAll(encoded, "+", "%20") + // Also support + encoding like the TS version + plusEncoded := strings.ReplaceAll(url.QueryEscape(term), "%20", "+") + + result := p.URLTemplate + result = strings.ReplaceAll(result, "{{term}}", plusEncoded) + result = strings.ReplaceAll(result, "{{tag}}", p.Tag) + return result +} + +// Registry holds all configured affiliate providers. +type Registry struct { + Providers []Provider +} + +// NewRegistry creates a Registry from config, reading env vars for tags. +func NewRegistry(cfg config.AffiliatesConfig) *Registry { + var providers []Provider + for _, pc := range cfg.Providers { + tag := "" + if pc.EnvVar != "" { + tag = os.Getenv(pc.EnvVar) + } + // Skip providers that require an env var but don't have one set + if tag == "" && !pc.AlwaysInclude { + continue + } + providers = append(providers, Provider{ + Name: pc.Name, + URLTemplate: pc.URLTemplate, + Tag: tag, + }) + } + return &Registry{Providers: providers} +} + +// GenerateLinks creates affiliate links for all search terms from enrichment data. +func (r *Registry) GenerateLinks(enrichmentData map[string]interface{}, searchTermPaths []string) []Link { + if len(r.Providers) == 0 || enrichmentData == nil { + return nil + } + + // Extract search terms from enrichment data using configured paths + var terms []string + for _, path := range searchTermPaths { + terms = append(terms, extractTerms(enrichmentData, path)...) 
+ } + + var links []Link + for _, provider := range r.Providers { + for _, term := range terms { + links = append(links, Link{ + Provider: provider.Name, + Term: term, + URL: provider.GenerateLink(term), + }) + } + } + return links +} + +// extractTerms extracts string values from enrichment data at the given path. +// Supports paths like "ingredients[].searchTerm" and "gear[].searchTerm". +func extractTerms(data map[string]interface{}, path string) []string { + parts := strings.Split(path, "[].") + if len(parts) != 2 { + // Simple field + if v, ok := data[path]; ok { + if s, ok := v.(string); ok { + return []string{s} + } + } + return nil + } + + arrayField := parts[0] + subField := parts[1] + + arr, ok := data[arrayField] + if !ok { + return nil + } + + items, ok := arr.([]interface{}) + if !ok { + return nil + } + + var results []string + for _, item := range items { + if m, ok := item.(map[string]interface{}); ok { + if v, ok := m[subField]; ok { + if s, ok := v.(string); ok && s != "" { + results = append(results, s) + } + } + } + } + return results +} diff --git a/internal/archdocs/pssg/build/build.go b/internal/archdocs/pssg/build/build.go new file mode 100644 index 0000000..b8107cb --- /dev/null +++ b/internal/archdocs/pssg/build/build.go @@ -0,0 +1,1403 @@ +package build + +import ( + "encoding/json" + "fmt" + "html/template" + "log" + "os" + "path/filepath" + "sort" + "strings" + "sync" + "sync/atomic" + "time" + + "github.com/supermodeltools/cli/internal/archdocs/pssg/affiliate" + "github.com/supermodeltools/cli/internal/archdocs/pssg/config" + "github.com/supermodeltools/cli/internal/archdocs/pssg/enrichment" + "github.com/supermodeltools/cli/internal/archdocs/pssg/entity" + "github.com/supermodeltools/cli/internal/archdocs/pssg/loader" + "github.com/supermodeltools/cli/internal/archdocs/pssg/output" + "github.com/supermodeltools/cli/internal/archdocs/pssg/render" + "github.com/supermodeltools/cli/internal/archdocs/pssg/schema" + 
"github.com/supermodeltools/cli/internal/archdocs/pssg/taxonomy" +) + +// Builder orchestrates the entire static site generation pipeline. +type Builder struct { + cfg *config.Config + force bool +} + +// NewBuilder creates a new builder. +func NewBuilder(cfg *config.Config, force bool) *Builder { + return &Builder{cfg: cfg, force: force} +} + +// Build runs the complete build pipeline. +func (b *Builder) Build() error { + start := time.Now() + log.Printf("Building site: %s", b.cfg.Site.Name) + + // 1. Load entities + log.Printf("Loading entities from %s...", b.cfg.Paths.Data) + ldr := loader.New(b.cfg) + entities, err := ldr.Load() + if err != nil { + return fmt.Errorf("loading entities: %w", err) + } + log.Printf("Loaded %d entities", len(entities)) + + // 2. Build slug lookup + slugMap := make(map[string]*entity.Entity) + for _, e := range entities { + slugMap[e.Slug] = e + } + + // 3. Load enrichment cache + enrichmentData := make(map[string]map[string]interface{}) + if b.cfg.Enrichment.CacheDir != "" { + log.Printf("Loading enrichment cache from %s...", b.cfg.Enrichment.CacheDir) + var err error + enrichmentData, err = enrichment.ReadAllCaches(b.cfg.Enrichment.CacheDir) + if err != nil { + log.Printf("Warning: failed to load enrichment cache: %v", err) + } else { + log.Printf("Loaded enrichment data for %d entities", len(enrichmentData)) + } + } + + // 4. Load extra data + favorites := b.loadFavorites(slugMap) + contributors := b.loadContributors() + + // 5. Set up affiliate registry + affiliateRegistry := affiliate.NewRegistry(b.cfg.Affiliates) + + // 6. Build taxonomies + log.Printf("Building taxonomies...") + taxonomies := taxonomy.BuildAll(entities, b.cfg.Taxonomies, enrichmentData) + for _, tax := range taxonomies { + log.Printf(" %s: %d entries", tax.Label, len(tax.Entries)) + } + + // 7. 
Build valid taxonomy slug lookup + validSlugs := make(map[string]map[string]bool) + for _, tax := range taxonomies { + slugSet := make(map[string]bool) + for _, entry := range tax.Entries { + slugSet[entry.Slug] = true + } + validSlugs[tax.Name] = slugSet + } + + // 8. Ensure output directory exists + outDir := b.cfg.Paths.Output + if err := os.MkdirAll(outDir, 0755); err != nil { + return fmt.Errorf("creating output dir: %w", err) + } + + // 9. Initialize render engine + log.Printf("Loading templates from %s...", b.cfg.Paths.Templates) + engine, err := render.NewEngine(b.cfg) + if err != nil { + return fmt.Errorf("initializing render engine: %w", err) + } + + // 10. Extract CSS/JS + if b.cfg.Output.ExtractCSS != "" { + cssContent, err := engine.RenderCSS() + if err != nil { + log.Printf("Warning: failed to render CSS: %v", err) + } else if cssContent != "" { + cssPath := filepath.Join(outDir, b.cfg.Output.ExtractCSS) + if err := os.WriteFile(cssPath, []byte(cssContent), 0644); err != nil { + return fmt.Errorf("writing CSS: %w", err) + } + } + } + if b.cfg.Output.ExtractJS != "" { + jsContent, err := engine.RenderJS() + if err != nil { + log.Printf("Warning: failed to render JS: %v", err) + } else if jsContent != "" { + jsPath := filepath.Join(outDir, b.cfg.Output.ExtractJS) + if err := os.WriteFile(jsPath, []byte(jsContent), 0644); err != nil { + return fmt.Errorf("writing JS: %w", err) + } + } + } + + // JSON-LD generator + schemaGen := schema.NewGenerator(b.cfg.Site, b.cfg.Schema) + + // Track sitemap entries + var sitemapEntries []output.SitemapEntry + var sitemapMu sync.Mutex + today := time.Now().Format("2006-01-02") + + addSitemapEntry := func(path, priority, changefreq string) { + sitemapMu.Lock() + defer sitemapMu.Unlock() + sitemapEntries = append(sitemapEntries, output.NewSitemapEntry( + b.cfg.Site.BaseURL, path, today, priority, changefreq, + )) + } + + // Track category taxonomy entries for RSS + categoryEntries := make(map[string][]*entity.Entity) + + 
// 11. Render entity pages (concurrent) + log.Printf("Rendering %d entity pages...", len(entities)) + var entityErrors int64 + var wg sync.WaitGroup + sem := make(chan struct{}, 32) // 32-goroutine pool + + for _, e := range entities { + wg.Add(1) + sem <- struct{}{} // acquire + go func(e *entity.Entity) { + defer wg.Done() + defer func() { <-sem }() // release + + err := b.renderEntityPage(e, engine, schemaGen, slugMap, enrichmentData, + affiliateRegistry, taxonomies, validSlugs, contributors, outDir, addSitemapEntry) + if err != nil { + atomic.AddInt64(&entityErrors, 1) + fmt.Fprintf(os.Stderr, "Warning: failed to render %s: %v\n", e.Slug, err) + } + }(e) + } + wg.Wait() + if entityErrors > 0 { + log.Printf(" %d entity pages had errors", entityErrors) + } + + // 11b. Generate search index + if len(entities) > 0 { + if err := b.generateSearchIndex(entities, outDir); err != nil { + log.Printf("Warning: failed to generate search index: %v", err) + } + } + + // Build category entries for RSS + for _, tax := range taxonomies { + if tax.Name == b.cfg.RSS.CategoryTaxonomy { + for _, entry := range tax.Entries { + categoryEntries[entry.Slug] = entry.Entities + } + } + } + + // 12. Render taxonomy pages + log.Printf("Rendering taxonomy pages...") + for _, tax := range taxonomies { + if err := b.renderTaxonomyPages(tax, engine, schemaGen, taxonomies, contributors, outDir, addSitemapEntry, today); err != nil { + return fmt.Errorf("rendering taxonomy %s: %w", tax.Name, err) + } + } + + // 12b. Render all-entities pages + log.Printf("Rendering all-entities pages...") + if err := b.renderAllEntitiesPages(engine, schemaGen, entities, taxonomies, outDir, addSitemapEntry); err != nil { + return fmt.Errorf("rendering all-entities pages: %w", err) + } + + // 13. 
Render homepage + log.Printf("Rendering homepage...") + if err := b.renderHomepage(engine, schemaGen, entities, taxonomies, favorites, contributors, outDir); err != nil { + return fmt.Errorf("rendering homepage: %w", err) + } + addSitemapEntry("/index.html", b.cfg.Sitemap.Priorities["homepage"], b.cfg.Sitemap.ChangeFreqs["homepage"]) + + // 14. Render static pages + for path, tmpl := range b.cfg.Templates.StaticPages { + ctx := render.StaticPageContext{ + Site: b.cfg.Site, + AllTaxonomies: taxonomies, + } + html, err := engine.RenderStatic(tmpl, ctx) + if err != nil { + log.Printf("Warning: failed to render static page %s: %v", path, err) + continue + } + outPath := filepath.Join(outDir, path) + if err := os.MkdirAll(filepath.Dir(outPath), 0755); err != nil { + return fmt.Errorf("creating dir for %s: %w", path, err) + } + if err := os.WriteFile(outPath, []byte(html), 0644); err != nil { + return fmt.Errorf("writing %s: %w", path, err) + } + } + + // 15. Generate sitemap + log.Printf("Generating sitemap (%d entries)...", len(sitemapEntries)) + sitemapFiles := output.GenerateSitemapFiles(sitemapEntries, b.cfg.Site.BaseURL, b.cfg.Sitemap.MaxURLsPerFile) + for _, sf := range sitemapFiles { + if err := os.WriteFile(filepath.Join(outDir, sf.Filename), []byte(sf.Content), 0644); err != nil { + return fmt.Errorf("writing %s: %w", sf.Filename, err) + } + } + log.Printf(" Generated %d sitemap file(s)", len(sitemapFiles)) + + // 16. 
Generate RSS + rssFeeds := output.GenerateRSSFeeds(entities, b.cfg, categoryEntries) + for _, feed := range rssFeeds { + feedPath := filepath.Join(outDir, feed.RelativePath) + if err := os.MkdirAll(filepath.Dir(feedPath), 0755); err != nil { + return fmt.Errorf("creating dir for RSS %s: %w", feed.RelativePath, err) + } + if err := os.WriteFile(feedPath, []byte(feed.Content), 0644); err != nil { + return fmt.Errorf("writing RSS %s: %w", feed.RelativePath, err) + } + } + if len(rssFeeds) > 0 { + log.Printf("Generated %d RSS feed(s)", len(rssFeeds)) + } + + // 17. Generate robots.txt + robotsContent := output.GenerateRobotsTxt(b.cfg) + if err := os.WriteFile(filepath.Join(outDir, "robots.txt"), []byte(robotsContent), 0644); err != nil { + return fmt.Errorf("writing robots.txt: %w", err) + } + + // 18. Generate llms.txt + if b.cfg.LlmsTxt.Enabled { + llmsContent := output.GenerateLlmsTxt(b.cfg, entities, taxonomies) + if err := os.WriteFile(filepath.Join(outDir, "llms.txt"), []byte(llmsContent), 0644); err != nil { + return fmt.Errorf("writing llms.txt: %w", err) + } + } + + // 19. Generate manifest.json + manifestContent := output.GenerateManifest(b.cfg) + if err := os.WriteFile(filepath.Join(outDir, "manifest.json"), []byte(manifestContent), 0644); err != nil { + return fmt.Errorf("writing manifest.json: %w", err) + } + + // 20. Write CNAME if configured + if b.cfg.Site.CNAME != "" { + if err := os.WriteFile(filepath.Join(outDir, "CNAME"), []byte(b.cfg.Site.CNAME+"\n"), 0644); err != nil { + return fmt.Errorf("writing CNAME: %w", err) + } + } + + // 21. 
Copy static assets + if b.cfg.Paths.Static != "" { + if err := copyDir(b.cfg.Paths.Static, outDir); err != nil { + log.Printf("Warning: failed to copy static assets: %v", err) + } + } + + elapsed := time.Since(start) + log.Printf("\nBuild complete!") + log.Printf(" Entities: %d", len(entities)) + log.Printf(" Taxonomies: %d (%d total entries)", len(taxonomies), countTaxEntries(taxonomies)) + log.Printf(" Sitemap: %d URLs in %d file(s)", len(sitemapEntries), len(sitemapFiles)) + log.Printf(" Output: %s", outDir) + log.Printf(" Duration: %s", elapsed.Round(time.Millisecond)) + + return nil +} + +func (b *Builder) renderEntityPage( + e *entity.Entity, + engine *render.Engine, + schemaGen *schema.Generator, + slugMap map[string]*entity.Entity, + enrichmentData map[string]map[string]interface{}, + affiliateReg *affiliate.Registry, + taxonomies []taxonomy.Taxonomy, + validSlugs map[string]map[string]bool, + contributors map[string]interface{}, + outDir string, + addSitemapEntry func(string, string, string), +) error { + entityURL := fmt.Sprintf("%s/%s.html", b.cfg.Site.BaseURL, e.Slug) + + // Resolve pairings + var pairings []*entity.Entity + if pairingsSlugs := e.GetStringSlice("pairings"); len(pairingsSlugs) > 0 { + for _, ps := range pairingsSlugs { + if paired, ok := slugMap[ps]; ok { + pairings = append(pairings, paired) + } + } + } + + // Enrichment data for this entity + eData := enrichmentData[e.Slug] + + // Generate affiliate links + var affLinks []affiliate.Link + if eData != nil { + affLinks = affiliateReg.GenerateLinks(eData, b.cfg.Affiliates.SearchTermPaths) + } + + // Cook mode prompt + cookPrompt := render.GenerateCookModePrompt(e, eData, affLinks) + + // JSON-LD + recipeSchema := schemaGen.GenerateRecipeSchema(e, entityURL) + + // Fix pairing names from slugMap + if related, ok := recipeSchema["isRelatedTo"].([]map[string]interface{}); ok { + for i, r := range related { + if slug, ok := r["name"].(string); ok { + if paired, ok := slugMap[slug]; ok { + 
related[i]["name"] = paired.GetString("title") + } + } + } + } + + // Breadcrumbs + var breadcrumbs []render.Breadcrumb + breadcrumbs = append(breadcrumbs, render.Breadcrumb{Name: "Home", URL: b.cfg.Site.BaseURL + "/"}) + if cat := e.GetString("recipe_category"); cat != "" { + catSlug := entity.ToSlug(cat) + breadcrumbs = append(breadcrumbs, render.Breadcrumb{ + Name: cat, + URL: fmt.Sprintf("%s/category/%s.html", b.cfg.Site.BaseURL, catSlug), + }) + } + breadcrumbs = append(breadcrumbs, render.Breadcrumb{Name: e.GetString("title"), URL: ""}) + + breadcrumbSchema := schemaGen.GenerateBreadcrumbSchema(toBreadcrumbItems(breadcrumbs)) + + // FAQ schema + var faqSchema map[string]interface{} + if faqs := e.GetFAQs(); len(faqs) > 0 { + faqSchema = schemaGen.GenerateFAQSchema(faqs) + } + + // Share image + svgContent := render.GenerateEntityShareSVG( + b.cfg.Site.Name, + e.GetString("title"), + e.GetString("recipe_category"), + e.GetString("cuisine"), + e.GetString("skill_level"), + ) + svgFilename := e.Slug + ".svg" + if err := b.maybeWriteShareSVG(outDir, svgFilename, svgContent); err != nil { + log.Printf("Warning: failed to write entity share SVG for %s: %v", e.Slug, err) + } + imageURL := shareImageURL(b.cfg.Site.BaseURL, svgFilename) + + // Set share image on recipe schema + recipeSchema["image"] = []string{imageURL} + + jsonLD := schema.MarshalSchemas(recipeSchema, breadcrumbSchema, faqSchema) + + title := e.GetString("title") + description := e.GetString("description") + + // Entity profile chart data (compact format for JS) + // Always include metrics so empty values are visible (helps diagnose API gaps) + nodeType := e.GetString("node_type") + profileData := map[string]interface{}{} + + profileData["lc"] = e.GetInt("line_count") + + switch nodeType { + case "Function": + profileData["co"] = e.GetInt("call_count") + profileData["cb"] = e.GetInt("called_by_count") + case "File": + profileData["ic"] = e.GetInt("import_count") + profileData["ib"] = 
e.GetInt("imported_by_count") + profileData["fn"] = e.GetInt("function_count") + profileData["cl"] = e.GetInt("class_count") + profileData["tc"] = e.GetInt("type_count") + case "Class", "Type": + profileData["fn"] = e.GetInt("function_count") + profileData["cb"] = e.GetInt("called_by_count") + case "Directory": + profileData["fc"] = e.GetInt("file_count") + profileData["fn"] = e.GetInt("function_count") + profileData["cl"] = e.GetInt("class_count") + default: + // Domain, Subdomain, etc — include whatever is available + if v := e.GetInt("function_count"); v > 0 { + profileData["fn"] = v + } + if v := e.GetInt("file_count"); v > 0 { + profileData["fc"] = v + } + } + + if sl := e.GetInt("start_line"); sl > 0 { + profileData["sl"] = sl + } + if el := e.GetInt("end_line"); el > 0 { + profileData["el"] = el + } + + // Edge type breakdown + edgeTypes := map[string]int{} + ic := e.GetInt("import_count") + ibc := e.GetInt("imported_by_count") + if ic+ibc > 0 { + edgeTypes["imports"] = ic + ibc + } + co := e.GetInt("call_count") + cbc := e.GetInt("called_by_count") + if co+cbc > 0 { + edgeTypes["calls"] = co + cbc + } + defines := e.GetInt("function_count") + e.GetInt("class_count") + e.GetInt("type_count") + if defines > 0 { + edgeTypes["defines"] = defines + } + if len(edgeTypes) > 0 { + profileData["et"] = edgeTypes + } + + var entityChartJSON []byte + entityChartJSON, _ = json.Marshal(profileData) + + // Source code (read from workspace if available) + var sourceCode, sourceLang string + if filePath := e.GetString("file_path"); filePath != "" { + if sl := e.GetInt("start_line"); sl > 0 { + if el := e.GetInt("end_line"); el > 0 { + sourceDir := b.cfg.Paths.SourceDir + if sourceDir != "" { + fullPath := filepath.Join(sourceDir, filePath) + if data, err := os.ReadFile(fullPath); err == nil { + lines := strings.Split(string(data), "\n") + if sl <= len(lines) && el <= len(lines) { + sourceCode = strings.Join(lines[sl-1:el], "\n") + } + } + } + } + } + sourceLang = 
e.GetString("language") + if sourceLang == "" { + ext := filepath.Ext(filePath) + langMap := map[string]string{ + ".js": "javascript", ".ts": "typescript", ".tsx": "typescript", + ".py": "python", ".go": "go", ".rs": "rust", ".java": "java", + ".rb": "ruby", ".php": "php", ".c": "c", ".cpp": "cpp", + ".cs": "csharp", ".swift": "swift", ".kt": "kotlin", + } + sourceLang = langMap[ext] + } + } + + ctx := render.EntityPageContext{ + Site: b.cfg.Site, + Entity: e, + Slug: e.Slug, + URL: entityURL, + CanonicalURL: entityURL, + Breadcrumbs: breadcrumbs, + Pairings: pairings, + Enrichment: eData, + AffiliateLinks: affLinks, + CookModePrompt: cookPrompt, + JsonLD: toTemplateHTML(jsonLD), + Taxonomies: taxonomies, + AllTaxonomies: taxonomies, + ValidSlugs: validSlugs, + Contributors: contributors, + ChartData: template.JS(entityChartJSON), + SourceCode: sourceCode, + SourceLang: sourceLang, + CTA: b.cfg.Extra.CTA, + OG: render.OGMeta{ + Title: title + " \u2014 " + b.cfg.Site.Name, + Description: description, + URL: entityURL, + ImageURL: imageURL, + Type: "article", + SiteName: b.cfg.Site.Name, + }, + } + + html, err := engine.RenderEntity(ctx) + if err != nil { + return err + } + + outPath := filepath.Join(outDir, e.Slug+".html") + if err := os.WriteFile(outPath, []byte(html), 0644); err != nil { + return fmt.Errorf("writing %s: %w", outPath, err) + } + + addSitemapEntry("/"+e.Slug+".html", + b.cfg.Sitemap.Priorities["entity"], + b.cfg.Sitemap.ChangeFreqs["entity"]) + + return nil +} + +func (b *Builder) renderTaxonomyPages( + tax taxonomy.Taxonomy, + engine *render.Engine, + schemaGen *schema.Generator, + allTaxonomies []taxonomy.Taxonomy, + contributors map[string]interface{}, + outDir string, + addSitemapEntry func(string, string, string), + today string, +) error { + // Ensure taxonomy type directory exists + taxDir := filepath.Join(outDir, tax.Name) + if err := os.MkdirAll(taxDir, 0755); err != nil { + return fmt.Errorf("creating taxonomy dir: %w", err) + } + + 
perPage := b.cfg.Pagination.EntitiesPerPage + + // Render hub pages for each entry + for _, entry := range tax.Entries { + totalPages := (len(entry.Entities) + perPage - 1) / perPage + if totalPages == 0 { + totalPages = 1 + } + + // Hub share image (generate once per entry, reuse for all pages) + typeDist := countFieldDistribution(entry.Entities, "recipe_category", 8) + hubSVGFilename := fmt.Sprintf("%s-%s.svg", tax.Name, entry.Slug) + hubImageURL := shareImageURL(b.cfg.Site.BaseURL, hubSVGFilename) + if totalPages >= 1 { + hubSVG := render.GenerateHubShareSVG(b.cfg.Site.Name, entry.Name, tax.Label, len(entry.Entities), typeDist) + if err := b.maybeWriteShareSVG(outDir, hubSVGFilename, hubSVG); err != nil { + log.Printf("Warning: failed to write hub share SVG for %s/%s: %v", tax.Name, entry.Slug, err) + } + } + + // Hub chart data (same for all pages) + // Build distributions: breakdown by each taxonomy field (except the current one) + distFields := []struct { + Key string + Field string + }{ + {"node_type", "node_type"}, + {"language", "language"}, + {"domain", "domain"}, + {"extension", "extension"}, + } + distributions := make(map[string][]render.NameCount) + for _, df := range distFields { + if df.Key == tax.Name { + continue // skip the current taxonomy dimension + } + dist := countFieldDistribution(entry.Entities, df.Field, 8) + if len(dist) > 0 { + distributions[df.Key] = dist + } + } + + // Build topEntities: largest by line count + type topEntity struct { + Name string `json:"name"` + Type string `json:"type"` + Lines int `json:"lines"` + Slug string `json:"slug"` + } + var topEnts []topEntity + for _, e := range entry.Entities { + lc := e.GetInt("line_count") + if lc > 0 { + topEnts = append(topEnts, topEntity{ + Name: e.GetString("title"), + Type: e.GetString("node_type"), + Lines: lc, + Slug: e.Slug, + }) + } + } + sort.Slice(topEnts, func(i, j int) bool { + return topEnts[i].Lines > topEnts[j].Lines + }) + if len(topEnts) > 10 { + topEnts = 
topEnts[:10] + } + + type hubChart struct { + EntryName string `json:"entryName"` + TotalEntities int `json:"totalEntities"` + TypeDistribution []render.NameCount `json:"typeDistribution"` + Distributions map[string][]render.NameCount `json:"distributions"` + TopEntities []topEntity `json:"topEntities"` + } + hubChartJSON, _ := json.Marshal(hubChart{ + EntryName: entry.Name, + TotalEntities: len(entry.Entities), + TypeDistribution: typeDist, + Distributions: distributions, + TopEntities: topEnts, + }) + + for page := 1; page <= totalPages; page++ { + pagination := taxonomy.ComputePagination(entry, page, perPage, tax.Name) + + // Get entities for this page + pageEntities := entry.Entities + if pagination.StartIndex < len(entry.Entities) { + end := pagination.EndIndex + if end > len(entry.Entities) { + end = len(entry.Entities) + } + pageEntities = entry.Entities[pagination.StartIndex:end] + } + + // JSON-LD + pageURL := fmt.Sprintf("%s%s", b.cfg.Site.BaseURL, taxonomy.HubPageURL(tax.Name, entry.Slug, page)) + var items []schema.ItemListEntry + for _, e := range pageEntities { + items = append(items, schema.ItemListEntry{ + Name: e.GetString("title"), + URL: fmt.Sprintf("%s/%s.html", b.cfg.Site.BaseURL, e.Slug), + }) + } + collectionSchema := schemaGen.GenerateCollectionPageSchema( + entry.Name, fmt.Sprintf("%s %s recipes", entry.Name, tax.LabelSingular), + pageURL, items, hubImageURL, + ) + + // Breadcrumbs + breadcrumbs := []render.Breadcrumb{ + {Name: "Home", URL: b.cfg.Site.BaseURL + "/"}, + {Name: tax.Label, URL: fmt.Sprintf("%s/%s/", b.cfg.Site.BaseURL, tax.Name)}, + {Name: entry.Name, URL: ""}, + } + breadcrumbSchema := schemaGen.GenerateBreadcrumbSchema(toBreadcrumbItems(breadcrumbs)) + jsonLD := schema.MarshalSchemas(collectionSchema, breadcrumbSchema) + + // Contributor profile for author taxonomy + var contributorProfile map[string]interface{} + if tax.Name == "author" && contributors != nil { + if profiles, ok := 
contributors["profiles"].(map[string]interface{}); ok { + contributorProfile, _ = profiles[entry.Slug].(map[string]interface{}) + } + } + + hubDesc := fmt.Sprintf("Browse %d %s %s recipes on %s.", len(entry.Entities), entry.Name, tax.LabelSingular, b.cfg.Site.Name) + + ctx := render.HubPageContext{ + Site: b.cfg.Site, + Taxonomy: tax, + Entry: entry, + Entities: pageEntities, + Pagination: pagination, + JsonLD: toTemplateHTML(jsonLD), + Breadcrumbs: breadcrumbs, + AllTaxonomies: allTaxonomies, + Contributors: contributors, + ContributorProfile: contributorProfile, + OG: render.OGMeta{ + Title: entry.Name + " \u2014 " + tax.Label + " \u2014 " + b.cfg.Site.Name, + Description: hubDesc, + URL: pageURL, + ImageURL: hubImageURL, + Type: "article", + SiteName: b.cfg.Site.Name, + }, + ChartData: template.JS(hubChartJSON), + CTA: b.cfg.Extra.CTA, + } + + html, err := engine.RenderHub(ctx) + if err != nil { + return fmt.Errorf("rendering hub %s/%s page %d: %w", tax.Name, entry.Slug, page, err) + } + + // Determine filename + var filename string + if page == 1 { + filename = entry.Slug + ".html" + } else { + filename = fmt.Sprintf("%s-page-%d.html", entry.Slug, page) + } + + if err := os.WriteFile(filepath.Join(taxDir, filename), []byte(html), 0644); err != nil { + return fmt.Errorf("writing hub page: %w", err) + } + + // Sitemap + priority := b.cfg.Sitemap.Priorities["hub_page_1"] + if page > 1 { + priority = b.cfg.Sitemap.Priorities["hub_page_n"] + } + addSitemapEntry(fmt.Sprintf("/%s/%s", tax.Name, filename), priority, b.cfg.Sitemap.ChangeFreqs["hub"]) + } + } + + // Render taxonomy index page + hasLetters := len(tax.Entries) >= tax.Config.LetterPageThreshold + letterGroups := taxonomy.GroupByLetter(tax.Entries) + + var letters []string + for _, lg := range letterGroups { + letters = append(letters, lg.Letter) + } + + topEntries := taxonomy.TopEntries(tax.Entries, 12) + + // Taxonomy index share image + var taxIndexEntries []render.NameCount + for _, entry := range 
taxonomy.TopEntries(tax.Entries, 20) { + taxIndexEntries = append(taxIndexEntries, render.NameCount{Name: entry.Name, Count: len(entry.Entities)}) + } + taxIndexSVGFilename := fmt.Sprintf("%s-index.svg", tax.Name) + taxIndexSVG := render.GenerateTaxIndexShareSVG(b.cfg.Site.Name, tax.Label, taxIndexEntries) + if err := b.maybeWriteShareSVG(outDir, taxIndexSVGFilename, taxIndexSVG); err != nil { + log.Printf("Warning: failed to write taxonomy index share SVG for %s: %v", tax.Name, err) + } + taxIndexImageURL := shareImageURL(b.cfg.Site.BaseURL, taxIndexSVGFilename) + + // Taxonomy index chart data + type taxChart struct { + TaxonomyName string `json:"taxonomyName"` + Entries []render.NameCount `json:"entries"` + } + taxChartJSON, _ := json.Marshal(taxChart{ + TaxonomyName: tax.Label, + Entries: taxIndexEntries, + }) + + // Index page JSON-LD + var indexItems []schema.ItemListEntry + for _, entry := range tax.Entries { + indexItems = append(indexItems, schema.ItemListEntry{ + Name: entry.Name, + URL: fmt.Sprintf("%s/%s/%s.html", b.cfg.Site.BaseURL, tax.Name, entry.Slug), + }) + } + indexURL := fmt.Sprintf("%s/%s/", b.cfg.Site.BaseURL, tax.Name) + indexSchema := schemaGen.GenerateItemListSchema(tax.Label, fmt.Sprintf("Browse all %s", tax.Label), indexItems, taxIndexImageURL) + breadcrumbs := []render.Breadcrumb{ + {Name: "Home", URL: b.cfg.Site.BaseURL + "/"}, + {Name: tax.Label, URL: ""}, + } + breadcrumbSchema := schemaGen.GenerateBreadcrumbSchema(toBreadcrumbItems(breadcrumbs)) + jsonLD := schema.MarshalSchemas(indexSchema, breadcrumbSchema) + + ctx := render.TaxonomyIndexContext{ + Site: b.cfg.Site, + Taxonomy: tax, + Entries: tax.Entries, + TopEntries: topEntries, + LetterGroups: letterGroups, + HasLetters: hasLetters, + Letters: letters, + JsonLD: toTemplateHTML(jsonLD), + Breadcrumbs: breadcrumbs, + AllTaxonomies: allTaxonomies, + OG: render.OGMeta{ + Title: tax.Label + " \u2014 " + b.cfg.Site.Name, + Description: tax.Config.IndexDescription, + URL: indexURL, + 
ImageURL: taxIndexImageURL, + Type: "article", + SiteName: b.cfg.Site.Name, + }, + ChartData: template.JS(taxChartJSON), + CTA: b.cfg.Extra.CTA, + } + + html, err := engine.RenderTaxonomyIndex(ctx) + if err != nil { + return fmt.Errorf("rendering taxonomy index %s: %w", tax.Name, err) + } + + if err := os.WriteFile(filepath.Join(taxDir, "index.html"), []byte(html), 0644); err != nil { + return fmt.Errorf("writing taxonomy index: %w", err) + } + addSitemapEntry(fmt.Sprintf("/%s/", tax.Name), b.cfg.Sitemap.Priorities["taxonomy_index"], b.cfg.Sitemap.ChangeFreqs["taxonomy_index"]) + + // Render letter pages if threshold met + if hasLetters { + for _, lg := range letterGroups { + // Letter share image + letterSlug := strings.ToLower(lg.Letter) + if lg.Letter == "#" { + letterSlug = "num" + } + letterSVGFilename := fmt.Sprintf("%s-letter-%s.svg", tax.Name, letterSlug) + letterSVG := render.GenerateLetterShareSVG(b.cfg.Site.Name, tax.Label, lg.Letter, len(lg.Entries)) + if err := b.maybeWriteShareSVG(outDir, letterSVGFilename, letterSVG); err != nil { + log.Printf("Warning: failed to write letter share SVG for %s/%s: %v", tax.Name, lg.Letter, err) + } + letterImageURL := shareImageURL(b.cfg.Site.BaseURL, letterSVGFilename) + + // Letter chart data + var letterEntries []render.NameCount + limit := 15 + if len(lg.Entries) < limit { + limit = len(lg.Entries) + } + for _, e := range lg.Entries[:limit] { + letterEntries = append(letterEntries, render.NameCount{Name: e.Name, Count: len(e.Entities)}) + } + type letterChart struct { + Letter string `json:"letter"` + TaxonomyName string `json:"taxonomyName"` + Entries []render.NameCount `json:"entries"` + } + letterChartJSON, _ := json.Marshal(letterChart{ + Letter: lg.Letter, + TaxonomyName: tax.Label, + Entries: letterEntries, + }) + + letterFile := fmt.Sprintf("letter-%s.html", letterSlug) + letterPageURL := fmt.Sprintf("%s/%s/%s", b.cfg.Site.BaseURL, tax.Name, letterFile) + + letterBreadcrumbs := []render.Breadcrumb{ + {Name: 
"Home", URL: b.cfg.Site.BaseURL + "/"}, + {Name: tax.Label, URL: fmt.Sprintf("%s/%s/", b.cfg.Site.BaseURL, tax.Name)}, + {Name: fmt.Sprintf("Letter %s", lg.Letter), URL: ""}, + } + + letterCtx := render.LetterPageContext{ + Site: b.cfg.Site, + Taxonomy: tax, + Letter: lg.Letter, + Entries: lg.Entries, + Letters: letters, + Breadcrumbs: letterBreadcrumbs, + AllTaxonomies: allTaxonomies, + OG: render.OGMeta{ + Title: fmt.Sprintf("%s \u2014 Letter %s \u2014 %s", tax.Label, lg.Letter, b.cfg.Site.Name), + Description: fmt.Sprintf("Browse %s starting with %s on %s.", tax.Label, lg.Letter, b.cfg.Site.Name), + URL: letterPageURL, + ImageURL: letterImageURL, + Type: "article", + SiteName: b.cfg.Site.Name, + }, + ChartData: template.JS(letterChartJSON), + CTA: b.cfg.Extra.CTA, + } + + letterHTML, err := engine.RenderLetter(letterCtx) + if err != nil { + return fmt.Errorf("rendering letter page %s/%s: %w", tax.Name, lg.Letter, err) + } + + if err := os.WriteFile(filepath.Join(taxDir, letterFile), []byte(letterHTML), 0644); err != nil { + return fmt.Errorf("writing letter page: %w", err) + } + addSitemapEntry(fmt.Sprintf("/%s/%s", tax.Name, letterFile), + b.cfg.Sitemap.Priorities["letter_page"], + b.cfg.Sitemap.ChangeFreqs["letter_page"]) + } + } + + return nil +} + +func (b *Builder) renderAllEntitiesPages( + engine *render.Engine, + schemaGen *schema.Generator, + entities []*entity.Entity, + allTaxonomies []taxonomy.Taxonomy, + outDir string, + addSitemapEntry func(string, string, string), +) error { + // Ensure all/ directory exists + allDir := filepath.Join(outDir, "all") + if err := os.MkdirAll(allDir, 0755); err != nil { + return fmt.Errorf("creating all dir: %w", err) + } + + // Global type distribution + typeDist := countFieldDistribution(entities, "recipe_category", 10) + + // Share image (once) + allSVG := render.GenerateAllEntitiesShareSVG(b.cfg.Site.Name, len(entities), typeDist) + if err := b.maybeWriteShareSVG(outDir, "all-entities.svg", allSVG); err != nil { + 
log.Printf("Warning: failed to write all-entities share SVG: %v", err) + } + imageURL := shareImageURL(b.cfg.Site.BaseURL, "all-entities.svg") + + // Chart data + type allChart struct { + TotalEntities int `json:"totalEntities"` + TypeDistribution []render.NameCount `json:"typeDistribution"` + } + chartJSON, _ := json.Marshal(allChart{ + TotalEntities: len(entities), + TypeDistribution: typeDist, + }) + + perPage := b.cfg.Pagination.EntitiesPerPage + totalPages := (len(entities) + perPage - 1) / perPage + if totalPages == 0 { + totalPages = 1 + } + + for page := 1; page <= totalPages; page++ { + start := (page - 1) * perPage + end := start + perPage + if end > len(entities) { + end = len(entities) + } + pageEntities := entities[start:end] + + // Build pagination info + pagination := taxonomy.PaginationInfo{ + CurrentPage: page, + TotalPages: totalPages, + TotalItems: len(entities), + StartIndex: start, + EndIndex: end, + } + for p := 1; p <= totalPages; p++ { + url := "/all/index.html" + if p > 1 { + url = fmt.Sprintf("/all/page-%d.html", p) + } + pagination.PageURLs = append(pagination.PageURLs, taxonomy.PageURL{Number: p, URL: url}) + } + if page > 1 { + if page == 2 { + pagination.PrevURL = "/all/index.html" + } else { + pagination.PrevURL = fmt.Sprintf("/all/page-%d.html", page-1) + } + } + if page < totalPages { + pagination.NextURL = fmt.Sprintf("/all/page-%d.html", page+1) + } + + pageURL := fmt.Sprintf("%s/all/index.html", b.cfg.Site.BaseURL) + if page > 1 { + pageURL = fmt.Sprintf("%s/all/page-%d.html", b.cfg.Site.BaseURL, page) + } + + // JSON-LD + var items []schema.ItemListEntry + for _, e := range pageEntities { + items = append(items, schema.ItemListEntry{ + Name: e.GetString("title"), + URL: fmt.Sprintf("%s/%s.html", b.cfg.Site.BaseURL, e.Slug), + }) + } + collectionSchema := schemaGen.GenerateCollectionPageSchema( + "All Recipes", + fmt.Sprintf("Browse all %d recipes on %s", len(entities), b.cfg.Site.Name), + pageURL, items, imageURL, + ) + 
breadcrumbs := []render.Breadcrumb{ + {Name: "Home", URL: b.cfg.Site.BaseURL + "/"}, + {Name: "All Recipes", URL: ""}, + } + breadcrumbSchema := schemaGen.GenerateBreadcrumbSchema(toBreadcrumbItems(breadcrumbs)) + jsonLD := schema.MarshalSchemas(collectionSchema, breadcrumbSchema) + + // Only include chart data on page 1 + var pageChartData template.JS + if page == 1 { + pageChartData = template.JS(chartJSON) + } + + ctx := render.AllEntitiesPageContext{ + Site: b.cfg.Site, + Entities: pageEntities, + Pagination: pagination, + JsonLD: toTemplateHTML(jsonLD), + Breadcrumbs: breadcrumbs, + AllTaxonomies: allTaxonomies, + EntityCount: len(entities), + TotalEntities: len(entities), + OG: render.OGMeta{ + Title: "All Recipes \u2014 " + b.cfg.Site.Name, + Description: fmt.Sprintf("Browse all %d recipes on %s.", len(entities), b.cfg.Site.Name), + URL: pageURL, + ImageURL: imageURL, + Type: "article", + SiteName: b.cfg.Site.Name, + }, + ChartData: pageChartData, + CTA: b.cfg.Extra.CTA, + } + + html, err := engine.RenderAllEntities(ctx) + if err != nil { + return fmt.Errorf("rendering all-entities page %d: %w", page, err) + } + + filename := "index.html" + if page > 1 { + filename = fmt.Sprintf("page-%d.html", page) + } + + if err := os.WriteFile(filepath.Join(allDir, filename), []byte(html), 0644); err != nil { + return fmt.Errorf("writing all-entities page: %w", err) + } + + addSitemapEntry(fmt.Sprintf("/all/%s", filename), "0.5", "weekly") + } + + return nil +} + +func (b *Builder) renderHomepage( + engine *render.Engine, + schemaGen *schema.Generator, + entities []*entity.Entity, + taxonomies []taxonomy.Taxonomy, + favorites []*entity.Entity, + contributors map[string]interface{}, + outDir string, +) error { + // Share image + var taxStats []render.NameCount + for _, tax := range taxonomies { + taxStats = append(taxStats, render.NameCount{Name: tax.Label, Count: len(tax.Entries)}) + } + svgContent := render.GenerateHomepageShareSVG(b.cfg.Site.Name, 
b.cfg.Site.Description, taxStats, len(entities)) + if err := b.maybeWriteShareSVG(outDir, "homepage.svg", svgContent); err != nil { + log.Printf("Warning: failed to write homepage share SVG: %v", err) + } + imageURL := shareImageURL(b.cfg.Site.BaseURL, "homepage.svg") + + // Chart data: treemap of taxonomies + type chartTax struct { + Name string `json:"name"` + Count int `json:"count"` + Slug string `json:"slug"` + } + type homepageChart struct { + Taxonomies []chartTax `json:"taxonomies"` + TotalEntities int `json:"totalEntities"` + } + var chartTaxonomies []chartTax + for _, tax := range taxonomies { + totalCount := 0 + for _, entry := range tax.Entries { + totalCount += len(entry.Entities) + } + chartTaxonomies = append(chartTaxonomies, chartTax{ + Name: tax.Label, + Count: totalCount, + Slug: tax.Name, + }) + } + chartJSON, _ := json.Marshal(homepageChart{Taxonomies: chartTaxonomies, TotalEntities: len(entities)}) + + // Architecture overview: domain/subdomain force graph + type archNode struct { + ID string `json:"id"` + Name string `json:"name"` + Type string `json:"type"` + Count int `json:"count"` + Slug string `json:"slug,omitempty"` + } + type archLink struct { + Source string `json:"source"` + Target string `json:"target"` + } + type archOverview struct { + Nodes []archNode `json:"nodes"` + Links []archLink `json:"links"` + } + + var archNodes []archNode + var archLinks []archLink + + // Root node is the repo/site + rootID := "__root__" + archNodes = append(archNodes, archNode{ID: rootID, Name: b.cfg.Site.Name, Type: "root", Count: len(entities)}) + + // Find subdomain -> domain parent relationships + subdomainParent := make(map[string]string) // subdomain name -> domain name + for _, tax := range taxonomies { + if tax.Name == "subdomain" { + for _, entry := range tax.Entries { + parentDomain := "" + if len(entry.Entities) > 0 { + parentDomain = entry.Entities[0].GetString("domain") + } + subdomainParent[entry.Name] = parentDomain + } + } + } + + // Add 
domain nodes + for _, tax := range taxonomies { + if tax.Name == "domain" { + for _, entry := range tax.Entries { + nodeID := "domain:" + entry.Slug + archNodes = append(archNodes, archNode{ + ID: nodeID, + Name: entry.Name, + Type: "domain", + Count: len(entry.Entities), + Slug: "domain/" + entry.Slug, + }) + archLinks = append(archLinks, archLink{Source: rootID, Target: nodeID}) + } + } + } + // Add subdomain nodes + for _, tax := range taxonomies { + if tax.Name == "subdomain" { + for _, entry := range tax.Entries { + nodeID := "subdomain:" + entry.Slug + archNodes = append(archNodes, archNode{ + ID: nodeID, + Name: entry.Name, + Type: "subdomain", + Count: len(entry.Entities), + Slug: "subdomain/" + entry.Slug, + }) + parentDomain := subdomainParent[entry.Name] + if parentDomain != "" { + parentSlug := entity.ToSlug(parentDomain) + archLinks = append(archLinks, archLink{Source: "domain:" + parentSlug, Target: nodeID}) + } else { + archLinks = append(archLinks, archLink{Source: rootID, Target: nodeID}) + } + } + } + } + + var archJSON []byte + if len(archNodes) > 1 { + archJSON, _ = json.Marshal(archOverview{Nodes: archNodes, Links: archLinks}) + } + + // JSON-LD + websiteSchema := schemaGen.GenerateWebSiteSchema(imageURL) + + var items []schema.ItemListEntry + for _, e := range entities { + items = append(items, schema.ItemListEntry{ + Name: e.GetString("title"), + URL: fmt.Sprintf("%s/%s.html", b.cfg.Site.BaseURL, e.Slug), + }) + } + itemListSchema := schemaGen.GenerateItemListSchema( + b.cfg.Site.Name, + b.cfg.Site.Description, + items, + imageURL, + ) + + jsonLD := schema.MarshalSchemas(websiteSchema, itemListSchema) + + ctx := render.HomepageContext{ + Site: b.cfg.Site, + Entities: entities, + Taxonomies: taxonomies, + Favorites: favorites, + JsonLD: toTemplateHTML(jsonLD), + EntityCount: len(entities), + Contributors: contributors, + OG: render.OGMeta{ + Title: b.cfg.Site.Name, + Description: b.cfg.Site.Description, + URL: b.cfg.Site.BaseURL + "/", + 
ImageURL: imageURL, + Type: "website", + SiteName: b.cfg.Site.Name, + }, + ChartData: template.JS(chartJSON), + ArchData: template.JS(archJSON), + CTA: b.cfg.Extra.CTA, + } + + html, err := engine.RenderHomepage(ctx) + if err != nil { + return err + } + + return os.WriteFile(filepath.Join(outDir, "index.html"), []byte(html), 0644) +} + +func (b *Builder) loadFavorites(slugMap map[string]*entity.Entity) []*entity.Entity { + if b.cfg.Extra.Favorites == "" { + return nil + } + + data, err := os.ReadFile(b.cfg.Extra.Favorites) + if err != nil { + log.Printf("Warning: failed to load favorites: %v", err) + return nil + } + + var slugs []string + if err := json.Unmarshal(data, &slugs); err != nil { + log.Printf("Warning: failed to parse favorites: %v", err) + return nil + } + + var result []*entity.Entity + for _, slug := range slugs { + if e, ok := slugMap[slug]; ok { + result = append(result, e) + } + } + return result +} + +func (b *Builder) loadContributors() map[string]interface{} { + if b.cfg.Extra.Contributors == "" { + return nil + } + + data, err := os.ReadFile(b.cfg.Extra.Contributors) + if err != nil { + log.Printf("Warning: failed to load contributors: %v", err) + return nil + } + + var result map[string]interface{} + if err := json.Unmarshal(data, &result); err != nil { + log.Printf("Warning: failed to parse contributors: %v", err) + return nil + } + return result +} + +func toBreadcrumbItems(breadcrumbs []render.Breadcrumb) []schema.BreadcrumbItem { + items := make([]schema.BreadcrumbItem, len(breadcrumbs)) + for i, bc := range breadcrumbs { + items[i] = schema.BreadcrumbItem{Name: bc.Name, URL: bc.URL} + } + return items +} + +// toTemplateHTML converts a string to template.HTML (trusted HTML). 
+func toTemplateHTML(s string) template.HTML { + return template.HTML(s) +} + +func countTaxEntries(taxonomies []taxonomy.Taxonomy) int { + total := 0 + for _, tax := range taxonomies { + total += len(tax.Entries) + } + return total +} + +// countFieldDistribution counts occurrences of a string field across entities, +// returns sorted desc, capped at limit. +func countFieldDistribution(entities []*entity.Entity, field string, limit int) []render.NameCount { + counts := make(map[string]int) + for _, e := range entities { + val := e.GetString(field) + if val != "" { + counts[val]++ + } + } + + var result []render.NameCount + for name, count := range counts { + result = append(result, render.NameCount{Name: name, Count: count}) + } + sort.Slice(result, func(i, j int) bool { + return result[i].Count > result[j].Count + }) + if limit > 0 && len(result) > limit { + result = result[:limit] + } + return result +} + +// writeShareSVG writes an SVG share image to the images/share/ directory. +func writeShareSVG(outDir, filename, svg string) error { + dir := filepath.Join(outDir, "images", "share") + if err := os.MkdirAll(dir, 0755); err != nil { + return err + } + return os.WriteFile(filepath.Join(dir, filename), []byte(svg), 0644) +} + +// maybeWriteShareSVG skips SVG generation when output.share_images is false. +func (b *Builder) maybeWriteShareSVG(outDir, filename, svg string) error { + if !b.cfg.Output.ShareImages { + return nil + } + return writeShareSVG(outDir, filename, svg) +} + +// shareImageURL returns the full URL for a share image. 
+func shareImageURL(baseURL, filename string) string { + return fmt.Sprintf("%s/images/share/%s", baseURL, filename) +} + +type searchEntry struct { + T string `json:"t"` // title + D string `json:"d,omitempty"` // description (truncated) + S string `json:"s"` // slug + N string `json:"n,omitempty"` // node_type + L string `json:"l,omitempty"` // language + M string `json:"m,omitempty"` // domain +} + +func (b *Builder) generateSearchIndex(entities []*entity.Entity, outDir string) error { + if !b.cfg.Search.Enabled { + return nil + } + + entries := make([]searchEntry, 0, len(entities)) + for _, e := range entities { + desc := e.GetString("description") + if len(desc) > 120 { + desc = desc[:120] + } + entries = append(entries, searchEntry{ + T: e.GetString("title"), + D: desc, + S: e.Slug, + N: e.GetString("node_type"), + L: e.GetString("language"), + M: e.GetString("domain"), + }) + } + + data, err := json.Marshal(entries) + if err != nil { + return err + } + + outPath := filepath.Join(outDir, "search-index.json") + if err := os.WriteFile(outPath, data, 0644); err != nil { + return err + } + log.Printf(" Generated search index (%d entries, %dKB)", len(entries), len(data)/1024) + return nil +} + +// copyDir copies files from src to dst directory. 
+func copyDir(src, dst string) error { + entries, err := os.ReadDir(src) + if err != nil { + if os.IsNotExist(err) { + return nil + } + return err + } + + for _, entry := range entries { + srcPath := filepath.Join(src, entry.Name()) + dstPath := filepath.Join(dst, entry.Name()) + + if entry.IsDir() { + if err := os.MkdirAll(dstPath, 0755); err != nil { + return err + } + if err := copyDir(srcPath, dstPath); err != nil { + return err + } + } else { + data, err := os.ReadFile(srcPath) + if err != nil { + return err + } + if err := os.WriteFile(dstPath, data, 0644); err != nil { + return err + } + } + } + return nil +} diff --git a/internal/archdocs/pssg/config/config.go b/internal/archdocs/pssg/config/config.go new file mode 100644 index 0000000..a8892af --- /dev/null +++ b/internal/archdocs/pssg/config/config.go @@ -0,0 +1,160 @@ +package config + +import ( + "fmt" + "os" + "path/filepath" + + "gopkg.in/yaml.v3" +) + +// Load reads and parses a YAML config file, applies defaults, and validates. 
+func Load(path string) (*Config, error) { + data, err := os.ReadFile(path) + if err != nil { + return nil, fmt.Errorf("reading config %s: %w", path, err) + } + + var cfg Config + if err := yaml.Unmarshal(data, &cfg); err != nil { + return nil, fmt.Errorf("parsing config %s: %w", path, err) + } + + cfg.ConfigDir = filepath.Dir(path) + applyDefaults(&cfg) + + if err := validate(&cfg); err != nil { + return nil, fmt.Errorf("validating config: %w", err) + } + + // Resolve relative paths against config directory + resolvePaths(&cfg) + + return &cfg, nil +} + +func applyDefaults(cfg *Config) { + if cfg.Site.Language == "" { + cfg.Site.Language = "en" + } + if cfg.Paths.Output == "" { + cfg.Paths.Output = "docs" + } + if cfg.Paths.Templates == "" { + cfg.Paths.Templates = "templates" + } + if cfg.Paths.Cache == "" { + cfg.Paths.Cache = ".cache" + } + if cfg.Data.Format == "" { + cfg.Data.Format = "markdown" + } + if cfg.Data.EntitySlug.Source == "" { + cfg.Data.EntitySlug.Source = "filename" + } + if cfg.Pagination.EntitiesPerPage == 0 { + cfg.Pagination.EntitiesPerPage = 48 + } + if cfg.Sitemap.MaxURLsPerFile == 0 { + cfg.Sitemap.MaxURLsPerFile = 50000 + } + if cfg.Schema.DatePublished == "" { + cfg.Schema.DatePublished = "2025-01-01" + } + + // Default taxonomy settings + for i := range cfg.Taxonomies { + if cfg.Taxonomies[i].MinEntities == 0 { + cfg.Taxonomies[i].MinEntities = 1 + } + if cfg.Taxonomies[i].LetterPageThreshold == 0 { + cfg.Taxonomies[i].LetterPageThreshold = 50 + } + if cfg.Taxonomies[i].Template == "" { + cfg.Taxonomies[i].Template = "hub.html" + } + if cfg.Taxonomies[i].IndexTemplate == "" { + cfg.Taxonomies[i].IndexTemplate = "taxonomy_index.html" + } + if cfg.Taxonomies[i].LetterTemplate == "" { + cfg.Taxonomies[i].LetterTemplate = "letter.html" + } + } + + // Default sitemap priorities + if cfg.Sitemap.Priorities == nil { + cfg.Sitemap.Priorities = map[string]string{ + "homepage": "1.0", + "entity": "0.8", + "taxonomy_index": "0.7", + "hub_page_1": 
"0.6", + "hub_page_n": "0.4", + "letter_page": "0.5", + } + } + if cfg.Sitemap.ChangeFreqs == nil { + cfg.Sitemap.ChangeFreqs = map[string]string{ + "homepage": "daily", + "entity": "weekly", + "taxonomy_index": "weekly", + "hub": "weekly", + "letter_page": "weekly", + } + } + + // Default templates + if cfg.Templates.Entity == "" { + cfg.Templates.Entity = "recipe.html" + } + if cfg.Templates.Homepage == "" { + cfg.Templates.Homepage = "index.html" + } + if cfg.Templates.Hub == "" { + cfg.Templates.Hub = "hub.html" + } + if cfg.Templates.TaxonomyIndex == "" { + cfg.Templates.TaxonomyIndex = "taxonomy_index.html" + } + if cfg.Templates.Letter == "" { + cfg.Templates.Letter = "letter.html" + } +} + +func validate(cfg *Config) error { + if cfg.Site.Name == "" { + return fmt.Errorf("site.name is required") + } + if cfg.Site.BaseURL == "" { + return fmt.Errorf("site.base_url is required") + } + if cfg.Paths.Data == "" { + return fmt.Errorf("paths.data is required") + } + return nil +} + +func resolvePaths(cfg *Config) { + resolve := func(p string) string { + if filepath.IsAbs(p) { + return p + } + return filepath.Join(cfg.ConfigDir, p) + } + + cfg.Paths.Data = resolve(cfg.Paths.Data) + cfg.Paths.Templates = resolve(cfg.Paths.Templates) + cfg.Paths.Output = resolve(cfg.Paths.Output) + cfg.Paths.Cache = resolve(cfg.Paths.Cache) + if cfg.Paths.Static != "" { + cfg.Paths.Static = resolve(cfg.Paths.Static) + } + if cfg.Enrichment.CacheDir != "" { + cfg.Enrichment.CacheDir = resolve(cfg.Enrichment.CacheDir) + } + if cfg.Extra.Favorites != "" { + cfg.Extra.Favorites = resolve(cfg.Extra.Favorites) + } + if cfg.Extra.Contributors != "" { + cfg.Extra.Contributors = resolve(cfg.Extra.Contributors) + } +} diff --git a/internal/archdocs/pssg/config/types.go b/internal/archdocs/pssg/config/types.go new file mode 100644 index 0000000..e20a980 --- /dev/null +++ b/internal/archdocs/pssg/config/types.go @@ -0,0 +1,177 @@ +package config + +// Config is the top-level pssg configuration 
loaded from YAML. +type Config struct { + Site SiteConfig `yaml:"site"` + Paths PathsConfig `yaml:"paths"` + Data DataConfig `yaml:"data"` + Taxonomies []TaxonomyConfig `yaml:"taxonomies"` + Pagination PaginationConfig `yaml:"pagination"` + Schema SchemaConfig `yaml:"structured_data"` + Affiliates AffiliatesConfig `yaml:"affiliates"` + Enrichment EnrichmentConfig `yaml:"enrichment"` + Sitemap SitemapConfig `yaml:"sitemap"` + RSS RSSConfig `yaml:"rss"` + Robots RobotsConfig `yaml:"robots"` + LlmsTxt LlmsTxtConfig `yaml:"llms_txt"` + Templates TemplatesConfig `yaml:"templates"` + Output OutputConfig `yaml:"output"` + Extra ExtraConfig `yaml:"extra"` + Search SearchConfig `yaml:"search"` + + // ConfigDir is the directory containing the config file (set at load time). + ConfigDir string `yaml:"-"` +} + +type SiteConfig struct { + Name string `yaml:"name"` + BaseURL string `yaml:"base_url"` + RepoURL string `yaml:"repo_url"` + Description string `yaml:"description"` + Language string `yaml:"language"` + Version string `yaml:"version"` + Author string `yaml:"author"` + AuthorURL string `yaml:"author_url"` + License string `yaml:"license"` + CNAME string `yaml:"cname"` +} + +type PathsConfig struct { + Data string `yaml:"data"` + Templates string `yaml:"templates"` + Output string `yaml:"output"` + Cache string `yaml:"cache"` + Static string `yaml:"static"` + SourceDir string `yaml:"source_dir"` +} + +type DataConfig struct { + Format string `yaml:"format"` + EntityType string `yaml:"entity_type"` + EntitySlug EntitySlug `yaml:"entity_slug"` + BodySections []BodySection `yaml:"body_sections"` +} + +type EntitySlug struct { + Source string `yaml:"source"` // "filename" or "field:" +} + +type BodySection struct { + Name string `yaml:"name"` + Header string `yaml:"header"` + Type string `yaml:"type"` // "unordered_list", "ordered_list", "faq", "markdown" +} + +type TaxonomyConfig struct { + Name string `yaml:"name"` + Label string `yaml:"label"` + LabelSingular string 
`yaml:"label_singular"` + Field string `yaml:"field"` + MultiValue bool `yaml:"multi_value"` + MinEntities int `yaml:"min_entities"` + LetterPageThreshold int `yaml:"letter_page_threshold"` + Invert bool `yaml:"invert"` + EnrichmentOverrideField string `yaml:"enrichment_override_field"` + Template string `yaml:"template"` + IndexTemplate string `yaml:"index_template"` + LetterTemplate string `yaml:"letter_template"` + + // Description templates (Go template strings evaluated with .Name, .Count, .Start, .End) + HubTitle string `yaml:"hub_title"` + HubMetaDescription string `yaml:"hub_meta_description"` + HubSubheading string `yaml:"hub_subheading"` + IndexDescription string `yaml:"index_description"` + CollectionDesc string `yaml:"collection_description"` +} + +type PaginationConfig struct { + EntitiesPerPage int `yaml:"entities_per_page"` +} + +type SchemaConfig struct { + EntityType string `yaml:"entity_type"` + FieldMappings map[string]string `yaml:"field_mappings"` + ExtraKeywords []string `yaml:"extra_keywords"` + DatePublished string `yaml:"date_published"` + HomepageSchema []string `yaml:"homepage_schemas"` + EntitySchema []string `yaml:"entity_schemas"` + HubSchema []string `yaml:"hub_schemas"` + IndexSchema []string `yaml:"index_schemas"` +} + +type AffiliatesConfig struct { + Providers []AffiliateProviderConfig `yaml:"providers"` + SearchTermPaths []string `yaml:"search_term_paths"` +} + +type AffiliateProviderConfig struct { + Name string `yaml:"name"` + URLTemplate string `yaml:"url_template"` + EnvVar string `yaml:"env_var"` + AlwaysInclude bool `yaml:"always_include"` +} + +type EnrichmentConfig struct { + CacheDir string `yaml:"cache_dir"` + IngredientOverrideField string `yaml:"ingredient_override_field"` +} + +type SitemapConfig struct { + MaxURLsPerFile int `yaml:"max_urls_per_file"` + Priorities map[string]string `yaml:"priorities"` + ChangeFreqs map[string]string `yaml:"change_freqs"` +} + +type RSSConfig struct { + Enabled bool `yaml:"enabled"` 
+ MainFeed string `yaml:"main_feed"` + CategoryFeeds bool `yaml:"category_feeds"` + CategoryTaxonomy string `yaml:"category_taxonomy"` +} + +type RobotsConfig struct { + AllowAll bool `yaml:"allow_all"` + ExtraBots []string `yaml:"extra_bots"` +} + +type LlmsTxtConfig struct { + Enabled bool `yaml:"enabled"` + Tagline string `yaml:"tagline"` + Taxonomies []string `yaml:"taxonomies"` +} + +type TemplatesConfig struct { + Entity string `yaml:"entity"` + Homepage string `yaml:"homepage"` + Hub string `yaml:"hub"` + TaxonomyIndex string `yaml:"taxonomy_index"` + Letter string `yaml:"letter"` + StaticPages map[string]string `yaml:"static_pages"` +} + +type OutputConfig struct { + CleanBuild bool `yaml:"clean_build"` + Minify bool `yaml:"minify"` + ExtractCSS string `yaml:"extract_css"` + ExtractJS string `yaml:"extract_js"` + ShareImages bool `yaml:"share_images"` +} + +type ExtraConfig struct { + Favorites string `yaml:"favorites"` + Contributors string `yaml:"contributors"` + CTA CTAConfig `yaml:"cta"` +} + +type CTAConfig struct { + Enabled bool `yaml:"enabled"` + Heading string `yaml:"heading"` + Description string `yaml:"description"` + ButtonText string `yaml:"button_text"` + ButtonURL string `yaml:"button_url"` +} + +type SearchConfig struct { + Enabled bool `yaml:"enabled"` + Fields []string `yaml:"fields"` // entity fields to index, default: ["title","description","node_type","language","domain","subdomain","tags"] +} diff --git a/internal/archdocs/pssg/enrichment/cache.go b/internal/archdocs/pssg/enrichment/cache.go new file mode 100644 index 0000000..c492926 --- /dev/null +++ b/internal/archdocs/pssg/enrichment/cache.go @@ -0,0 +1,132 @@ +package enrichment + +import ( + "encoding/json" + "fmt" + "os" + "path/filepath" +) + +// CacheEntry represents the cached enrichment data for a single entity. 
+type CacheEntry struct { + ContentHash string `json:"contentHash"` + Enrichment map[string]interface{} `json:"enrichment"` + Timestamp string `json:"timestamp"` +} + +// ReadCache reads a single enrichment cache file for the given slug. +// Returns nil if the cache file doesn't exist or is invalid. +func ReadCache(cacheDir, slug string) map[string]interface{} { + filePath := filepath.Join(cacheDir, slug+".json") + data, err := os.ReadFile(filePath) + if err != nil { + return nil + } + + var entry CacheEntry + if err := json.Unmarshal(data, &entry); err != nil { + return nil + } + + return entry.Enrichment +} + +// ReadAllCaches reads all cache files from the cache directory. +func ReadAllCaches(cacheDir string) (map[string]map[string]interface{}, error) { + result := make(map[string]map[string]interface{}) + + if cacheDir == "" { + return result, nil + } + + entries, err := os.ReadDir(cacheDir) + if err != nil { + if os.IsNotExist(err) { + return result, nil + } + return nil, fmt.Errorf("reading cache dir %s: %w", cacheDir, err) + } + + for _, entry := range entries { + if entry.IsDir() || filepath.Ext(entry.Name()) != ".json" { + continue + } + slug := entry.Name()[:len(entry.Name())-5] // strip .json + data := ReadCache(cacheDir, slug) + if data != nil { + result[slug] = data + } + } + + return result, nil +} + +// GetIngredients extracts ingredient search terms from enrichment data. +func GetIngredients(data map[string]interface{}) []map[string]interface{} { + v, ok := data["ingredients"] + if !ok { + return nil + } + items, ok := v.([]interface{}) + if !ok { + return nil + } + var result []map[string]interface{} + for _, item := range items { + if m, ok := item.(map[string]interface{}); ok { + result = append(result, m) + } + } + return result +} + +// GetGear extracts gear items from enrichment data. 
+func GetGear(data map[string]interface{}) []map[string]interface{} { + v, ok := data["gear"] + if !ok { + return nil + } + items, ok := v.([]interface{}) + if !ok { + return nil + } + var result []map[string]interface{} + for _, item := range items { + if m, ok := item.(map[string]interface{}); ok { + result = append(result, m) + } + } + return result +} + +// GetCookingTips extracts cooking tips from enrichment data. +func GetCookingTips(data map[string]interface{}) []string { + v, ok := data["cookingTips"] + if !ok { + return nil + } + items, ok := v.([]interface{}) + if !ok { + return nil + } + var result []string + for _, item := range items { + if s, ok := item.(string); ok { + result = append(result, s) + } + } + return result +} + +// GetCoachingPrompt extracts the coaching prompt from enrichment data. +func GetCoachingPrompt(data map[string]interface{}) string { + v, ok := data["coachingPrompt"] + if !ok { + return "" + } + s, ok := v.(string) + if !ok { + return "" + } + return s +} diff --git a/internal/archdocs/pssg/entity/entity.go b/internal/archdocs/pssg/entity/entity.go new file mode 100644 index 0000000..4e19897 --- /dev/null +++ b/internal/archdocs/pssg/entity/entity.go @@ -0,0 +1,153 @@ +package entity + +// Entity is a generic content item with map-based fields and parsed body sections. +type Entity struct { + Slug string + SourceFile string + Fields map[string]interface{} + Sections map[string]interface{} // section name -> content ([]string for lists, []FAQ for faqs, string for markdown) + Body string // raw markdown body (minus frontmatter) +} + +// FAQ represents a question-answer pair extracted from a body section. +type FAQ struct { + Question string + Answer string +} + +// GetString returns a string field value, or empty string if not found/not a string. 
+func (e *Entity) GetString(key string) string { + v, ok := e.Fields[key] + if !ok { + return "" + } + s, ok := v.(string) + if !ok { + return "" + } + return s +} + +// GetStringSlice returns a []string field value, or nil if not found/wrong type. +func (e *Entity) GetStringSlice(key string) []string { + v, ok := e.Fields[key] + if !ok { + return nil + } + switch val := v.(type) { + case []string: + return val + case []interface{}: + result := make([]string, 0, len(val)) + for _, item := range val { + if s, ok := item.(string); ok { + result = append(result, s) + } + } + return result + } + return nil +} + +// GetInt returns an int field value, or 0 if not found/wrong type. +func (e *Entity) GetInt(key string) int { + v, ok := e.Fields[key] + if !ok { + return 0 + } + switch val := v.(type) { + case int: + return val + case int64: + return int(val) + case float64: + return int(val) + } + return 0 +} + +// GetFloat returns a float64 field value, or 0 if not found/wrong type. +func (e *Entity) GetFloat(key string) float64 { + v, ok := e.Fields[key] + if !ok { + return 0 + } + switch val := v.(type) { + case float64: + return val + case int: + return float64(val) + case int64: + return float64(val) + } + return 0 +} + +// GetBool returns a bool field value, or false if not found/wrong type. +func (e *Entity) GetBool(key string) bool { + v, ok := e.Fields[key] + if !ok { + return false + } + b, ok := v.(bool) + if !ok { + return false + } + return b +} + +// GetIngredients returns the ingredients section as []string. +func (e *Entity) GetIngredients() []string { + v, ok := e.Sections["ingredients"] + if !ok { + return nil + } + if s, ok := v.([]string); ok { + return s + } + return nil +} + +// GetInstructions returns the instructions section as []string. 
+func (e *Entity) GetInstructions() []string { + v, ok := e.Sections["instructions"] + if !ok { + return nil + } + if s, ok := v.([]string); ok { + return s + } + return nil +} + +// GetFAQs returns the FAQs section as []FAQ. +func (e *Entity) GetFAQs() []FAQ { + v, ok := e.Sections["faqs"] + if !ok { + return nil + } + if f, ok := v.([]FAQ); ok { + return f + } + return nil +} + +// HasField checks if a field exists and is non-empty. +func (e *Entity) HasField(key string) bool { + v, ok := e.Fields[key] + if !ok { + return false + } + switch val := v.(type) { + case string: + return val != "" + case []interface{}: + return len(val) > 0 + case []string: + return len(val) > 0 + case nil: + return false + default: + return true + } +} diff --git a/internal/archdocs/pssg/entity/slug.go b/internal/archdocs/pssg/entity/slug.go new file mode 100644 index 0000000..3e53568 --- /dev/null +++ b/internal/archdocs/pssg/entity/slug.go @@ -0,0 +1,17 @@ +package entity + +import ( + "regexp" + "strings" +) + +var nonAlnum = regexp.MustCompile(`[^a-z0-9]+`) + +// ToSlug converts a string to a URL-safe slug. +// Lowercase, replace non-alphanumeric with hyphens, trim leading/trailing hyphens. +func ToSlug(s string) string { + s = strings.ToLower(s) + s = nonAlnum.ReplaceAllString(s, "-") + s = strings.Trim(s, "-") + return s +} diff --git a/internal/archdocs/pssg/loader/loader.go b/internal/archdocs/pssg/loader/loader.go new file mode 100644 index 0000000..e503a90 --- /dev/null +++ b/internal/archdocs/pssg/loader/loader.go @@ -0,0 +1,21 @@ +package loader + +import ( + "github.com/supermodeltools/cli/internal/archdocs/pssg/config" + "github.com/supermodeltools/cli/internal/archdocs/pssg/entity" +) + +// Loader is the interface for loading entities from data files. +type Loader interface { + Load() ([]*entity.Entity, error) +} + +// New creates a loader based on the config data format. 
+func New(cfg *config.Config) Loader { + switch cfg.Data.Format { + case "markdown": + return &MarkdownLoader{Config: cfg} + default: + return &MarkdownLoader{Config: cfg} + } +} diff --git a/internal/archdocs/pssg/loader/markdown.go b/internal/archdocs/pssg/loader/markdown.go new file mode 100644 index 0000000..ae4f4a3 --- /dev/null +++ b/internal/archdocs/pssg/loader/markdown.go @@ -0,0 +1,230 @@ +package loader + +import ( + "fmt" + "os" + "path/filepath" + "strings" + + "gopkg.in/yaml.v3" + + "github.com/supermodeltools/cli/internal/archdocs/pssg/config" + "github.com/supermodeltools/cli/internal/archdocs/pssg/entity" +) + +// MarkdownLoader loads entities from markdown files with YAML frontmatter. +type MarkdownLoader struct { + Config *config.Config +} + +// Load reads all .md files from the data directory and parses them into entities. +func (l *MarkdownLoader) Load() ([]*entity.Entity, error) { + dataDir := l.Config.Paths.Data + entries, err := os.ReadDir(dataDir) + if err != nil { + return nil, fmt.Errorf("reading data dir %s: %w", dataDir, err) + } + + var entities []*entity.Entity + for _, entry := range entries { + if entry.IsDir() || !strings.HasSuffix(entry.Name(), ".md") { + continue + } + + path := filepath.Join(dataDir, entry.Name()) + e, err := l.parseFile(path) + if err != nil { + fmt.Fprintf(os.Stderr, "Warning: skipping %s: %v\n", entry.Name(), err) + continue + } + entities = append(entities, e) + } + + return entities, nil +} + +func (l *MarkdownLoader) parseFile(path string) (*entity.Entity, error) { + data, err := os.ReadFile(path) + if err != nil { + return nil, err + } + + content := string(data) + + // Split frontmatter from body + frontmatter, body, err := splitFrontmatter(content) + if err != nil { + return nil, fmt.Errorf("splitting frontmatter: %w", err) + } + + // Parse YAML frontmatter + var fields map[string]interface{} + if err := yaml.Unmarshal([]byte(frontmatter), &fields); err != nil { + return nil, fmt.Errorf("parsing 
frontmatter YAML: %w", err) + } + + // Derive slug + slug := l.deriveSlug(path, fields) + + // Parse body sections + sections := l.parseSections(body) + + return &entity.Entity{ + Slug: slug, + SourceFile: path, + Fields: fields, + Sections: sections, + Body: body, + }, nil +} + +// splitFrontmatter separates YAML frontmatter (between --- delimiters) from the body. +func splitFrontmatter(content string) (string, string, error) { + content = strings.TrimSpace(content) + if !strings.HasPrefix(content, "---") { + return "", content, nil + } + + // Find closing --- + rest := content[3:] + idx := strings.Index(rest, "\n---") + if idx < 0 { + return "", content, fmt.Errorf("no closing --- found for frontmatter") + } + + fm := strings.TrimSpace(rest[:idx]) + body := strings.TrimSpace(rest[idx+4:]) + return fm, body, nil +} + +func (l *MarkdownLoader) deriveSlug(path string, fields map[string]interface{}) string { + source := l.Config.Data.EntitySlug.Source + if strings.HasPrefix(source, "field:") { + fieldName := source[6:] + if v, ok := fields[fieldName]; ok { + if s, ok := v.(string); ok { + return entity.ToSlug(s) + } + } + } + // Default: derive from filename + base := filepath.Base(path) + return strings.TrimSuffix(base, filepath.Ext(base)) +} + +func (l *MarkdownLoader) parseSections(body string) map[string]interface{} { + sections := make(map[string]interface{}) + + for _, sectionCfg := range l.Config.Data.BodySections { + content := extractSection(body, sectionCfg.Header) + if content == "" { + continue + } + + switch sectionCfg.Type { + case "unordered_list": + sections[sectionCfg.Name] = parseUnorderedList(content) + case "ordered_list": + sections[sectionCfg.Name] = parseOrderedList(content) + case "faq": + sections[sectionCfg.Name] = parseFAQs(content) + case "markdown": + sections[sectionCfg.Name] = content + default: + sections[sectionCfg.Name] = content + } + } + + return sections +} + +// extractSection extracts the content under a ## heading until the 
// next ## heading.
func extractSection(body, header string) string {
	// Match the heading line exactly. The previous substring search
	// (strings.Index(body, "## "+header)) also matched "### <header>"
	// (the marker appears at offset 1) and "## <header>Suffix", returning
	// content from the wrong section.
	lines := strings.Split(body, "\n")
	start := -1
	for i, line := range lines {
		if strings.TrimRight(line, " \t") == "## "+header {
			start = i + 1
			break
		}
	}
	if start < 0 {
		return ""
	}

	// Section runs until the next H2 heading or end of body.
	end := len(lines)
	for i := start; i < len(lines); i++ {
		if strings.HasPrefix(lines[i], "## ") {
			end = i
			break
		}
	}

	return strings.TrimSpace(strings.Join(lines[start:end], "\n"))
}

// parseUnorderedList extracts items from a markdown unordered list,
// accepting both "- " and "* " bullets.
func parseUnorderedList(content string) []string {
	var items []string
	for _, line := range strings.Split(content, "\n") {
		line = strings.TrimSpace(line)
		for _, bullet := range []string{"- ", "* "} {
			if strings.HasPrefix(line, bullet) {
				items = append(items, line[len(bullet):])
				break
			}
		}
	}
	return items
}

// parseOrderedList extracts items from a markdown ordered list
// ("1. ", "2. ", ... — numeric prefixes of up to four digits).
func parseOrderedList(content string) []string {
	var items []string
	for _, line := range strings.Split(content, "\n") {
		line = strings.TrimSpace(line)
		num, rest, ok := strings.Cut(line, ". ")
		if !ok || num == "" || len(num) > 4 {
			continue
		}
		allDigits := true
		for _, c := range num {
			if c < '0' || c > '9' {
				allDigits = false
				break
			}
		}
		if allDigits {
			items = append(items, rest)
		}
	}
	return items
}

// parseFAQs extracts FAQ pairs from ### headings and their following paragraphs.
+func parseFAQs(content string) []entity.FAQ { + var faqs []entity.FAQ + + parts := strings.Split("\n"+content, "\n### ") + for _, part := range parts[1:] { // skip first empty split + lines := strings.SplitN(part, "\n", 2) + question := strings.TrimSpace(lines[0]) + answer := "" + if len(lines) > 1 { + answer = strings.TrimSpace(lines[1]) + } + if question != "" { + faqs = append(faqs, entity.FAQ{ + Question: question, + Answer: answer, + }) + } + } + + return faqs +} diff --git a/internal/archdocs/pssg/output/llmstxt.go b/internal/archdocs/pssg/output/llmstxt.go new file mode 100644 index 0000000..31f795f --- /dev/null +++ b/internal/archdocs/pssg/output/llmstxt.go @@ -0,0 +1,65 @@ +package output + +import ( + "fmt" + "sort" + "strings" + + "github.com/supermodeltools/cli/internal/archdocs/pssg/config" + "github.com/supermodeltools/cli/internal/archdocs/pssg/entity" + "github.com/supermodeltools/cli/internal/archdocs/pssg/taxonomy" +) + +// GenerateLlmsTxt generates an llms.txt file in the llmstxt.org format. 
+func GenerateLlmsTxt(cfg *config.Config, entities []*entity.Entity, taxonomies []taxonomy.Taxonomy) string { + var lines []string + + // Header + lines = append(lines, fmt.Sprintf("# %s", cfg.Site.Name)) + lines = append(lines, "") + + // Tagline + if cfg.LlmsTxt.Tagline != "" { + lines = append(lines, fmt.Sprintf("> %s", cfg.LlmsTxt.Tagline)) + lines = append(lines, "") + } + + // Entities section + entityLabel := cfg.Data.EntityType + if entityLabel == "" { + entityLabel = "Items" + } + lines = append(lines, fmt.Sprintf("## %ss", strings.Title(entityLabel))) + + // Sort entities by title + sorted := make([]*entity.Entity, len(entities)) + copy(sorted, entities) + sort.Slice(sorted, func(i, j int) bool { + return sorted[i].GetString("title") < sorted[j].GetString("title") + }) + + for _, e := range sorted { + title := e.GetString("title") + desc := e.GetString("description") + url := fmt.Sprintf("%s/%s.html", cfg.Site.BaseURL, e.Slug) + lines = append(lines, fmt.Sprintf("- [%s](%s): %s", title, url, desc)) + } + lines = append(lines, "") + + // Taxonomy sections + for _, taxName := range cfg.LlmsTxt.Taxonomies { + for _, tax := range taxonomies { + if tax.Name == taxName { + lines = append(lines, fmt.Sprintf("## %s", tax.Label)) + for _, entry := range tax.Entries { + url := fmt.Sprintf("%s/%s/%s.html", cfg.Site.BaseURL, tax.Name, entry.Slug) + lines = append(lines, fmt.Sprintf("- [%s](%s)", entry.Name, url)) + } + lines = append(lines, "") + break + } + } + } + + return strings.Join(lines, "\n") +} diff --git a/internal/archdocs/pssg/output/manifest.go b/internal/archdocs/pssg/output/manifest.go new file mode 100644 index 0000000..9246127 --- /dev/null +++ b/internal/archdocs/pssg/output/manifest.go @@ -0,0 +1,26 @@ +package output + +import ( + "encoding/json" + + "github.com/supermodeltools/cli/internal/archdocs/pssg/config" +) + +// GenerateManifest generates a PWA manifest.json. 
+func GenerateManifest(cfg *config.Config) string { + manifest := map[string]interface{}{ + "name": cfg.Site.Name, + "short_name": cfg.Site.Name, + "description": cfg.Site.Description, + "start_url": "/", + "display": "standalone", + "background_color": "#FAFAF7", + "theme_color": "#5B7B5E", + } + + data, err := json.MarshalIndent(manifest, "", " ") + if err != nil { + return "{}" + } + return string(data) +} diff --git a/internal/archdocs/pssg/output/robots.go b/internal/archdocs/pssg/output/robots.go new file mode 100644 index 0000000..eb7506d --- /dev/null +++ b/internal/archdocs/pssg/output/robots.go @@ -0,0 +1,40 @@ +package output + +import ( + "fmt" + "strings" + + "github.com/supermodeltools/cli/internal/archdocs/pssg/config" +) + +// GenerateRobotsTxt generates a robots.txt file. +func GenerateRobotsTxt(cfg *config.Config) string { + var lines []string + + lines = append(lines, "User-agent: *") + if cfg.Robots.AllowAll { + lines = append(lines, "Allow: /") + } + lines = append(lines, "") + + // Standard bots + standardBots := []string{"Googlebot", "Bingbot"} + for _, bot := range standardBots { + lines = append(lines, fmt.Sprintf("User-agent: %s", bot)) + lines = append(lines, "Allow: /") + lines = append(lines, "") + } + + // Extra bots (AI crawlers etc.) 
+ for _, bot := range cfg.Robots.ExtraBots { + lines = append(lines, fmt.Sprintf("User-agent: %s", bot)) + lines = append(lines, "Allow: /") + lines = append(lines, "") + } + + // Sitemap + sitemapURL := fmt.Sprintf("%s/sitemap.xml", cfg.Site.BaseURL) + lines = append(lines, fmt.Sprintf("Sitemap: %s", sitemapURL)) + + return strings.Join(lines, "\n") + "\n" +} diff --git a/internal/archdocs/pssg/output/rss.go b/internal/archdocs/pssg/output/rss.go new file mode 100644 index 0000000..362a66e --- /dev/null +++ b/internal/archdocs/pssg/output/rss.go @@ -0,0 +1,139 @@ +package output + +import ( + "encoding/xml" + "fmt" + "strings" + "time" + + "github.com/supermodeltools/cli/internal/archdocs/pssg/config" + "github.com/supermodeltools/cli/internal/archdocs/pssg/entity" +) + +type rssDoc struct { + XMLName xml.Name `xml:"rss"` + Version string `xml:"version,attr"` + Channel rssChannel `xml:"channel"` +} + +type rssChannel struct { + Title string `xml:"title"` + Link string `xml:"link"` + Description string `xml:"description"` + Language string `xml:"language"` + LastBuildDate string `xml:"lastBuildDate"` + Items []rssItem `xml:"item"` +} + +type rssItem struct { + Title string `xml:"title"` + Link string `xml:"link"` + Description string `xml:"description"` + Category string `xml:"category,omitempty"` + GUID string `xml:"guid"` + PubDate string `xml:"pubDate"` +} + +// RSSFeed represents a generated RSS feed file. +type RSSFeed struct { + RelativePath string + Content string +} + +// GenerateRSSFeeds generates the main RSS feed and optionally per-category feeds. 
+func GenerateRSSFeeds(entities []*entity.Entity, cfg *config.Config, taxonomyEntries map[string][]*entity.Entity) []RSSFeed { + if !cfg.RSS.Enabled { + return nil + } + + buildDate := time.Now().UTC().Format(time.RFC1123Z) + var feeds []RSSFeed + + // Main feed + mainPath := cfg.RSS.MainFeed + if mainPath == "" { + mainPath = "feed.xml" + } + mainContent := generateFeed( + cfg.Site.Name, + cfg.Site.BaseURL, + cfg.Site.Description, + cfg.Site.Language, + buildDate, + entities, + cfg.Site.BaseURL, + ) + feeds = append(feeds, RSSFeed{ + RelativePath: mainPath, + Content: mainContent, + }) + + // Per-category feeds + if cfg.RSS.CategoryFeeds && taxonomyEntries != nil { + for slug, catEntities := range taxonomyEntries { + title := fmt.Sprintf("%s — %s", cfg.Site.Name, slug) + catContent := generateFeed( + title, + fmt.Sprintf("%s/%s/%s.html", cfg.Site.BaseURL, cfg.RSS.CategoryTaxonomy, slug), + fmt.Sprintf("%s recipes", slug), + cfg.Site.Language, + buildDate, + catEntities, + cfg.Site.BaseURL, + ) + feeds = append(feeds, RSSFeed{ + RelativePath: fmt.Sprintf("%s/%s/feed.xml", cfg.RSS.CategoryTaxonomy, slug), + Content: catContent, + }) + } + } + + return feeds +} + +func generateFeed(title, link, description, language, buildDate string, entities []*entity.Entity, baseURL string) string { + channel := rssChannel{ + Title: xmlEscape(title), + Link: link, + Description: xmlEscape(description), + Language: language, + LastBuildDate: buildDate, + } + + for _, e := range entities { + itemTitle := e.GetString("title") + itemDesc := e.GetString("description") + category := e.GetString("recipe_category") + + item := rssItem{ + Title: xmlEscape(itemTitle), + Link: fmt.Sprintf("%s/%s.html", baseURL, e.Slug), + Description: xmlEscape(itemDesc), + GUID: fmt.Sprintf("%s/%s.html", baseURL, e.Slug), + PubDate: buildDate, + } + if category != "" { + item.Category = xmlEscape(category) + } + channel.Items = append(channel.Items, item) + } + + doc := rssDoc{ + Version: "2.0", + Channel: 
channel, + } + + data, err := xml.MarshalIndent(doc, "", " ") + if err != nil { + return "" + } + return xml.Header + string(data) +} + +func xmlEscape(s string) string { + s = strings.ReplaceAll(s, "&", "&") + s = strings.ReplaceAll(s, "<", "<") + s = strings.ReplaceAll(s, ">", ">") + s = strings.ReplaceAll(s, "\"", """) + return s +} diff --git a/internal/archdocs/pssg/output/sitemap.go b/internal/archdocs/pssg/output/sitemap.go new file mode 100644 index 0000000..b30df8f --- /dev/null +++ b/internal/archdocs/pssg/output/sitemap.go @@ -0,0 +1,148 @@ +package output + +import ( + "encoding/xml" + "fmt" + "strings" +) + +// SitemapEntry represents a single URL in the sitemap. +type SitemapEntry struct { + Loc string + Lastmod string + Priority string + ChangeFreq string +} + +type urlSet struct { + XMLName xml.Name `xml:"urlset"` + XMLNS string `xml:"xmlns,attr"` + URLs []urlEntry `xml:"url"` +} + +type urlEntry struct { + Loc string `xml:"loc"` + Lastmod string `xml:"lastmod,omitempty"` + Priority string `xml:"priority,omitempty"` + ChangeFreq string `xml:"changefreq,omitempty"` +} + +type sitemapIndex struct { + XMLName xml.Name `xml:"sitemapindex"` + XMLNS string `xml:"xmlns,attr"` + Sitemaps []sitemapEntry `xml:"sitemap"` +} + +type sitemapEntry struct { + Loc string `xml:"loc"` + Lastmod string `xml:"lastmod,omitempty"` +} + +// GenerateSitemapFiles generates sitemap XML files, splitting at maxPerFile URLs. 
+func GenerateSitemapFiles(entries []SitemapEntry, baseURL string, maxPerFile int) []SitemapFile { + if maxPerFile <= 0 { + maxPerFile = 50000 + } + + if len(entries) <= maxPerFile { + // Single sitemap + content := generateSitemap(entries) + return []SitemapFile{ + {Filename: "sitemap.xml", Content: content}, + } + } + + // Multiple sitemaps with index + var files []SitemapFile + var indexEntries []sitemapEntry + lastmod := "" + if len(entries) > 0 { + lastmod = entries[0].Lastmod + } + + chunks := chunkEntries(entries, maxPerFile) + for i, chunk := range chunks { + filename := fmt.Sprintf("sitemap-%d.xml", i+1) + content := generateSitemap(chunk) + files = append(files, SitemapFile{ + Filename: filename, + Content: content, + }) + indexEntries = append(indexEntries, sitemapEntry{ + Loc: fmt.Sprintf("%s/%s", baseURL, filename), + Lastmod: lastmod, + }) + } + + // Generate index + indexContent := generateSitemapIndex(indexEntries) + files = append([]SitemapFile{{Filename: "sitemap.xml", Content: indexContent}}, files...) + + return files +} + +// SitemapFile is a filename + content pair. 
+type SitemapFile struct { + Filename string + Content string +} + +func generateSitemap(entries []SitemapEntry) string { + us := urlSet{ + XMLNS: "http://www.sitemaps.org/schemas/sitemap/0.9", + } + for _, e := range entries { + us.URLs = append(us.URLs, urlEntry{ + Loc: e.Loc, + Lastmod: e.Lastmod, + Priority: e.Priority, + ChangeFreq: e.ChangeFreq, + }) + } + + data, err := xml.MarshalIndent(us, "", " ") + if err != nil { + return "" + } + return xml.Header + string(data) +} + +func generateSitemapIndex(entries []sitemapEntry) string { + si := sitemapIndex{ + XMLNS: "http://www.sitemaps.org/schemas/sitemap/0.9", + Sitemaps: entries, + } + + data, err := xml.MarshalIndent(si, "", " ") + if err != nil { + return "" + } + return xml.Header + string(data) +} + +func chunkEntries(entries []SitemapEntry, size int) [][]SitemapEntry { + var chunks [][]SitemapEntry + for i := 0; i < len(entries); i += size { + end := i + size + if end > len(entries) { + end = len(entries) + } + chunks = append(chunks, entries[i:end]) + } + return chunks +} + +// NewSitemapEntry creates a sitemap entry with the given base URL. +func NewSitemapEntry(baseURL, path, lastmod, priority, changefreq string) SitemapEntry { + loc := baseURL + path + loc = strings.TrimRight(loc, "/") + if path == "/" { + loc = baseURL + "/" + } + return SitemapEntry{ + Loc: loc, + Lastmod: lastmod, + Priority: priority, + ChangeFreq: changefreq, + } +} diff --git a/internal/archdocs/pssg/render/funcs.go b/internal/archdocs/pssg/render/funcs.go new file mode 100644 index 0000000..6029636 --- /dev/null +++ b/internal/archdocs/pssg/render/funcs.go @@ -0,0 +1,616 @@ +package render + +import ( + "encoding/json" + "fmt" + "html/template" + "math" + "net/url" + "reflect" + "regexp" + "strconv" + "strings" + + "github.com/supermodeltools/cli/internal/archdocs/pssg/entity" +) + +// BuildFuncMap creates the template FuncMap with all helper functions. 
+func BuildFuncMap() template.FuncMap { + return template.FuncMap{ + // String functions + "slug": entity.ToSlug, + "lower": strings.ToLower, + "upper": strings.ToUpper, + "title": strings.Title, + "join": strings.Join, + "split": strings.Split, + "replace": strings.ReplaceAll, + "contains": strings.Contains, + "hasPrefix": strings.HasPrefix, + "hasSuffix": strings.HasSuffix, + "trimSpace": strings.TrimSpace, + "urlencode": url.QueryEscape, + + // Number functions + "formatNumber": formatNumber, + "add": func(a, b int) int { return a + b }, + "sub": func(a, b int) int { return a - b }, + "mul": func(a, b int) int { return a * b }, + "div": func(a, b int) int { + if b == 0 { + return 0 + } + return a / b + }, + "mod": func(a, b int) int { + if b == 0 { + return 0 + } + return a % b + }, + "addf": func(a, b float64) float64 { return a + b }, + "mulf": func(a, b float64) float64 { return a * b }, + + // Duration functions + "durationMinutes": durationMinutes, + "totalTime": totalTime, + "formatDuration": formatDuration, + + // Collection functions + "first": first, + "last": last, + "seq": seq, + "dict": dict, + "slice": sliceHelper, + "len": length, + "sort": sortStrings, + "reverse": reverseStrings, + "min": minInt, + "max": maxInt, + + // Entity functions + "field": fieldAccess, + "section": sectionAccess, + "getStringSlice": getStringSlice, + "hasField": hasField, + "getInt": getInt, + "getFloat": getFloat, + + // JSON/HTML functions + "jsonMarshal": jsonMarshal, + "safeHTML": func(s string) template.HTML { return template.HTML(s) }, + "safeJS": func(s string) template.JS { return template.JS(s) }, + "safeCSS": func(s string) template.CSS { return template.CSS(s) }, + "safeURL": func(s string) template.URL { return template.URL(s) }, + "safeAttr": func(s string) template.HTMLAttr { return template.HTMLAttr(s) }, + + // Ingredient parsing + "parseIngredientQty": parseIngredientQty, + "parseIngredientUnit": parseIngredientUnit, + "parseIngredientDesc": 
parseIngredientDesc, + "fractionDisplay": fractionDisplay, + "scaleQty": scaleQty, + + // Conditionals + "default": defaultVal, + "ternary": ternary, + "hasKey": hasKey, + + // Comparison + "eq": func(a, b interface{}) bool { return fmt.Sprintf("%v", a) == fmt.Sprintf("%v", b) }, + "ne": func(a, b interface{}) bool { return fmt.Sprintf("%v", a) != fmt.Sprintf("%v", b) }, + "lt": func(a, b int) bool { return a < b }, + "le": func(a, b int) bool { return a <= b }, + "gt": func(a, b int) bool { return a > b }, + "ge": func(a, b int) bool { return a >= b }, + + // Misc + "toJSON": toJSON, + "noescape": func(s string) template.HTML { return template.HTML(s) }, + } +} + +// formatNumber adds thousands separators to a number. +func formatNumber(n interface{}) string { + var num int64 + switch v := n.(type) { + case int: + num = int64(v) + case int64: + num = v + case float64: + num = int64(v) + default: + return fmt.Sprintf("%v", n) + } + + s := strconv.FormatInt(num, 10) + if len(s) <= 3 { + return s + } + + var result []byte + for i, c := range s { + if i > 0 && (len(s)-i)%3 == 0 { + result = append(result, ',') + } + result = append(result, byte(c)) + } + return string(result) +} + +var isoDuration = regexp.MustCompile(`PT(?:(\d+)H)?(?:(\d+)M)?(?:(\d+)S)?`) + +// durationMinutes converts an ISO 8601 duration to minutes. +func durationMinutes(d string) int { + matches := isoDuration.FindStringSubmatch(d) + if matches == nil { + return 0 + } + hours, _ := strconv.Atoi(matches[1]) + minutes, _ := strconv.Atoi(matches[2]) + return hours*60 + minutes +} + +// totalTime adds two ISO 8601 durations. 
+func totalTime(d1, d2 string) string { + total := durationMinutes(d1) + durationMinutes(d2) + hours := total / 60 + minutes := total % 60 + if hours > 0 && minutes > 0 { + return fmt.Sprintf("PT%dH%dM", hours, minutes) + } + if hours > 0 { + return fmt.Sprintf("PT%dH", hours) + } + return fmt.Sprintf("PT%dM", minutes) +} + +// formatDuration converts an ISO 8601 duration to human-readable form. +func formatDuration(d string) string { + minutes := durationMinutes(d) + if minutes == 0 { + return d + } + hours := minutes / 60 + mins := minutes % 60 + if hours > 0 && mins > 0 { + return fmt.Sprintf("%d hr %d min", hours, mins) + } + if hours > 0 { + if hours == 1 { + return "1 hr" + } + return fmt.Sprintf("%d hrs", hours) + } + return fmt.Sprintf("%d min", mins) +} + +func first(list interface{}) interface{} { + switch v := list.(type) { + case []string: + if len(v) > 0 { + return v[0] + } + case []interface{}: + if len(v) > 0 { + return v[0] + } + case []*entity.Entity: + if len(v) > 0 { + return v[0] + } + } + return nil +} + +func last(list interface{}) interface{} { + switch v := list.(type) { + case []string: + if len(v) > 0 { + return v[len(v)-1] + } + case []interface{}: + if len(v) > 0 { + return v[len(v)-1] + } + } + return nil +} + +// seq generates a sequence of integers from 1 to n. +func seq(n int) []int { + result := make([]int, n) + for i := range result { + result[i] = i + 1 + } + return result +} + +// dict creates a map from alternating key/value pairs. 
+func dict(values ...interface{}) map[string]interface{} { + result := make(map[string]interface{}) + for i := 0; i+1 < len(values); i += 2 { + key := fmt.Sprintf("%v", values[i]) + result[key] = values[i+1] + } + return result +} + +func sliceHelper(items interface{}, start, end int) interface{} { + switch v := items.(type) { + case []string: + if start < 0 { + start = 0 + } + if end > len(v) { + end = len(v) + } + return v[start:end] + case []*entity.Entity: + if start < 0 { + start = 0 + } + if end > len(v) { + end = len(v) + } + return v[start:end] + case []interface{}: + if start < 0 { + start = 0 + } + if end > len(v) { + end = len(v) + } + return v[start:end] + } + return items +} + +func length(v interface{}) int { + if v == nil { + return 0 + } + rv := reflect.ValueOf(v) + switch rv.Kind() { + case reflect.Slice, reflect.Map, reflect.Array, reflect.String: + return rv.Len() + } + return 0 +} + +func sortStrings(s []string) []string { + result := make([]string, len(s)) + copy(result, s) + return result +} + +func reverseStrings(s []string) []string { + result := make([]string, len(s)) + for i, v := range s { + result[len(s)-1-i] = v + } + return result +} + +func minInt(a, b int) int { + if a < b { + return a + } + return b +} + +func maxInt(a, b int) int { + if a > b { + return a + } + return b +} + +func fieldAccess(e *entity.Entity, key string) interface{} { + if e == nil { + return nil + } + return e.Fields[key] +} + +func sectionAccess(e *entity.Entity, key string) interface{} { + if e == nil { + return nil + } + return e.Sections[key] +} + +func getStringSlice(e *entity.Entity, key string) []string { + if e == nil { + return nil + } + return e.GetStringSlice(key) +} + +func hasField(e *entity.Entity, key string) bool { + if e == nil { + return false + } + return e.HasField(key) +} + +func getInt(e *entity.Entity, key string) int { + if e == nil { + return 0 + } + return e.GetInt(key) +} + +func getFloat(e *entity.Entity, key string) float64 { + if e 
== nil { + return 0 + } + return e.GetFloat(key) +} + +func jsonMarshal(v interface{}) template.JS { + data, err := json.Marshal(v) + if err != nil { + return template.JS("{}") + } + return template.JS(data) +} + +func toJSON(v interface{}) string { + data, err := json.Marshal(v) + if err != nil { + return "{}" + } + return string(data) +} + +func defaultVal(def, val interface{}) interface{} { + if val == nil { + return def + } + if s, ok := val.(string); ok && s == "" { + return def + } + return val +} + +func ternary(cond bool, trueVal, falseVal interface{}) interface{} { + if cond { + return trueVal + } + return falseVal +} + +func hasKey(m map[string]interface{}, key string) bool { + _, ok := m[key] + return ok +} + +// Ingredient parsing functions + +// Unicode fraction map +var unicodeFractions = map[rune]float64{ + '\u00BD': 0.5, + '\u2153': 1.0 / 3.0, + '\u2154': 2.0 / 3.0, + '\u00BC': 0.25, + '\u00BE': 0.75, + '\u215B': 0.125, + '\u215C': 0.375, + '\u215D': 0.625, + '\u215E': 0.875, + '\u2155': 0.2, + '\u2156': 0.4, + '\u2157': 0.6, + '\u2158': 0.8, + '\u2159': 1.0 / 6.0, + '\u215A': 5.0 / 6.0, +} + +var unitAliases = map[string]string{ + "cups": "cup", "c": "cup", + "tablespoons": "tablespoon", "tablespoon": "tablespoon", "tbsp": "tablespoon", "tbsps": "tablespoon", "tbs": "tablespoon", + "teaspoons": "teaspoon", "teaspoon": "teaspoon", "tsp": "teaspoon", "tsps": "teaspoon", + "pounds": "pound", "pound": "pound", "lbs": "pound", "lb": "pound", + "ounces": "ounce", "ounce": "ounce", "oz": "ounce", + "grams": "gram", "gram": "gram", "g": "gram", + "kilograms": "kilogram", "kilogram": "kilogram", "kg": "kilogram", "kgs": "kilogram", + "milliliters": "ml", "milliliter": "ml", "ml": "ml", "mls": "ml", + "liters": "liter", "liter": "liter", "l": "liter", + "pints": "pint", "pint": "pint", "pt": "pint", + "quarts": "quart", "quart": "quart", "qt": "quart", "qts": "quart", + "gallons": "gallon", "gallon": "gallon", "gal": "gallon", + "bunches": "bunch", "bunch": 
"bunch", + "cloves": "clove", "clove": "clove", + "heads": "head", "head": "head", + "cans": "can", "can": "can", + "packages": "package", "package": "package", "pkg": "package", + "slices": "slice", "slice": "slice", + "pieces": "piece", "piece": "piece", + "sticks": "stick", "stick": "stick", + "pinches": "pinch", "pinch": "pinch", + "dashes": "dash", "dash": "dash", + "sprigs": "sprig", "sprig": "sprig", + "stalks": "stalk", "stalk": "stalk", +} + +var qtyRegex = regexp.MustCompile(`^(\d+)\s+(\d+)/(\d+)`) +var fractionRegex = regexp.MustCompile(`^(\d+)/(\d+)`) +var numberRegex = regexp.MustCompile(`^(\d+(?:\.\d+)?)`) + +// parseQuantity extracts a numeric quantity from the beginning of a string. +// Returns the quantity and the remaining string. +func parseQuantity(s string) (float64, string) { + s = strings.TrimSpace(s) + if s == "" { + return 0, "" + } + + // Check for unicode fractions at the beginning + for _, r := range s { + if v, ok := unicodeFractions[r]; ok { + rest := strings.TrimSpace(s[len(string(r)):]) + return v, rest + } + break + } + + // Check for mixed number with unicode fraction: "1 ½" + if m := numberRegex.FindString(s); m != "" { + rest := strings.TrimSpace(s[len(m):]) + if len(rest) > 0 { + for _, r := range rest { + if v, ok := unicodeFractions[r]; ok { + whole, _ := strconv.ParseFloat(m, 64) + newRest := strings.TrimSpace(rest[len(string(r)):]) + return whole + v, newRest + } + break + } + } + } + + // Check for mixed number: "1 1/2" + if matches := qtyRegex.FindStringSubmatch(s); matches != nil { + whole, _ := strconv.ParseFloat(matches[1], 64) + num, _ := strconv.ParseFloat(matches[2], 64) + den, _ := strconv.ParseFloat(matches[3], 64) + if den != 0 { + rest := strings.TrimSpace(s[len(matches[0]):]) + return whole + num/den, rest + } + } + + // Check for simple fraction: "1/2" + if matches := fractionRegex.FindStringSubmatch(s); matches != nil { + num, _ := strconv.ParseFloat(matches[1], 64) + den, _ := strconv.ParseFloat(matches[2], 
64) + if den != 0 { + rest := strings.TrimSpace(s[len(matches[0]):]) + return num / den, rest + } + } + + // Check for decimal or integer + if m := numberRegex.FindString(s); m != "" { + v, _ := strconv.ParseFloat(m, 64) + rest := strings.TrimSpace(s[len(m):]) + return v, rest + } + + return 0, s +} + +// parseUnit extracts and normalizes a unit from the beginning of a string. +func parseUnit(s string) (string, string) { + s = strings.TrimSpace(s) + if s == "" { + return "", "" + } + + // Try to match a unit word + words := strings.Fields(s) + if len(words) == 0 { + return "", s + } + + // Check with parenthetical: "(14 ounce)" prefix + if strings.HasPrefix(s, "(") { + endParen := strings.Index(s, ")") + if endParen > 0 { + // Keep the parenthetical as part of the description + return "", s + } + } + + word := strings.ToLower(strings.TrimRight(words[0], ".,;")) + if canonical, ok := unitAliases[word]; ok { + rest := strings.TrimSpace(s[len(words[0]):]) + return canonical, rest + } + + return "", s +} + +// parseIngredientQty returns the numeric quantity from an ingredient line. +func parseIngredientQty(line string) float64 { + qty, _ := parseQuantity(line) + return qty +} + +// parseIngredientUnit returns the canonical unit from an ingredient line. +func parseIngredientUnit(line string) string { + _, rest := parseQuantity(line) + unit, _ := parseUnit(rest) + return unit +} + +// parseIngredientDesc returns the description (everything after qty + unit). +func parseIngredientDesc(line string) string { + _, rest := parseQuantity(line) + _, desc := parseUnit(rest) + return desc +} + +// fractionDisplay converts a decimal to a display string with fraction symbols. 
+func fractionDisplay(f float64) string { + if f == 0 { + return "0" + } + + whole := int(f) + frac := f - float64(whole) + + // Snap to common fractions + fracStr := "" + if math.Abs(frac) < 0.05 { + fracStr = "" + } else if math.Abs(frac-0.125) < 0.05 { + fracStr = "\u215B" + } else if math.Abs(frac-0.2) < 0.05 { + fracStr = "\u2155" + } else if math.Abs(frac-0.25) < 0.05 { + fracStr = "\u00BC" + } else if math.Abs(frac-1.0/3.0) < 0.05 { + fracStr = "\u2153" + } else if math.Abs(frac-0.375) < 0.05 { + fracStr = "\u215C" + } else if math.Abs(frac-0.5) < 0.05 { + fracStr = "\u00BD" + } else if math.Abs(frac-0.625) < 0.05 { + fracStr = "\u215D" + } else if math.Abs(frac-2.0/3.0) < 0.05 { + fracStr = "\u2154" + } else if math.Abs(frac-0.75) < 0.05 { + fracStr = "\u00BE" + } else if math.Abs(frac-0.875) < 0.05 { + fracStr = "\u215E" + } else { + // No matching fraction, show decimal + if whole > 0 { + return fmt.Sprintf("%.1f", f) + } + return fmt.Sprintf("%.2f", f) + } + + if whole > 0 && fracStr != "" { + return fmt.Sprintf("%d %s", whole, fracStr) + } + if whole > 0 { + return fmt.Sprintf("%d", whole) + } + if fracStr != "" { + return fracStr + } + return fmt.Sprintf("%.1f", f) +} + +// scaleQty scales a quantity by ratio and formats with fractions. 
+func scaleQty(baseQty float64, baseServings, newServings int) string { + if baseServings == 0 { + return fractionDisplay(baseQty) + } + scaled := baseQty * float64(newServings) / float64(baseServings) + return fractionDisplay(scaled) +} diff --git a/internal/archdocs/pssg/render/render.go b/internal/archdocs/pssg/render/render.go new file mode 100644 index 0000000..c79a2ed --- /dev/null +++ b/internal/archdocs/pssg/render/render.go @@ -0,0 +1,326 @@ +package render + +import ( + "bytes" + "fmt" + "html/template" + "os" + "path/filepath" + "strings" + + "github.com/supermodeltools/cli/internal/archdocs/pssg/affiliate" + "github.com/supermodeltools/cli/internal/archdocs/pssg/config" + "github.com/supermodeltools/cli/internal/archdocs/pssg/entity" + "github.com/supermodeltools/cli/internal/archdocs/pssg/taxonomy" +) + +// Engine is the template rendering engine. +type Engine struct { + tmpl *template.Template + cfg *config.Config +} + +// EntityPageContext is the template context for entity (recipe) pages. +type EntityPageContext struct { + Site config.SiteConfig + Entity *entity.Entity + Slug string + URL string + CanonicalURL string + Breadcrumbs []Breadcrumb + Pairings []*entity.Entity + Enrichment map[string]interface{} + AffiliateLinks []affiliate.Link + CookModePrompt string + JsonLD template.HTML + Taxonomies []taxonomy.Taxonomy + AllTaxonomies []taxonomy.Taxonomy + ValidSlugs map[string]map[string]bool + Contributors map[string]interface{} + OG OGMeta + ChartData template.JS + CTA config.CTAConfig + SourceCode string + SourceLang string +} + +// HomepageContext is the template context for the homepage. 
+type HomepageContext struct { + Site config.SiteConfig + Entities []*entity.Entity + Taxonomies []taxonomy.Taxonomy + Favorites []*entity.Entity + JsonLD template.HTML + EntityCount int + Contributors map[string]interface{} + OG OGMeta + ChartData template.JS + CTA config.CTAConfig + ArchData template.JS +} + +// HubPageContext is the template context for taxonomy hub (category) pages. +type HubPageContext struct { + Site config.SiteConfig + Taxonomy taxonomy.Taxonomy + Entry taxonomy.Entry + Entities []*entity.Entity + Pagination taxonomy.PaginationInfo + JsonLD template.HTML + Breadcrumbs []Breadcrumb + AllTaxonomies []taxonomy.Taxonomy + Contributors map[string]interface{} + ContributorProfile map[string]interface{} + OG OGMeta + ChartData template.JS + CTA config.CTAConfig +} + +// TaxonomyIndexContext is the template context for taxonomy index pages. +type TaxonomyIndexContext struct { + Site config.SiteConfig + Taxonomy taxonomy.Taxonomy + Entries []taxonomy.Entry + TopEntries []taxonomy.Entry + LetterGroups []taxonomy.LetterGroup + HasLetters bool + Letters []string + JsonLD template.HTML + Breadcrumbs []Breadcrumb + AllTaxonomies []taxonomy.Taxonomy + OG OGMeta + ChartData template.JS + CTA config.CTAConfig +} + +// LetterPageContext is the template context for A-Z letter pages. +type LetterPageContext struct { + Site config.SiteConfig + Taxonomy taxonomy.Taxonomy + Letter string + Entries []taxonomy.Entry + Letters []string + JsonLD template.HTML + Breadcrumbs []Breadcrumb + AllTaxonomies []taxonomy.Taxonomy + OG OGMeta + ChartData template.JS + CTA config.CTAConfig +} + +// AllEntitiesPageContext is the template context for the all-entities listing pages. 
+type AllEntitiesPageContext struct { + Site config.SiteConfig + Entities []*entity.Entity + Pagination taxonomy.PaginationInfo + JsonLD template.HTML + Breadcrumbs []Breadcrumb + AllTaxonomies []taxonomy.Taxonomy + EntityCount int + TotalEntities int + OG OGMeta + ChartData template.JS + CTA config.CTAConfig +} + +// StaticPageContext is the template context for static pages. +type StaticPageContext struct { + Site config.SiteConfig + Title string + Content template.HTML + JsonLD template.HTML + Breadcrumbs []Breadcrumb + AllTaxonomies []taxonomy.Taxonomy +} + +// Breadcrumb is a single breadcrumb entry. +type Breadcrumb struct { + Name string + URL string +} + +// OGMeta holds Open Graph and Twitter Card metadata for a page. +type OGMeta struct { + Title string + Description string + URL string + ImageURL string + Type string // "website" for homepage, "article" for all others + SiteName string +} + +// NameCount is a generic name+count pair used for chart data and share images. +type NameCount struct { + Name string `json:"name"` + Count int `json:"count"` +} + +// NewEngine creates a render engine loading templates from the given directory. 
+func NewEngine(cfg *config.Config) (*Engine, error) { + funcMap := BuildFuncMap() + + tmplDir := cfg.Paths.Templates + entries, err := os.ReadDir(tmplDir) + if err != nil { + return nil, fmt.Errorf("reading template dir %s: %w", tmplDir, err) + } + + tmpl := template.New("").Funcs(funcMap) + + for _, entry := range entries { + if entry.IsDir() { + continue + } + name := entry.Name() + ext := filepath.Ext(name) + if ext != ".html" && ext != ".css" && ext != ".js" { + continue + } + + path := filepath.Join(tmplDir, name) + data, err := os.ReadFile(path) + if err != nil { + return nil, fmt.Errorf("reading template %s: %w", name, err) + } + + _, err = tmpl.New(name).Parse(string(data)) + if err != nil { + return nil, fmt.Errorf("parsing template %s: %w", name, err) + } + } + + return &Engine{tmpl: tmpl, cfg: cfg}, nil +} + +// RenderEntity renders an entity page. +func (e *Engine) RenderEntity(ctx EntityPageContext) (string, error) { + return e.render(e.cfg.Templates.Entity, ctx) +} + +// RenderHomepage renders the homepage. +func (e *Engine) RenderHomepage(ctx HomepageContext) (string, error) { + return e.render(e.cfg.Templates.Homepage, ctx) +} + +// RenderHub renders a taxonomy hub page. +func (e *Engine) RenderHub(ctx HubPageContext) (string, error) { + templateName := ctx.Taxonomy.Config.Template + return e.render(templateName, ctx) +} + +// RenderTaxonomyIndex renders a taxonomy index page. +func (e *Engine) RenderTaxonomyIndex(ctx TaxonomyIndexContext) (string, error) { + templateName := ctx.Taxonomy.Config.IndexTemplate + return e.render(templateName, ctx) +} + +// RenderLetter renders a letter page. +func (e *Engine) RenderLetter(ctx LetterPageContext) (string, error) { + templateName := ctx.Taxonomy.Config.LetterTemplate + return e.render(templateName, ctx) +} + +// RenderAllEntities renders an all-entities listing page. 
+func (e *Engine) RenderAllEntities(ctx AllEntitiesPageContext) (string, error) { + return e.render("all_entities.html", ctx) +} + +// RenderStatic renders a static page. +func (e *Engine) RenderStatic(templateName string, ctx StaticPageContext) (string, error) { + return e.render(templateName, ctx) +} + +func (e *Engine) render(name string, data interface{}) (string, error) { + t := e.tmpl.Lookup(name) + if t == nil { + return "", fmt.Errorf("template %q not found", name) + } + + var buf bytes.Buffer + if err := t.Execute(&buf, data); err != nil { + return "", fmt.Errorf("executing template %q: %w", name, err) + } + + return buf.String(), nil +} + +// RenderCSS reads and returns the CSS template content. +// Uses Tree.Root.String() to avoid html/template escaping in CSS code. +func (e *Engine) RenderCSS() (string, error) { + t := e.tmpl.Lookup("_styles.css") + if t == nil { + return "", nil + } + return t.Tree.Root.String(), nil +} + +// RenderJS reads and returns the JS template content. +// Uses Tree.Root.String() to avoid html/template escaping < to < in JS code. +func (e *Engine) RenderJS() (string, error) { + t := e.tmpl.Lookup("_main.js") + if t == nil { + return "", nil + } + return t.Tree.Root.String(), nil +} + +// GenerateCookModePrompt builds a cook-with-AI prompt for a recipe. 
+func GenerateCookModePrompt(e *entity.Entity, enrichment map[string]interface{}, affiliateLinks []affiliate.Link) string { + if enrichment == nil { + return "" + } + + var parts []string + + title := e.GetString("title") + parts = append(parts, fmt.Sprintf("I want to cook: %s", title)) + + // Coaching prompt + if cp, ok := enrichment["coachingPrompt"].(string); ok && cp != "" { + parts = append(parts, cp) + } + + // Ingredients + if ingredients := e.GetIngredients(); len(ingredients) > 0 { + items := make([]string, len(ingredients)) + for i, ing := range ingredients { + items[i] = "- " + ing + } + parts = append(parts, "Ingredients:\n"+strings.Join(items, "\n")) + } + + // Instructions + if instructions := e.GetInstructions(); len(instructions) > 0 { + items := make([]string, len(instructions)) + for i, inst := range instructions { + items[i] = fmt.Sprintf("%d. %s", i+1, inst) + } + parts = append(parts, "Instructions:\n"+strings.Join(items, "\n")) + } + + // Cooking tips + if tips, ok := enrichment["cookingTips"].([]interface{}); ok && len(tips) > 0 { + items := make([]string, 0, len(tips)) + for _, tip := range tips { + if s, ok := tip.(string); ok { + items = append(items, "- "+s) + } + } + if len(items) > 0 { + parts = append(parts, "Key Tips:\n"+strings.Join(items, "\n")) + } + } + + // Shopping links + if len(affiliateLinks) > 0 { + items := make([]string, 0, len(affiliateLinks)) + for _, link := range affiliateLinks { + items = append(items, fmt.Sprintf("- %s: %s (%s)", link.Term, link.URL, link.Provider)) + } + parts = append(parts, "Shopping Links:\n"+strings.Join(items, "\n")) + } + + parts = append(parts, "Please guide me through this recipe step by step, including timing, technique details, and what to watch for at each stage.") + + return strings.Join(parts, "\n\n") +} diff --git a/internal/archdocs/pssg/render/shareimage.go b/internal/archdocs/pssg/render/shareimage.go new file mode 100644 index 0000000..c3ef918 --- /dev/null +++ 
b/internal/archdocs/pssg/render/shareimage.go @@ -0,0 +1,243 @@ +package render + +import ( + "fmt" + "strings" +) + +// Share image constants +const ( + svgWidth = 1200 + svgHeight = 630 + svgBG = "#0f1117" + svgText = "#e4e4e7" + svgMuted = "#71717a" + svgAccent = "#5B7B5E" + svgAccent2 = "#C4956A" +) + +// svgEscape escapes text for safe embedding in SVG. +func svgEscape(s string) string { + s = strings.ReplaceAll(s, "&", "&") + s = strings.ReplaceAll(s, "<", "<") + s = strings.ReplaceAll(s, ">", ">") + s = strings.ReplaceAll(s, "\"", """) + return s +} + +// truncate limits string length with ellipsis. +func truncate(s string, max int) string { + if len(s) <= max { + return s + } + return s[:max-1] + "\u2026" +} + +// svgScaffold wraps content in the standard share image scaffold. +func svgScaffold(siteName, pageTitle, content string) string { + return fmt.Sprintf(` + + %s + %s + %s + + + + + + + +`, + svgWidth, svgHeight, svgWidth, svgHeight, + svgWidth, svgHeight, svgBG, + svgMuted, svgEscape(siteName), + svgText, svgEscape(truncate(pageTitle, 60)), + content, + svgHeight-8, svgWidth, + svgAccent, svgAccent2, + ) +} + +// renderBarsSVG renders horizontal bars as SVG elements. 
+func renderBarsSVG(bars []NameCount, x, y, maxW, barH, gap int) string { + if len(bars) == 0 { + return "" + } + maxVal := bars[0].Count + for _, b := range bars { + if b.Count > maxVal { + maxVal = b.Count + } + } + if maxVal == 0 { + maxVal = 1 + } + + var sb strings.Builder + colors := []string{"#5B7B5E", "#C4956A", "#4A7B9B", "#7C5BB0", "#A68B2D", "#B94A4A", "#6B6B6B", "#3d8b6e"} + for i, b := range bars { + w := (b.Count * maxW) / maxVal + if w < 4 { + w = 4 + } + cy := y + i*(barH+gap) + color := colors[i%len(colors)] + sb.WriteString(fmt.Sprintf(` `, x, cy, w, barH, color)) + sb.WriteString("\n") + sb.WriteString(fmt.Sprintf(` %s`, x, cy-4, svgText, svgEscape(truncate(b.Name, 30)))) + sb.WriteString("\n") + sb.WriteString(fmt.Sprintf(` %d`, x+w+8, cy+barH-4, svgMuted, b.Count)) + sb.WriteString("\n") + } + return sb.String() +} + +// GenerateHomepageShareSVG generates the homepage share image SVG. +func GenerateHomepageShareSVG(siteName, description string, taxStats []NameCount, totalEntities int) string { + var content strings.Builder + content.WriteString(fmt.Sprintf(` %s`, svgMuted, svgEscape(truncate(description, 80)))) + content.WriteString("\n") + content.WriteString(fmt.Sprintf(` %d total recipes`, svgAccent, totalEntities)) + content.WriteString("\n") + + // Show taxonomy bars (max 8) + limit := len(taxStats) + if limit > 8 { + limit = 8 + } + bars := taxStats[:limit] + content.WriteString(renderBarsSVG(bars, 60, 250, 900, 28, 14)) + + return svgScaffold(siteName, siteName+" \u2014 Recipe Collection", content.String()) +} + +// GenerateEntityShareSVG generates the entity share image SVG. 
+func GenerateEntityShareSVG(siteName, title, category, cuisine, skillLevel string) string { + var content strings.Builder + + // Pills for metadata + pillX := 60 + pillY := 170 + pills := []struct{ label, color string }{ + {category, svgAccent}, + {cuisine, svgAccent2}, + {skillLevel, "#4A7B9B"}, + } + for _, p := range pills { + if p.label == "" { + continue + } + w := len(p.label)*10 + 24 + content.WriteString(fmt.Sprintf(` `, pillX, pillY, w, p.color)) + content.WriteString("\n") + content.WriteString(fmt.Sprintf(` %s`, pillX+12, pillY+21, p.color, svgEscape(p.label))) + content.WriteString("\n") + pillX += w + 12 + } + + // Large decorative title + content.WriteString(fmt.Sprintf(` %s`, svgText, svgEscape(truncate(title, 40)))) + content.WriteString("\n") + + return svgScaffold(siteName, truncate(title, 55), content.String()) +} + +// GenerateHubShareSVG generates the hub page share image SVG. +func GenerateHubShareSVG(siteName, entryName, taxLabel string, count int, topTypes []NameCount) string { + var content strings.Builder + content.WriteString(fmt.Sprintf(` %s · %d recipes`, svgMuted, svgEscape(taxLabel), count)) + content.WriteString("\n") + + limit := len(topTypes) + if limit > 6 { + limit = 6 + } + bars := topTypes[:limit] + content.WriteString(renderBarsSVG(bars, 60, 220, 900, 32, 16)) + + return svgScaffold(siteName, entryName, content.String()) +} + +// GenerateTaxIndexShareSVG generates the taxonomy index share image SVG. +func GenerateTaxIndexShareSVG(siteName, taxLabel string, topEntries []NameCount) string { + var content strings.Builder + content.WriteString(fmt.Sprintf(` Browse all %s`, svgMuted, svgEscape(taxLabel))) + content.WriteString("\n") + + limit := len(topEntries) + if limit > 10 { + limit = 10 + } + bars := topEntries[:limit] + content.WriteString(renderBarsSVG(bars, 60, 210, 900, 26, 12)) + + return svgScaffold(siteName, taxLabel, content.String()) +} + +// GenerateAllEntitiesShareSVG generates the all-entities share image SVG. 
+func GenerateAllEntitiesShareSVG(siteName string, totalCount int, typeDist []NameCount) string { + var content strings.Builder + content.WriteString(fmt.Sprintf(` %d recipes`, svgAccent, totalCount)) + content.WriteString("\n") + + // Proportional bar segments + if len(typeDist) > 0 { + totalForBar := 0 + for _, t := range typeDist { + totalForBar += t.Count + } + if totalForBar == 0 { + totalForBar = 1 + } + colors := []string{"#5B7B5E", "#C4956A", "#4A7B9B", "#7C5BB0", "#A68B2D", "#B94A4A", "#6B6B6B", "#3d8b6e"} + barX := 60 + barY := 200 + barW := 1080 + barH := 40 + cx := barX + limit := len(typeDist) + if limit > 8 { + limit = 8 + } + for i := 0; i < limit; i++ { + w := (typeDist[i].Count * barW) / totalForBar + if w < 2 { + w = 2 + } + color := colors[i%len(colors)] + content.WriteString(fmt.Sprintf(` `, cx, barY, w, barH, color)) + content.WriteString("\n") + cx += w + } + + // Legend + ly := 280 + for i := 0; i < limit; i++ { + lx := 60 + (i%4)*270 + if i > 0 && i%4 == 0 { + ly += 30 + } + color := colors[i%len(colors)] + content.WriteString(fmt.Sprintf(` `, lx, ly, color)) + content.WriteString(fmt.Sprintf(` %s (%d)`, lx+18, ly+11, svgMuted, svgEscape(truncate(typeDist[i].Name, 25)), typeDist[i].Count)) + content.WriteString("\n") + } + } + + return svgScaffold(siteName, "All Recipes", content.String()) +} + +// GenerateLetterShareSVG generates the letter page share image SVG. 
+func GenerateLetterShareSVG(siteName, taxLabel, letter string, entryCount int) string { + var content strings.Builder + content.WriteString(fmt.Sprintf(` %s · %d entries`, svgMuted, svgEscape(taxLabel), entryCount)) + content.WriteString("\n") + + // Large decorative letter + content.WriteString(fmt.Sprintf(` %s`, svgText, svgEscape(letter))) + content.WriteString("\n") + content.WriteString(fmt.Sprintf(` %s`, svgAccent, svgEscape(letter))) + content.WriteString("\n") + + return svgScaffold(siteName, fmt.Sprintf("%s \u2014 %s", taxLabel, letter), content.String()) +} diff --git a/internal/archdocs/pssg/schema/jsonld.go b/internal/archdocs/pssg/schema/jsonld.go new file mode 100644 index 0000000..a8be807 --- /dev/null +++ b/internal/archdocs/pssg/schema/jsonld.go @@ -0,0 +1,325 @@ +package schema + +import ( + "encoding/json" + "fmt" + "regexp" + "strconv" + "strings" + + "github.com/supermodeltools/cli/internal/archdocs/pssg/config" + "github.com/supermodeltools/cli/internal/archdocs/pssg/entity" +) + +// Generator creates JSON-LD structured data. +type Generator struct { + SiteConfig config.SiteConfig + Schema config.SchemaConfig +} + +// NewGenerator creates a new JSON-LD generator. +func NewGenerator(siteCfg config.SiteConfig, schemaCfg config.SchemaConfig) *Generator { + return &Generator{ + SiteConfig: siteCfg, + Schema: schemaCfg, + } +} + +// GenerateRecipeSchema generates Recipe JSON-LD for an entity. 
+func (g *Generator) GenerateRecipeSchema(e *entity.Entity, entityURL string) map[string]interface{} { + schema := map[string]interface{}{ + "@context": "https://schema.org", + "@type": "Recipe", + "name": e.GetString("title"), + "description": e.GetString("description"), + "url": entityURL, + } + + // Author + authorName := e.GetString("author") + if authorName != "" { + authorSlug := entity.ToSlug(authorName) + schema["author"] = map[string]interface{}{ + "@type": "Person", + "name": authorName, + "url": fmt.Sprintf("%s/author/%s.html", g.SiteConfig.BaseURL, authorSlug), + } + } + + // Date published + schema["datePublished"] = g.Schema.DatePublished + + // Times + prepTime := e.GetString("prep_time") + cookTime := e.GetString("cook_time") + if prepTime != "" { + schema["prepTime"] = prepTime + } + if cookTime != "" { + schema["cookTime"] = cookTime + } + if prepTime != "" && cookTime != "" { + schema["totalTime"] = computeTotalTime(prepTime, cookTime) + } + + // Servings + if servings := e.GetInt("servings"); servings > 0 { + schema["recipeYield"] = fmt.Sprintf("%d servings", servings) + } + + // Category & cuisine + if cat := e.GetString("recipe_category"); cat != "" { + schema["recipeCategory"] = cat + } + if cuisine := e.GetString("cuisine"); cuisine != "" { + schema["recipeCuisine"] = cuisine + } + + // Image + if img := e.GetString("image"); img != "" { + schema["image"] = []string{img} + } + + // Nutrition + if cal := e.GetInt("calories"); cal > 0 { + schema["nutrition"] = map[string]interface{}{ + "@type": "NutritionInformation", + "calories": fmt.Sprintf("%d calories", cal), + } + } + + // Ingredients + if ingredients := e.GetIngredients(); len(ingredients) > 0 { + schema["recipeIngredient"] = ingredients + } + + // Instructions as HowToSteps + if instructions := e.GetInstructions(); len(instructions) > 0 { + var steps []map[string]interface{} + for i, inst := range instructions { + steps = append(steps, map[string]interface{}{ + "@type": "HowToStep", + 
"text": inst, + "name": stepName(inst), + "position": i + 1, + }) + } + schema["recipeInstructions"] = steps + } + + // Keywords + keywords := e.GetStringSlice("keywords") + extra := g.Schema.ExtraKeywords + allKeywords := append(keywords, extra...) + if len(allKeywords) > 0 { + schema["keywords"] = strings.Join(allKeywords, ", ") + } + + // Pairings as isRelatedTo + if pairings := e.GetStringSlice("pairings"); len(pairings) > 0 { + var related []map[string]interface{} + for _, slug := range pairings { + related = append(related, map[string]interface{}{ + "@type": "Recipe", + "name": slug, // Will be resolved to title by the builder + "url": fmt.Sprintf("%s/%s.html", g.SiteConfig.BaseURL, slug), + }) + } + schema["isRelatedTo"] = related + } + + return schema +} + +// GenerateBreadcrumbSchema generates BreadcrumbList JSON-LD. +func (g *Generator) GenerateBreadcrumbSchema(items []BreadcrumbItem) map[string]interface{} { + var listItems []map[string]interface{} + for i, item := range items { + li := map[string]interface{}{ + "@type": "ListItem", + "position": i + 1, + "name": item.Name, + } + if item.URL != "" { + li["item"] = item.URL + } + listItems = append(listItems, li) + } + + return map[string]interface{}{ + "@context": "https://schema.org", + "@type": "BreadcrumbList", + "itemListElement": listItems, + } +} + +// BreadcrumbItem is a single breadcrumb entry. +type BreadcrumbItem struct { + Name string + URL string +} + +// GenerateFAQSchema generates FAQPage JSON-LD from FAQs. 
+func (g *Generator) GenerateFAQSchema(faqs []entity.FAQ) map[string]interface{} { + if len(faqs) == 0 { + return nil + } + + var mainEntity []map[string]interface{} + for _, faq := range faqs { + mainEntity = append(mainEntity, map[string]interface{}{ + "@type": "Question", + "name": faq.Question, + "acceptedAnswer": map[string]interface{}{ + "@type": "Answer", + "text": faq.Answer, + }, + }) + } + + return map[string]interface{}{ + "@context": "https://schema.org", + "@type": "FAQPage", + "mainEntity": mainEntity, + } +} + +// GenerateWebSiteSchema generates WebSite JSON-LD. +func (g *Generator) GenerateWebSiteSchema(imageURL string) map[string]interface{} { + s := map[string]interface{}{ + "@context": "https://schema.org", + "@type": "WebSite", + "name": g.SiteConfig.Name, + "url": g.SiteConfig.BaseURL, + "description": g.SiteConfig.Description, + "publisher": map[string]interface{}{ + "@type": "Organization", + "name": g.SiteConfig.Name, + "url": g.SiteConfig.BaseURL, + }, + } + if imageURL != "" { + s["image"] = imageURL + } + return s +} + +// GenerateItemListSchema generates ItemList JSON-LD. +func (g *Generator) GenerateItemListSchema(name, description string, items []ItemListEntry, imageURL string) map[string]interface{} { + var listItems []map[string]interface{} + for i, item := range items { + listItems = append(listItems, map[string]interface{}{ + "@type": "ListItem", + "position": i + 1, + "url": item.URL, + "name": item.Name, + }) + } + + s := map[string]interface{}{ + "@context": "https://schema.org", + "@type": "ItemList", + "name": name, + "description": description, + "numberOfItems": len(items), + "itemListElement": listItems, + } + if imageURL != "" { + s["image"] = imageURL + } + return s +} + +// ItemListEntry is a single item in an ItemList. +type ItemListEntry struct { + Name string + URL string +} + +// GenerateCollectionPageSchema generates CollectionPage JSON-LD. 
+func (g *Generator) GenerateCollectionPageSchema(name, description, pageURL string, items []ItemListEntry, imageURL string) map[string]interface{} { + var listItems []map[string]interface{} + for i, item := range items { + listItems = append(listItems, map[string]interface{}{ + "@type": "ListItem", + "position": i + 1, + "url": item.URL, + "name": item.Name, + }) + } + + s := map[string]interface{}{ + "@context": "https://schema.org", + "@type": "CollectionPage", + "name": name, + "url": pageURL, + "description": description, + "mainEntity": map[string]interface{}{ + "@type": "ItemList", + "numberOfItems": len(items), + "itemListElement": listItems, + }, + } + if imageURL != "" { + s["image"] = imageURL + } + return s +} + +// MarshalSchemas encodes one or more schemas as a JSON-LD script block. +func MarshalSchemas(schemas ...map[string]interface{}) string { + var parts []string + for _, s := range schemas { + if s == nil { + continue + } + data, err := json.Marshal(s) + if err != nil { + continue + } + parts = append(parts, fmt.Sprintf(``, string(data))) + } + return strings.Join(parts, "\n") +} + +// stepName extracts a short name from an instruction step. +func stepName(step string) string { + // Take first sentence + for _, sep := range []string{". ", ".\n"} { + if idx := strings.Index(step, sep); idx > 0 && idx < 80 { + return step[:idx+1] + } + } + // Truncate if too long + if len(step) > 80 { + return step[:77] + "..." + } + return step +} + +var durationRegex = regexp.MustCompile(`PT(?:(\d+)H)?(?:(\d+)M)?(?:(\d+)S)?`) + +// parseDurationMinutes parses an ISO 8601 duration to minutes. +func parseDurationMinutes(d string) int { + matches := durationRegex.FindStringSubmatch(d) + if matches == nil { + return 0 + } + hours, _ := strconv.Atoi(matches[1]) + minutes, _ := strconv.Atoi(matches[2]) + return hours*60 + minutes +} + +// computeTotalTime adds two ISO 8601 durations and returns the result. 
func computeTotalTime(d1, d2 string) string {
	// Sum in whole minutes (seconds were already dropped by parseDurationMinutes),
	// then re-encode in the shortest ISO 8601 form: PTxHyM / PTxH / PTyM.
	// A zero total yields "PT0M".
	total := parseDurationMinutes(d1) + parseDurationMinutes(d2)
	hours := total / 60
	minutes := total % 60
	if hours > 0 && minutes > 0 {
		return fmt.Sprintf("PT%dH%dM", hours, minutes)
	}
	if hours > 0 {
		return fmt.Sprintf("PT%dH", hours)
	}
	return fmt.Sprintf("PT%dM", minutes)
}
diff --git a/internal/archdocs/pssg/taxonomy/taxonomy.go b/internal/archdocs/pssg/taxonomy/taxonomy.go
new file mode 100644
index 0000000..258c277
--- /dev/null
+++ b/internal/archdocs/pssg/taxonomy/taxonomy.go
@@ -0,0 +1,313 @@
package taxonomy

import (
	"fmt"
	"sort"
	"strings"
	"unicode"

	"github.com/supermodeltools/cli/internal/archdocs/pssg/config"
	"github.com/supermodeltools/cli/internal/archdocs/pssg/entity"
)

// Entry represents a single taxonomy value and its associated entities.
type Entry struct {
	Name     string // display value as first seen on an entity
	Slug     string // URL-safe form of Name (entity.ToSlug)
	Entities []*entity.Entity
}

// Taxonomy holds all entries for a single taxonomy type.
type Taxonomy struct {
	Name          string
	Label         string
	LabelSingular string
	Config        config.TaxonomyConfig
	Entries       []Entry // sorted alphabetically by Slug (see buildOne)
}

// PaginationInfo holds pagination state for hub pages.
type PaginationInfo struct {
	CurrentPage int
	TotalPages  int
	TotalItems  int
	StartIndex  int // half-open [StartIndex, EndIndex) range into Entry.Entities
	EndIndex    int
	PrevURL     string // empty on the first page
	NextURL     string // empty on the last page
	PageURLs    []PageURL
}

// PageURL represents a single page link in pagination.
type PageURL struct {
	Number int
	URL    string
}

// LetterGroup groups taxonomy entries by their first letter.
type LetterGroup struct {
	Letter  string // uppercase letter, or "#" for non-letter leading characters
	Entries []Entry
}

// BuildAll constructs all taxonomies from the given entities and config.
+func BuildAll(entities []*entity.Entity, taxConfigs []config.TaxonomyConfig, enrichmentData map[string]map[string]interface{}) []Taxonomy { + var taxonomies []Taxonomy + + for _, tc := range taxConfigs { + tax := buildOne(entities, tc, enrichmentData) + taxonomies = append(taxonomies, tax) + } + + return taxonomies +} + +func buildOne(entities []*entity.Entity, tc config.TaxonomyConfig, enrichmentData map[string]map[string]interface{}) Taxonomy { + // Group entities by field values + groups := make(map[string]*Entry) + + for _, e := range entities { + values := extractValues(e, tc, enrichmentData) + + if tc.Invert { + // Invert mode: for each possible value, add entities that DON'T have it + // Not commonly used - skip for now, handled separately if needed + continue + } + + for _, val := range values { + slug := entity.ToSlug(val) + if slug == "" { + continue + } + if _, ok := groups[slug]; !ok { + groups[slug] = &Entry{ + Name: val, + Slug: slug, + } + } + groups[slug].Entities = append(groups[slug].Entities, e) + } + } + + // Convert to slice and filter by min_entities + var entries []Entry + for _, entry := range groups { + if len(entry.Entities) >= tc.MinEntities { + entries = append(entries, *entry) + } + } + + // Sort alphabetically by slug + sort.Slice(entries, func(i, j int) bool { + return entries[i].Slug < entries[j].Slug + }) + + return Taxonomy{ + Name: tc.Name, + Label: tc.Label, + LabelSingular: tc.LabelSingular, + Config: tc, + Entries: entries, + } +} + +// extractValues gets the taxonomy values from an entity's field. 
+func extractValues(e *entity.Entity, tc config.TaxonomyConfig, enrichmentData map[string]map[string]interface{}) []string { + // Check for enrichment overrides + if tc.EnrichmentOverrideField != "" && enrichmentData != nil { + if ed, ok := enrichmentData[e.Slug]; ok { + if overrides := getEnrichmentOverrides(ed, tc.EnrichmentOverrideField); len(overrides) > 0 { + return overrides + } + } + } + + v, ok := e.Fields[tc.Field] + if !ok { + return nil + } + + if tc.MultiValue { + return toStringSlice(v) + } + + // Single value + if s, ok := v.(string); ok && s != "" { + return []string{s} + } + return nil +} + +// getEnrichmentOverrides extracts override values from enrichment data. +// Supports paths like "ingredients[].normalizedName" +func getEnrichmentOverrides(data map[string]interface{}, field string) []string { + // Parse path: "ingredients[].normalizedName" + parts := strings.Split(field, "[].") + if len(parts) != 2 { + // Simple field + if v, ok := data[field]; ok { + return toStringSlice(v) + } + return nil + } + + arrayField := parts[0] + subField := parts[1] + + arr, ok := data[arrayField] + if !ok { + return nil + } + + items, ok := arr.([]interface{}) + if !ok { + return nil + } + + var results []string + for _, item := range items { + if m, ok := item.(map[string]interface{}); ok { + if v, ok := m[subField]; ok { + if s, ok := v.(string); ok && s != "" { + results = append(results, s) + } + } + } + } + return results +} + +func toStringSlice(v interface{}) []string { + switch val := v.(type) { + case []string: + return val + case []interface{}: + result := make([]string, 0, len(val)) + for _, item := range val { + if s, ok := item.(string); ok { + result = append(result, s) + } + } + return result + case string: + return []string{val} + } + return nil +} + +// ComputePagination calculates pagination for a given entry. 
+func ComputePagination(entry Entry, page, perPage int, taxonomyName string) PaginationInfo { + total := len(entry.Entities) + totalPages := (total + perPage - 1) / perPage + if totalPages == 0 { + totalPages = 1 + } + + start := (page - 1) * perPage + end := start + perPage + if end > total { + end = total + } + + info := PaginationInfo{ + CurrentPage: page, + TotalPages: totalPages, + TotalItems: total, + StartIndex: start, + EndIndex: end, + } + + // Build page URLs + for p := 1; p <= totalPages; p++ { + info.PageURLs = append(info.PageURLs, PageURL{ + Number: p, + URL: HubPageURL(taxonomyName, entry.Slug, p), + }) + } + + if page > 1 { + info.PrevURL = HubPageURL(taxonomyName, entry.Slug, page-1) + } + if page < totalPages { + info.NextURL = HubPageURL(taxonomyName, entry.Slug, page+1) + } + + return info +} + +// HubPageURL returns the URL path for a hub page. +func HubPageURL(taxonomyName, entrySlug string, page int) string { + if page == 1 { + return fmt.Sprintf("/%s/%s.html", taxonomyName, entrySlug) + } + return fmt.Sprintf("/%s/%s-page-%d.html", taxonomyName, entrySlug, page) +} + +// GroupByLetter groups taxonomy entries by their first letter for A-Z pages. +func GroupByLetter(entries []Entry) []LetterGroup { + groups := make(map[string][]Entry) + var letters []string + + for _, entry := range entries { + if len(entry.Name) == 0 { + continue + } + first := unicode.ToUpper(rune(entry.Name[0])) + var letter string + if unicode.IsLetter(first) { + letter = string(first) + } else { + letter = "#" + } + + if _, ok := groups[letter]; !ok { + letters = append(letters, letter) + } + groups[letter] = append(groups[letter], entry) + } + + sort.Strings(letters) + + var result []LetterGroup + for _, letter := range letters { + result = append(result, LetterGroup{ + Letter: letter, + Entries: groups[letter], + }) + } + + return result +} + +// FindEntry returns the entry with the given slug, or nil. 
func (t *Taxonomy) FindEntry(slug string) *Entry {
	// Index by position and return a pointer into t.Entries so the caller
	// sees the canonical entry (not a copy).
	for i := range t.Entries {
		if t.Entries[i].Slug == slug {
			return &t.Entries[i]
		}
	}
	return nil
}

// LetterPageURL returns the URL path for a letter page.
// The "#" bucket (non-letter names) maps to "num" to keep the URL safe.
func LetterPageURL(taxonomyName, letter string) string {
	l := strings.ToLower(letter)
	if l == "#" {
		l = "num"
	}
	return fmt.Sprintf("/%s/letter-%s.html", taxonomyName, l)
}

// TopEntries returns the top N entries sorted by entity count (descending).
// Sorts a copy so the caller's (alphabetical) ordering is preserved.
func TopEntries(entries []Entry, n int) []Entry {
	sorted := make([]Entry, len(entries))
	copy(sorted, entries)
	sort.Slice(sorted, func(i, j int) bool {
		return len(sorted[i].Entities) > len(sorted[j].Entities)
	})
	if n > len(sorted) {
		n = len(sorted)
	}
	return sorted[:n]
}
diff --git a/internal/archdocs/templates/_footer.html b/internal/archdocs/templates/_footer.html
new file mode 100644
index 0000000..98cf496
--- /dev/null
+++ b/internal/archdocs/templates/_footer.html
@@ -0,0 +1,5 @@
+
diff --git a/internal/archdocs/templates/_head.html b/internal/archdocs/templates/_head.html
new file mode 100644
index 0000000..f21d0eb
--- /dev/null
+++ b/internal/archdocs/templates/_head.html
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
diff --git a/internal/archdocs/templates/_header.html b/internal/archdocs/templates/_header.html
new file mode 100644
index 0000000..be20f32
--- /dev/null
+++ b/internal/archdocs/templates/_header.html
@@ -0,0 +1,37 @@
+
+
+
+
+
diff --git a/internal/archdocs/templates/_main.js b/internal/archdocs/templates/_main.js
new file mode 100644
index 0000000..370d523
--- /dev/null
+++ b/internal/archdocs/templates/_main.js
@@ -0,0 +1,783 @@
+function toSlug(s) { return s.toLowerCase().replace(/[^a-z0-9]+/g, "-").replace(/^-+|-+$/g, ""); }
+
+window.addEventListener("load", function() {
+
+  // --- Architecture Map ---
+  var archDataEl = document.getElementById("arch-map-data");
+  var archEl = document.getElementById("arch-map-container");
+  if (archDataEl && archEl) {
+    try
{ + var raw = archDataEl.textContent.trim(); + var data = JSON.parse(raw); + if (typeof data === "string") data = JSON.parse(data); + var svgContainer = archEl.querySelector(".arch-map-svg"); + if (data && svgContainer) { + var items = []; + if (data.domain) items.push(data.domain); + if (data.subdomain) items.push(data.subdomain); + if (data.file) items.push(data.file); + if (data.entity) items.push(data.entity); + + if (items.length > 1) { + var boxW = 140, boxH = 36, arrowW = 28, pad = 12; + var totalW = items.length * boxW + (items.length - 1) * arrowW + pad * 2; + var totalH = boxH + pad * 2; + var svg = ''; + + for (var i = 0; i < items.length; i++) { + var x = pad + i * (boxW + arrowW); + var y = pad; + var isLast = i === items.length - 1; + var fill = isLast ? "#71B9BC" : "#161616"; + var stroke = isLast ? "#8CC6C9" : "#202020"; + var textColor = isLast ? "#fff" : "#FFFFFF"; + var label = items[i].name || ""; + if (label.length > 16) label = label.substring(0, 14) + ".."; + + if (items[i].slug && !isLast) { + svg += ''; + } + svg += ''; + svg += '' + label + ''; + if (items[i].slug && !isLast) { + svg += ''; + } + + if (i < items.length - 1) { + var ax = x + boxW + 4; + var ay = y + boxH / 2; + svg += ''; + svg += ''; + } + } + + svg += ''; + svgContainer.innerHTML = svg; + } + } + } catch (e) { + console.error("Architecture map error:", e); + } + } + + // --- Force-Directed Graph (D3) — enriched nodes --- + var graphDataEl = document.getElementById("graph-data"); + var graphEl = document.getElementById("force-graph"); + if (graphDataEl && graphEl && typeof d3 !== "undefined") { + try { + var rawGraph = graphDataEl.textContent.trim(); + var graphData = JSON.parse(rawGraph); + if (typeof graphData === "string") graphData = JSON.parse(graphData); + var centerSlug = graphEl.getAttribute("data-center"); + + if (graphData && graphData.nodes && graphData.nodes.length > 1) { + var width = graphEl.clientWidth || 600; + var height = 420; + + var typeColors = { + 
File: "#5C9699", Function: "#7CCE86", Class: "#D0A27D", + Type: "#E589C6", Domain: "#71B9BC", Subdomain: "#8E8CE9", Directory: "#808080" + }; + var edgeColors = { + imports: "#5C9699", calls: "#7CCE86", defines: "#D0A27D", + extends: "#E589C6", contains: "#808080", belongsTo: "#8E8CE9", partOf: "#71B9BC" + }; + + // Compute node radius from enriched lineCount data + var maxLC = d3.max(graphData.nodes, function(d) { return d.lc || 0; }) || 1; + var rScale = d3.scaleSqrt().domain([0, maxLC]).range([6, 22]); + function nodeR(d) { + if (d.slug === centerSlug) return Math.max(rScale(d.lc || 0), 14); + if (d.lc > 0) return rScale(d.lc); + return 7; + } + + var svg = d3.select(graphEl).append("svg").attr("width", width).attr("height", height); + + // Edge type legend at top + var legendTypes = {}; + graphData.edges.forEach(function(e) { legendTypes[e.type] = true; }); + var legendKeys = Object.keys(legendTypes); + var lgX = 4; + legendKeys.forEach(function(t) { + svg.append("rect").attr("x", lgX).attr("y", 4).attr("width", 10).attr("height", 10).attr("rx", 2) + .attr("fill", edgeColors[t] || "#202020"); + svg.append("text").attr("x", lgX + 14).attr("y", 12).attr("fill", "#808080").attr("font-size", "10px") + .attr("font-family", "Public Sans,system-ui,sans-serif").text(t); + lgX += t.length * 6 + 26; + }); + + var simulation = d3.forceSimulation(graphData.nodes) + .force("link", d3.forceLink(graphData.edges).id(function(d) { return d.id; }).distance(90)) + .force("charge", d3.forceManyBody().strength(-250)) + .force("center", d3.forceCenter(width / 2, height / 2 + 10)) + .force("collision", d3.forceCollide().radius(function(d) { return nodeR(d) + 8; })); + + var link = svg.append("g").selectAll("line").data(graphData.edges).enter().append("line") + .attr("stroke", function(d) { return edgeColors[d.type] || "#202020"; }) + .attr("stroke-opacity", 0.6).attr("stroke-width", 1.5); + + var node = svg.append("g").selectAll("g").data(graphData.nodes).enter().append("g") + 
.style("cursor", function(d) { return d.slug ? "pointer" : "default"; }) + .call(d3.drag() + .on("start", function(event, d) { + if (!event.active) simulation.alphaTarget(0.3).restart(); + d.fx = d.x; d.fy = d.y; + }) + .on("drag", function(event, d) { d.fx = event.x; d.fy = event.y; }) + .on("end", function(event, d) { + if (!event.active) simulation.alphaTarget(0); + d.fx = null; d.fy = null; + }) + ); + + node.append("circle") + .attr("r", nodeR) + .attr("fill", function(d) { return typeColors[d.type] || "#808080"; }) + .attr("stroke", function(d) { return d.slug === centerSlug ? "#fff" : "none"; }) + .attr("stroke-width", function(d) { return d.slug === centerSlug ? 2.5 : 0; }) + .attr("opacity", function(d) { return d.slug === centerSlug ? 1 : 0.85; }); + + // Show line count inside larger nodes + node.filter(function(d) { return d.lc > 0 && nodeR(d) >= 14; }).append("text") + .text(function(d) { return d.lc; }) + .attr("text-anchor", "middle").attr("y", 4).attr("fill", "#fff") + .attr("font-size", "9px").attr("font-weight", "600") + .attr("font-family", "Public Sans,system-ui,sans-serif"); + + node.append("text") + .text(function(d) { var l = d.label || ""; return l.length > 22 ? l.substring(0, 20) + ".." 
: l; }) + .attr("x", 0) + .attr("y", function(d) { return -(nodeR(d) + 4); }) + .attr("text-anchor", "middle").attr("fill", "#808080") + .attr("font-size", "11px").attr("font-family", "Public Sans,system-ui,sans-serif"); + + // Enriched tooltip + node.append("title").text(function(d) { + var parts = [d.label, d.type]; + if (d.lang) parts.push(d.lang); + if (d.lc) parts.push(d.lc + " lines"); + if (d.cc) parts.push("calls " + d.cc); + if (d.cbc) parts.push("called by " + d.cbc); + return parts.join(" · "); + }); + + node.on("click", function(event, d) { + if (d.slug) window.location.href = __basePath + "/" + d.slug + ".html"; + }); + + simulation.on("tick", function() { + link.attr("x1", function(d) { return d.source.x; }).attr("y1", function(d) { return d.source.y; }) + .attr("x2", function(d) { return d.target.x; }).attr("y2", function(d) { return d.target.y; }); + node.attr("transform", function(d) { + d.x = Math.max(24, Math.min(width - 24, d.x)); + d.y = Math.max(24, Math.min(height - 24, d.y)); + return "translate(" + d.x + "," + d.y + ")"; + }); + }); + + var fgResizeTimer; + window.addEventListener("resize", function() { + clearTimeout(fgResizeTimer); + fgResizeTimer = setTimeout(function() { + width = graphEl.clientWidth || 600; + svg.attr("width", width); + simulation.force("center", d3.forceCenter(width / 2, height / 2 + 10)); + simulation.alpha(0.3).restart(); + }, 150); + }); + } + } catch (e) { + console.error("Force graph error:", e); + } + } + + // --- Entity Profile Chart (compact format) --- + var epDataEl = document.getElementById("entity-profile-data"); + var epChartEl = document.getElementById("entity-profile-chart"); + if (epDataEl && epChartEl && typeof d3 !== "undefined") { + try { + var ep = JSON.parse(epDataEl.textContent.trim()); + var epW = epChartEl.clientWidth || 700; + + // Build metrics from compact keys + var metricDefs = [ + { key: "lc", label: "Lines of Code", color: "#71B9BC" }, + { key: "co", label: "Calls Out", color: "#5C9699" 
}, + { key: "cb", label: "Called By", color: "#7CCE86" }, + { key: "ic", label: "Imports", color: "#D0A27D" }, + { key: "ib", label: "Imported By", color: "#8E8CE9" }, + { key: "fn", label: "Functions", color: "#A3A2ED" }, + { key: "cl", label: "Classes", color: "#E589C6" }, + { key: "tc", label: "Types", color: "#A98466" }, + { key: "fc", label: "Files", color: "#808080" } + ]; + var metrics = metricDefs.filter(function(d) { return ep[d.key] !== undefined; }) + .map(function(d) { return { label: d.label, value: ep[d.key], color: d.color }; }); + + // Edge types from compact map {type: count} + var et = ep.et || {}; + var edgeTypes = Object.keys(et).map(function(k) { return { type: k, count: et[k] }; }) + .sort(function(a, b) { return b.count - a.count; }); + + var epEdgeColors = { + calls: "#5C9699", defines: "#7CCE86", belongsTo: "#8E8CE9", + imports: "#D0A27D", extends: "#E589C6", contains: "#808080", partOf: "#71B9BC" + }; + + var hasAnyValue = metrics.some(function(d) { return d.value > 0; }); + var hasMetrics = metrics.length > 0; + var hasEdges = edgeTypes.length > 0; + if (!hasAnyValue && !hasEdges && !(ep.sl > 0 && ep.el > 0)) { + var epPanel = epChartEl.closest(".chart-panel, .entity-profile-panel"); + if (epPanel) epPanel.style.display = "none"; + } + var metricsH = hasMetrics ? metrics.length * 32 + 8 : 0; + var edgesH = hasEdges ? Math.max(edgeTypes.length * 22 + 40, 56) : 0; + var fileBarH = (ep.sl > 0 && ep.el > 0) ? 
44 : 0; + var totalH = metricsH + edgesH + fileBarH + 4; + if (totalH < 40) totalH = 40; + + var svg = d3.select(epChartEl).append("svg").attr("width", epW).attr("height", totalH); + var yOff = 0; + + if (hasMetrics) { + var maxVal = d3.max(metrics, function(d) { return d.value; }) || 1; + var labelW = 100; + var barMaxW = Math.min(epW - labelW - 70, 400); + var barScale = d3.scaleLinear().domain([0, maxVal]).range([0, barMaxW]); + metrics.forEach(function(m, i) { + var y = yOff + i * 32 + 4; + svg.append("text").attr("x", labelW - 6).attr("y", y + 13).attr("text-anchor", "end") + .attr("fill", "#808080").attr("font-size", "12px").attr("font-family", "Public Sans,system-ui,sans-serif").text(m.label); + svg.append("rect").attr("x", labelW).attr("y", y).attr("width", Math.max(barScale(m.value), 4)).attr("height", 20) + .attr("rx", 3).attr("fill", m.color).attr("opacity", 0.85); + svg.append("text").attr("x", labelW + Math.max(barScale(m.value), 4) + 6).attr("y", y + 14) + .attr("fill", "#FFFFFF").attr("font-size", "13px").attr("font-weight", "600") + .attr("font-family", "Public Sans,system-ui,sans-serif").text(m.value); + }); + yOff += metricsH; + } + + if (hasEdges) { + var totalEdgeCount = edgeTypes.reduce(function(s, d) { return s + d.count; }, 0); + var stackW = Math.min(epW - 130, 500); + var stackScale = d3.scaleLinear().domain([0, totalEdgeCount]).range([0, stackW]); + var sx = 100, sy = yOff + 6; + svg.append("text").attr("x", 0).attr("y", sy + 2).attr("fill", "#808080").attr("font-size", "11px") + .attr("font-weight", "600").attr("font-family", "Public Sans,system-ui,sans-serif").text("RELATIONSHIPS"); + var cx = sx; + edgeTypes.forEach(function(e, i) { + var w = Math.max(stackScale(e.count), 3); + svg.append("rect").attr("x", cx).attr("y", sy - 6).attr("width", w).attr("height", 18) + .attr("rx", i === 0 ? 
3 : 0).attr("fill", epEdgeColors[e.type] || "#808080").attr("opacity", 0.85); + cx += w; + }); + var ly = sy + 18, lx = sx; + edgeTypes.forEach(function(e) { + svg.append("rect").attr("x", lx).attr("y", ly).attr("width", 8).attr("height", 8).attr("rx", 2) + .attr("fill", epEdgeColors[e.type] || "#808080"); + svg.append("text").attr("x", lx + 12).attr("y", ly + 7).attr("fill", "#808080").attr("font-size", "10px") + .attr("font-family", "Public Sans,system-ui,sans-serif").text(e.type + " " + e.count); + lx += e.type.length * 6.5 + 36; + if (lx > epW - 60) { lx = sx; ly += 16; } + }); + yOff += edgesH; + } + + if (ep.sl > 0 && ep.el > 0) { + var fy = yOff + 8, fw = Math.min(epW - 130, 500), fx = 100; + svg.append("text").attr("x", 0).attr("y", fy + 2).attr("fill", "#808080").attr("font-size", "11px") + .attr("font-weight", "600").attr("font-family", "Public Sans,system-ui,sans-serif").text("FILE POSITION"); + svg.append("rect").attr("x", fx).attr("y", fy - 5).attr("width", fw).attr("height", 14).attr("rx", 3) + .attr("fill", "#161616").attr("stroke", "#202020").attr("stroke-width", 1); + var est = Math.max(ep.el * 1.15, ep.el + 20); + var hx = fx + (ep.sl / est) * fw, hw = Math.max(((ep.el - ep.sl) / est) * fw, 3); + svg.append("rect").attr("x", hx).attr("y", fy - 5).attr("width", hw).attr("height", 14).attr("rx", 2) + .attr("fill", "#71B9BC").attr("opacity", 0.8); + svg.append("text").attr("x", fx + fw + 6).attr("y", fy + 4).attr("fill", "#808080").attr("font-size", "10px") + .attr("font-family", "Public Sans,system-ui,sans-serif").text("L" + ep.sl + "–" + ep.el); + } + } catch (e) { console.error("Entity profile chart error:", e); } + } + + // --- Architecture Overview (Homepage Force Graph) --- + var archOverDataEl = document.getElementById("arch-overview-data"); + var archOverEl = document.getElementById("arch-overview"); + if (archOverDataEl && archOverEl && typeof d3 !== "undefined") { + try { + var archData = JSON.parse(archOverDataEl.textContent.trim()); + if 
(archData && archData.nodes && archData.nodes.length > 1) { + var aoW = archOverEl.clientWidth || 800; + var aoH = 420; + var aoTypeColors = { root: "#71B9BC", domain: "#5C9699", subdomain: "#8E8CE9" }; + var aoSvg = d3.select(archOverEl).append("svg").attr("width", aoW).attr("height", aoH); + + var maxCount = d3.max(archData.nodes, function(d) { return d.count; }) || 1; + var radiusScale = d3.scaleSqrt().domain([0, maxCount]).range([8, 36]); + + var aoSim = d3.forceSimulation(archData.nodes) + .force("link", d3.forceLink(archData.links).id(function(d) { return d.id; }).distance(function(d) { + return d.source.type === "root" || d.source === "root" ? 140 : 90; + })) + .force("charge", d3.forceManyBody().strength(-300)) + .force("center", d3.forceCenter(aoW / 2, aoH / 2)) + .force("collision", d3.forceCollide().radius(function(d) { return radiusScale(d.count) + 12; })); + + var aoLink = aoSvg.append("g").selectAll("line").data(archData.links).enter().append("line") + .attr("stroke", "#202020").attr("stroke-opacity", 0.6).attr("stroke-width", 1.5); + + var aoNode = aoSvg.append("g").selectAll("g").data(archData.nodes).enter().append("g") + .style("cursor", function(d) { return d.slug ? "pointer" : "default"; }) + .call(d3.drag() + .on("start", function(event, d) { + if (!event.active) aoSim.alphaTarget(0.3).restart(); + d.fx = d.x; d.fy = d.y; + }) + .on("drag", function(event, d) { d.fx = event.x; d.fy = event.y; }) + .on("end", function(event, d) { + if (!event.active) aoSim.alphaTarget(0); + d.fx = null; d.fy = null; + }) + ); + + aoNode.append("circle") + .attr("r", function(d) { return d.type === "root" ? 24 : radiusScale(d.count); }) + .attr("fill", function(d) { return aoTypeColors[d.type] || "#808080"; }) + .attr("opacity", 0.9) + .attr("stroke", function(d) { return d.type === "root" ? "#8CC6C9" : "none"; }) + .attr("stroke-width", function(d) { return d.type === "root" ? 
2 : 0; }); + + aoNode.append("text") + .text(function(d) { var l = d.name; return l.length > 20 ? l.substring(0, 18) + ".." : l; }) + .attr("x", 0) + .attr("y", function(d) { return (d.type === "root" ? 24 : radiusScale(d.count)) + 14; }) + .attr("text-anchor", "middle").attr("fill", "#808080") + .attr("font-size", function(d) { return d.type === "root" ? "13px" : "11px"; }) + .attr("font-weight", function(d) { return d.type === "root" ? "600" : "400"; }) + .attr("font-family", "Public Sans,system-ui,sans-serif"); + + aoNode.filter(function(d) { return d.type !== "root" && d.count > 0; }).append("text") + .text(function(d) { return d.count; }) + .attr("text-anchor", "middle").attr("y", 4).attr("fill", "#fff") + .attr("font-size", "11px").attr("font-weight", "600") + .attr("font-family", "Public Sans,system-ui,sans-serif"); + + aoNode.on("click", function(event, d) { + if (d.slug) window.location.href = __basePath + "/" + d.slug + ".html"; + }); + + aoNode.append("title").text(function(d) { + return d.name + (d.count ? 
" (" + d.count + " entities)" : ""); + }); + + aoSim.on("tick", function() { + aoLink.attr("x1", function(d) { return d.source.x; }).attr("y1", function(d) { return d.source.y; }) + .attr("x2", function(d) { return d.target.x; }).attr("y2", function(d) { return d.target.y; }); + aoNode.attr("transform", function(d) { + d.x = Math.max(40, Math.min(aoW - 40, d.x)); + d.y = Math.max(40, Math.min(aoH - 40, d.y)); + return "translate(" + d.x + "," + d.y + ")"; + }); + }); + + var aoResizeTimer; + window.addEventListener("resize", function() { + clearTimeout(aoResizeTimer); + aoResizeTimer = setTimeout(function() { + aoW = archOverEl.clientWidth || 800; + aoSvg.attr("width", aoW); + aoSim.force("center", d3.forceCenter(aoW / 2, aoH / 2)); + aoSim.alpha(0.3).restart(); + }, 150); + }); + } + } catch (e) { console.error("Architecture overview error:", e); } + } + + // --- Homepage Composition (flexbox) --- + var hpDataEl = document.getElementById("homepage-chart-data"); + var hpChartEl = document.getElementById("homepage-chart"); + if (hpDataEl && hpChartEl) { + try { + var hpData = JSON.parse(hpDataEl.textContent.trim()); + var hpColors = ["#71B9BC", "#5C9699", "#7CCE86", "#D0A27D", "#E589C6", "#8E8CE9", "#A3A2ED", "#808080"]; + var children = (hpData.taxonomies || []).map(function(t) { + return { name: t.name, value: t.count, slug: t.slug }; + }).sort(function(a, b) { return b.value - a.value; }); + + if (children.length > 0) { + var total = children.reduce(function(s, d) { return s + d.value; }, 0); + var container = document.createElement("div"); + container.className = "hp-composition"; + children.forEach(function(d, i) { + var el = document.createElement("a"); + el.href = __basePath + "/" + d.slug + "/index.html"; + el.className = "hp-comp-item"; + var pct = (d.value / total) * 100; + el.style.flexBasis = Math.max(pct, 10) + "%"; + el.style.flexGrow = "0"; + el.style.flexShrink = "0"; + el.style.backgroundColor = hpColors[i % hpColors.length]; + el.title = d.name + 
": " + d.value + " entries"; + el.innerHTML = '' + d.name + '' + d.value + ''; + container.appendChild(el); + }); + hpChartEl.appendChild(container); + } + } catch (e) { console.error("Homepage composition error:", e); } + } + + // --- Hub Charts (donut + top entities) --- + var hubDataEl = document.getElementById("hub-chart-data"); + var hubChartEl = document.getElementById("hub-chart"); + var hubSecEl = document.getElementById("hub-chart-secondary"); + if (hubDataEl && hubChartEl && typeof d3 !== "undefined") { + try { + var hubData = JSON.parse(hubDataEl.textContent.trim()); + var distributions = hubData.distributions || {}; + var hasAnyDist = Object.keys(distributions).some(function(k) { return distributions[k] && distributions[k].length > 0; }); + var hasTopEnts = hubData.topEntities && hubData.topEntities.length > 0; + var isDirectory = hubData.entryName === "Directory"; + if (isDirectory && !hasAnyDist && !hasTopEnts) { + var hubPanel = hubChartEl.closest(".chart-panel"); + if (hubPanel) hubPanel.style.display = "none"; + } + var hubColors = ["#71B9BC", "#5C9699", "#7CCE86", "#D0A27D", "#E589C6", "#8E8CE9", "#A3A2ED", "#808080"]; + var dimLabels = { node_type: "Node Types", language: "Languages", domain: "Domains", extension: "File Extensions" }; + var dimOrder = ["node_type", "language", "domain", "extension"]; + + // Pick the distribution with the most entries (>1 entry preferred) + var bestKey = null; + var bestLen = 0; + dimOrder.forEach(function(key) { + var arr = distributions[key] || []; + if (arr.length > bestLen) { bestLen = arr.length; bestKey = key; } + }); + + var dist = bestKey ? 
(distributions[bestKey] || []) : []; + + // LEFT: Donut or profile bars + if (dist.length > 1) { + var hubW = hubChartEl.clientWidth || 400; + var hubH = 220; + var radius = Math.min(hubW * 0.3, hubH * 0.42); + var innerR = radius * 0.55; + var pie = d3.pie().value(function(d) { return d.count; }).sort(null); + var arc = d3.arc().innerRadius(innerR).outerRadius(radius); + var svg = d3.select(hubChartEl).append("svg").attr("width", hubW).attr("height", hubH); + var cx = Math.min(hubH / 2 + 10, hubW * 0.3); + var g = svg.append("g").attr("transform", "translate(" + cx + "," + (hubH / 2) + ")"); + var arcs = g.selectAll("path").data(pie(dist)).enter().append("path").attr("d", arc).attr("fill", function(d, i) { return hubColors[i % hubColors.length]; }).attr("stroke", "#000000").attr("stroke-width", 2).style("cursor", "pointer") + .on("click", function(event, d) { window.location.href = __basePath + "/" + bestKey + "/" + toSlug(d.data.name) + ".html"; }); + arcs.append("title").text(function(d) { return d.data.name + ": " + d.data.count; }); + g.append("text").attr("text-anchor", "middle").attr("y", 6).attr("fill", "#FFFFFF").attr("font-size", "20px").attr("font-weight", "700").attr("font-family", "Public Sans,system-ui,sans-serif").text(hubData.totalEntities || ""); + svg.append("text").attr("x", cx).attr("y", hubH - 4).attr("text-anchor", "middle").attr("fill", "#808080").attr("font-size", "11px").attr("font-family", "Public Sans,system-ui,sans-serif").text(dimLabels[bestKey] || bestKey); + var legendX = cx + radius + 20; + dist.forEach(function(d, i) { + if (i >= 8) return; + var ly = 16 + i * 22; + var lg = svg.append("g").style("cursor", "pointer").on("click", function() { window.location.href = __basePath + "/" + bestKey + "/" + toSlug(d.name) + ".html"; }); + lg.append("rect").attr("x", legendX).attr("y", ly).attr("width", 10).attr("height", 10).attr("rx", 2).attr("fill", hubColors[i % hubColors.length]); + lg.append("text").attr("x", legendX + 16).attr("y", ly 
+ 9).attr("fill", "#808080").attr("font-size", "11px").attr("font-family", "Public Sans,system-ui,sans-serif").text(d.name + " (" + d.count + ")"); + }); + } else { + var profileBars = []; + dimOrder.forEach(function(key) { + var arr = distributions[key] || []; + if (arr.length > 0) { + profileBars.push({ name: dimLabels[key] || key, count: arr.length, detail: arr.map(function(d) { return d.name; }).join(", ") }); + } + }); + if (profileBars.length > 0) { + var pbW = hubChartEl.clientWidth || 400; + var pbBarH = 28; + var pbGap = 5; + var pbH = profileBars.length * (pbBarH + pbGap) + 30; + var pbLabelW = 120; + var pbMax = d3.max(profileBars, function(d) { return d.count; }) || 1; + var pbScale = d3.scaleLinear().domain([0, pbMax]).range([0, pbW - pbLabelW - 100]); + var svg = d3.select(hubChartEl).append("svg").attr("width", pbW).attr("height", pbH); + svg.append("text").attr("x", 0).attr("y", 14).attr("fill", "#808080").attr("font-size", "11px").attr("font-family", "Public Sans,system-ui,sans-serif").text(hubData.entryName + " — " + hubData.totalEntities + " entities"); + profileBars.forEach(function(d, i) { + var y = 24 + i * (pbBarH + pbGap); + svg.append("text").attr("x", pbLabelW - 6).attr("y", y + pbBarH / 2 + 4).attr("text-anchor", "end").attr("fill", "#808080").attr("font-size", "12px").attr("font-family", "Public Sans,system-ui,sans-serif").text(d.name); + svg.append("rect").attr("x", pbLabelW).attr("y", y).attr("width", Math.max(pbScale(d.count), 4)).attr("height", pbBarH).attr("rx", 3).attr("fill", hubColors[i % hubColors.length]).attr("opacity", 0.85); + svg.append("text").attr("x", pbLabelW + Math.max(pbScale(d.count), 4) + 6).attr("y", y + pbBarH / 2 + 4).attr("fill", "#FFFFFF").attr("font-size", "12px").attr("font-weight", "600").attr("font-family", "Public Sans,system-ui,sans-serif").text(d.detail); + }); + } + } + + // RIGHT: Top entities by line count + var topEnts = hubData.topEntities || []; + if (hubSecEl && topEnts.length > 0) { + var teW = 
hubSecEl.clientWidth || 400; + var teBarH = 22; + var teGap = 3; + var teH = topEnts.length * (teBarH + teGap) + 24; + var teLabelW = Math.min(teW * 0.45, 200); + var teMax = d3.max(topEnts, function(d) { return d.lines; }) || 1; + var teScale = d3.scaleLinear().domain([0, teMax]).range([0, teW - teLabelW - 60]); + var typeColors = { Function: "#7CCE86", Class: "#D0A27D", File: "#5C9699", Type: "#E589C6", Domain: "#71B9BC", Subdomain: "#8E8CE9" }; + + var teSvg = d3.select(hubSecEl).append("svg").attr("width", teW).attr("height", teH); + teSvg.append("text").attr("x", 0).attr("y", 12).attr("fill", "#808080").attr("font-size", "11px").attr("font-weight", "600") + .attr("text-transform", "uppercase").attr("letter-spacing", "0.04em") + .attr("font-family", "Public Sans,system-ui,sans-serif").text("LARGEST BY LINES OF CODE"); + + topEnts.forEach(function(d, i) { + var y = 22 + i * (teBarH + teGap); + var label = d.name.replace(/ — .*/, ""); + if (label.length > 26) label = label.substring(0, 24) + ".."; + var g = teSvg.append("g").style("cursor", "pointer") + .on("click", function() { window.location.href = __basePath + "/" + d.slug + ".html"; }); + g.append("text").attr("x", teLabelW - 6).attr("y", y + teBarH / 2 + 4).attr("text-anchor", "end") + .attr("fill", "#808080").attr("font-size", "11px").attr("font-family", "Public Sans,system-ui,sans-serif").text(label); + g.append("rect").attr("x", teLabelW).attr("y", y).attr("width", Math.max(teScale(d.lines), 3)).attr("height", teBarH) + .attr("rx", 3).attr("fill", typeColors[d.type] || "#71B9BC").attr("opacity", 0.85); + g.append("text").attr("x", teLabelW + Math.max(teScale(d.lines), 3) + 5).attr("y", y + teBarH / 2 + 4) + .attr("fill", "#808080").attr("font-size", "10px").attr("font-family", "Public Sans,system-ui,sans-serif").text(d.lines); + g.append("title").text(d.name + " (" + d.type + ") — " + d.lines + " lines"); + }); + } + } catch (e) { console.error("Hub chart error:", e); } + } + + // --- Taxonomy Index Bar 
Chart --- + var taxDataEl = document.getElementById("taxonomy-chart-data"); + var taxChartEl = document.getElementById("taxonomy-chart"); + if (taxDataEl && taxChartEl && typeof d3 !== "undefined") { + try { + var taxData = JSON.parse(taxDataEl.textContent.trim()); + var entries = (taxData.entries || []).slice(0, 20); + var taxKey = taxData.taxonomyKey || ""; + if (entries.length > 0) { + var taxW = taxChartEl.clientWidth || 800; + var barH = 26; + var gap = 4; + var taxH = entries.length * (barH + gap); + var labelW = 180; + var maxCount = d3.max(entries, function(d) { return d.count; }) || 1; + var barScale = d3.scaleLinear().domain([0, maxCount]).range([0, taxW - labelW - 80]); + var svg = d3.select(taxChartEl).append("svg").attr("width", taxW).attr("height", taxH); + entries.forEach(function(d, i) { + var y = i * (barH + gap); + var label = d.name.length > 22 ? d.name.substring(0, 20) + ".." : d.name; + var g = svg.append("g").style("cursor", "pointer").on("click", function() { if (taxKey) window.location.href = __basePath + "/" + taxKey + "/" + toSlug(d.name) + ".html"; }); + g.append("text").attr("x", labelW - 8).attr("y", y + barH / 2 + 4).attr("text-anchor", "end").attr("fill", "#808080").attr("font-size", "13px").attr("font-family", "Public Sans,system-ui,sans-serif").text(label); + g.append("rect").attr("x", labelW).attr("y", y).attr("width", Math.max(barScale(d.count), 4)).attr("height", barH).attr("rx", 3).attr("fill", "#71B9BC").attr("opacity", 0.85); + g.append("text").attr("x", labelW + Math.max(barScale(d.count), 4) + 8).attr("y", y + barH / 2 + 4).attr("fill", "#808080").attr("font-size", "12px").attr("font-family", "Public Sans,system-ui,sans-serif").text(d.count); + }); + } + } catch (e) { console.error("Taxonomy chart error:", e); } + } + + // --- All Entities Packed Circles --- + var aeDataEl = document.getElementById("all-entities-chart-data"); + var aeChartEl = document.getElementById("all-entities-chart"); + if (aeDataEl && aeChartEl && 
typeof d3 !== "undefined") { + try { + var aeData = JSON.parse(aeDataEl.textContent.trim()); + var types = aeData.typeDistribution || []; + if (types.length > 0) { + var aeW = aeChartEl.clientWidth || 800; + var aeH = 320; + var aeColors = ["#71B9BC", "#5C9699", "#7CCE86", "#D0A27D", "#E589C6", "#8E8CE9", "#A3A2ED", "#808080"]; + var root = d3.hierarchy({ children: types }).sum(function(d) { return d.count || 0; }); + d3.pack().size([aeW, aeH]).padding(4)(root); + var svg = d3.select(aeChartEl).append("svg").attr("width", aeW).attr("height", aeH); + var node = svg.selectAll("g").data(root.leaves()).enter().append("g").attr("transform", function(d) { return "translate(" + d.x + "," + d.y + ")"; }) + .style("cursor", "pointer").on("click", function(event, d) { window.location.href = __basePath + "/node_type/" + toSlug(d.data.name) + ".html"; }); + node.append("circle").attr("r", function(d) { return d.r; }).attr("fill", function(d, i) { return aeColors[i % aeColors.length]; }).attr("opacity", 0.8).attr("stroke", "#000000").attr("stroke-width", 1); + node.append("text").attr("text-anchor", "middle").attr("y", -4).attr("fill", "#fff").attr("font-size", function(d) { return Math.max(10, Math.min(16, d.r / 3)) + "px"; }).attr("font-weight", "600").attr("font-family", "Public Sans,system-ui,sans-serif").text(function(d) { return d.r > 25 ? d.data.name : ""; }); + node.append("text").attr("text-anchor", "middle").attr("y", 12).attr("fill", "rgba(255,255,255,0.7)").attr("font-size", "11px").attr("font-family", "Public Sans,system-ui,sans-serif").text(function(d) { return d.r > 20 ? 
d.data.count : ""; }); + node.append("title").text(function(d) { return d.data.name + ": " + d.data.count; }); + } + } catch (e) { console.error("All entities chart error:", e); } + } + + // --- Letter Page Bar Chart --- + var ltDataEl = document.getElementById("letter-chart-data"); + var ltChartEl = document.getElementById("letter-chart"); + if (ltDataEl && ltChartEl && typeof d3 !== "undefined") { + try { + var ltData = JSON.parse(ltDataEl.textContent.trim()); + var ltEntries = (ltData.entries || []).slice(0, 15); + var ltKey = ltData.taxonomyKey || ""; + if (ltEntries.length > 0) { + var ltW = ltChartEl.clientWidth || 800; + var ltBarH = 26; + var ltGap = 4; + var ltH = ltEntries.length * (ltBarH + ltGap); + var ltLabelW = 180; + var ltMax = d3.max(ltEntries, function(d) { return d.count; }) || 1; + var ltScale = d3.scaleLinear().domain([0, ltMax]).range([0, ltW - ltLabelW - 80]); + var svg = d3.select(ltChartEl).append("svg").attr("width", ltW).attr("height", ltH); + ltEntries.forEach(function(d, i) { + var y = i * (ltBarH + ltGap); + var label = d.name.length > 22 ? d.name.substring(0, 20) + ".." 
: d.name; + var g = svg.append("g").style("cursor", "pointer").on("click", function() { if (ltKey) window.location.href = __basePath + "/" + ltKey + "/" + toSlug(d.name) + ".html"; }); + g.append("text").attr("x", ltLabelW - 8).attr("y", y + ltBarH / 2 + 4).attr("text-anchor", "end").attr("fill", "#808080").attr("font-size", "13px").attr("font-family", "Public Sans,system-ui,sans-serif").text(label); + g.append("rect").attr("x", ltLabelW).attr("y", y).attr("width", Math.max(ltScale(d.count), 4)).attr("height", ltBarH).attr("rx", 3).attr("fill", "#71B9BC").attr("opacity", 0.85); + g.append("text").attr("x", ltLabelW + Math.max(ltScale(d.count), 4) + 8).attr("y", y + ltBarH / 2 + 4).attr("fill", "#808080").attr("font-size", "12px").attr("font-family", "Public Sans,system-ui,sans-serif").text(d.count); + }); + } + } catch (e) { console.error("Letter chart error:", e); } + } + + // --- Mermaid Init --- + if (typeof mermaid !== "undefined") { + try { + mermaid.initialize({ + startOnLoad: false, + theme: "dark", + themeVariables: { + primaryColor: "#71B9BC", primaryTextColor: "#FFFFFF", + primaryBorderColor: "#8CC6C9", lineColor: "#202020", + secondaryColor: "#161616", tertiaryColor: "#08191C", + background: "#161616", mainBkg: "#161616", + nodeBorder: "#202020", clusterBkg: "#000000", + clusterBorder: "#202020", titleColor: "#FFFFFF", + edgeLabelBackground: "#161616" + } + }); + mermaid.run(); + } catch (e) { + console.error("Mermaid error:", e); + } + } + +}); + +// --- Site Search --- +(function() { + var overlay = document.getElementById("search-overlay"); + var input = document.getElementById("search-input"); + var resultsEl = document.getElementById("search-results"); + var toggleBtn = document.querySelector(".search-toggle"); + if (!overlay || !input || !resultsEl) return; + + var index = null; + var activeIdx = -1; + var results = []; + + function openSearch() { + overlay.hidden = false; + input.value = ""; + resultsEl.innerHTML = ""; + activeIdx = -1; + 
input.focus(); + if (!index) loadIndex(); + } + + function closeSearch() { + overlay.hidden = true; + input.blur(); + } + + function loadIndex() { + fetch(__basePath + "/search-index.json") + .then(function(r) { return r.json(); }) + .then(function(data) { index = data; }) + .catch(function() { resultsEl.innerHTML = '
Failed to load search index.
'; }); + } + + function search(query) { + if (!index || !query) { resultsEl.innerHTML = ""; results = []; activeIdx = -1; return; } + var q = query.toLowerCase(); + var tokens = q.split(/\s+/).filter(Boolean); + + var scored = []; + for (var i = 0; i < index.length; i++) { + var e = index[i]; + var haystack = (e.t + " " + (e.d || "") + " " + (e.n || "") + " " + (e.l || "") + " " + (e.m || "")).toLowerCase(); + var titleLower = e.t.toLowerCase(); + var allMatch = true; + for (var j = 0; j < tokens.length; j++) { + if (haystack.indexOf(tokens[j]) === -1) { allMatch = false; break; } + } + if (!allMatch) continue; + + var score = 0; + if (titleLower === q) score += 100; + else if (titleLower.indexOf(q) === 0) score += 50; + else if (titleLower.indexOf(q) >= 0) score += 20; + for (var k = 0; k < tokens.length; k++) { + if (titleLower.indexOf(tokens[k]) >= 0) score += 5; + } + + scored.push({ entry: e, score: score }); + } + + scored.sort(function(a, b) { return b.score - a.score; }); + results = scored.slice(0, 20); + activeIdx = results.length > 0 ? 0 : -1; + renderResults(); + } + + function renderResults() { + if (results.length === 0) { + resultsEl.innerHTML = input.value ? '
No results found.
' : ""; + return; + } + var html = ""; + for (var i = 0; i < results.length; i++) { + var e = results[i].entry; + var cls = i === activeIdx ? "search-result active" : "search-result"; + html += ''; + html += '
' + escHtml(e.t) + '
'; + if (e.d) html += '
' + escHtml(e.d) + '
'; + html += '
'; + if (e.n) html += '' + escHtml(e.n) + ''; + if (e.l) html += '' + escHtml(e.l) + ''; + if (e.m) html += '' + escHtml(e.m) + ''; + html += '
'; + } + resultsEl.innerHTML = html; + } + + function escHtml(s) { + var d = document.createElement("div"); + d.appendChild(document.createTextNode(s)); + return d.innerHTML; + } + + if (toggleBtn) toggleBtn.addEventListener("click", openSearch); + + overlay.addEventListener("click", function(e) { + if (e.target === overlay) closeSearch(); + }); + + input.addEventListener("input", function() { search(input.value.trim()); }); + + input.addEventListener("keydown", function(e) { + if (e.key === "Escape") { closeSearch(); } + else if (e.key === "ArrowDown") { e.preventDefault(); if (activeIdx < results.length - 1) { activeIdx++; renderResults(); scrollActive(); } } + else if (e.key === "ArrowUp") { e.preventDefault(); if (activeIdx > 0) { activeIdx--; renderResults(); scrollActive(); } } + else if (e.key === "Enter" && activeIdx >= 0 && results[activeIdx]) { e.preventDefault(); window.location.href = __basePath + "/" + results[activeIdx].entry.s + ".html"; } + }); + + function scrollActive() { + var el = resultsEl.querySelector(".search-result.active"); + if (el) el.scrollIntoView({ block: "nearest" }); + } + + document.addEventListener("keydown", function(e) { + if (overlay.hidden && e.key === "/" && !isInput(e.target)) { + e.preventDefault(); + openSearch(); + } + if (overlay.hidden && e.key === "k" && (e.metaKey || e.ctrlKey)) { + e.preventDefault(); + openSearch(); + } + if (!overlay.hidden && e.key === "Escape") { + closeSearch(); + } + }); + + function isInput(el) { + var tag = el.tagName; + return tag === "INPUT" || tag === "TEXTAREA" || tag === "SELECT" || el.isContentEditable; + } +})(); diff --git a/internal/archdocs/templates/_og.html b/internal/archdocs/templates/_og.html new file mode 100644 index 0000000..2833ae8 --- /dev/null +++ b/internal/archdocs/templates/_og.html @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/internal/archdocs/templates/_styles.css b/internal/archdocs/templates/_styles.css new file mode 100644 index 0000000..1669e01 --- /dev/null 
+++ b/internal/archdocs/templates/_styles.css @@ -0,0 +1,830 @@ +:root { + --bg: #000000; + --bg-card: #161616; + --bg-hover: #08191C; + --border: #202020; + --text: #FFFFFF; + --text-muted: #808080; + --accent: #71B9BC; + --accent-light: #8CC6C9; + --green: #7CCE86; + --orange: #D0A27D; + --red: #E589C6; + --blue: #8E8CE9; + --font: 'Public Sans', -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, sans-serif; + --mono: 'Martian Mono', "SFMono-Regular", Consolas, "Liberation Mono", Menlo, monospace; + --max-w: 1200px; + --radius: 0px; +} + +* { margin: 0; padding: 0; box-sizing: border-box; } + +html { overflow-x: hidden; } + +body { + font-family: var(--font); + background: var(--bg); + color: var(--text); + line-height: 1.5; + font-weight: 300; + letter-spacing: 0em; + -webkit-font-smoothing: antialiased; + overflow-x: hidden; +} + +a { color: var(--accent-light); text-decoration: none; } +a:hover { text-decoration: underline; } +a:focus-visible, button:focus-visible { outline: 2px solid var(--accent-light); outline-offset: 2px; border-radius: 2px; } + +.skip-link { position: absolute; top: -100%; left: 16px; background: var(--accent); color: #000; padding: 8px 16px; border-radius: 6px; z-index: 1001; font-size: 14px; font-weight: 600; } +.skip-link:focus { top: 12px; } + +.container { max-width: var(--max-w); margin: 0 auto; padding: 0 24px; } + +/* Header */ +.site-header { + border-bottom: 1px solid var(--border); + padding: 16px 0; + position: sticky; + top: 0; + background: var(--bg); + z-index: 100; +} +.site-header .container { + display: flex; + align-items: center; + justify-content: space-between; + gap: 16px; +} +.site-brand { + display: flex; + align-items: center; + gap: 10px; + flex-shrink: 0; +} +.site-brand:hover { text-decoration: none; opacity: 0.8; } +.logo-icon { height: 20px; display: flex; align-items: center; } +.logo-icon svg { height: 100%; width: auto; } +.logo-wordmark { height: 16px; display: flex; align-items: center; } 
+.logo-wordmark svg { height: 100%; width: auto; } +.site-nav { display: flex; gap: 16px; align-items: center; overflow-x: auto; -webkit-overflow-scrolling: touch; } +.site-nav a { color: var(--text-muted); font-size: 0.68rem; font-weight: 300; font-family: var(--mono); text-transform: uppercase; letter-spacing: .08em; white-space: nowrap; } +.site-nav a:hover { color: var(--text); text-decoration: none; } +.nav-all-repos { color: var(--accent-light) !important; padding-right: 12px; margin-right: 4px; border-right: 1px solid var(--border); } +.nav-all-repos:hover { color: var(--text) !important; } + +/* Footer */ +.site-footer { + border-top: 1px solid var(--border); + padding: 32px 0; + margin-top: 64px; + color: var(--text-muted); + font-size: 13px; + text-align: center; +} + +/* Entity Page */ +.entity-page { padding: 32px 0; } +.entity-header { margin-bottom: 32px; } +.entity-breadcrumb { + font-size: 13px; + color: var(--text-muted); + margin-bottom: 12px; + display: flex; + gap: 6px; + align-items: center; + flex-wrap: wrap; +} +.entity-breadcrumb a { color: var(--text-muted); } +.entity-breadcrumb a:hover { color: var(--accent-light); } +.entity-breadcrumb .sep { opacity: 0.4; } +.entity-title { + font-size: 28px; + font-weight: 300; + line-height: 1.3; + letter-spacing: -0.02em; + margin-bottom: 8px; + overflow-wrap: break-word; + word-break: break-word; +} +.entity-desc { + color: var(--text-muted); + font-size: 15px; + max-width: 700px; + overflow-wrap: break-word; +} + +/* Summary */ +.entity-summary { + margin-top: 16px; + padding: 16px 20px; + background: var(--bg-card); + border: 1px solid var(--border); + border-radius: var(--radius); + border-left: 3px solid var(--accent); +} +.entity-summary p { + color: var(--text); + font-size: 15px; + line-height: 1.6; +} + +/* Meta pills */ +.entity-meta { + display: flex; + flex-wrap: wrap; + gap: 8px; + margin-top: 16px; +} +.pill { + display: inline-flex; + align-items: center; + gap: 4px; + padding: 4px 
10px; + background: var(--bg-card); + border: 1px solid var(--border); + border-radius: 20px; + font-size: 12px; + color: var(--text-muted); + font-weight: 400; + font-family: var(--mono); + text-decoration: none; +} +a.pill:hover { + border-color: var(--accent-light); + color: var(--text); +} +.pill-accent { border-color: var(--accent); color: var(--accent-light); } +.pill-green { border-color: var(--green); color: var(--green); } +.pill-orange { border-color: var(--orange); color: var(--orange); } +.pill-blue { border-color: var(--blue); color: var(--blue); } + +/* Sections */ +.entity-section { + margin-bottom: 32px; +} +.entity-section h2 { + font-size: 18px; + font-weight: 400; + letter-spacing: -0.02em; + margin-bottom: 12px; + padding-bottom: 8px; + border-bottom: 1px solid var(--border); +} +.entity-section ul { + list-style: none; + display: flex; + flex-direction: column; + gap: 4px; +} +.entity-section li { + font-size: 14px; + font-family: var(--mono); + padding: 6px 12px; + background: var(--bg-card); + border-radius: 4px; + border: 1px solid transparent; + overflow-wrap: break-word; + word-break: break-all; +} +.entity-section li:hover { + border-color: var(--border); +} + +/* Cards grid */ +.card-grid { + display: grid; + grid-template-columns: repeat(auto-fill, minmax(320px, 1fr)); + gap: 16px; +} +.card { + background: var(--bg-card); + border: 1px solid var(--border); + border-radius: var(--radius); + padding: 20px; + transition: border-color 0.2s; +} +.card:hover { + border-color: var(--accent); + text-decoration: none; +} +.card-title { + font-size: 15px; + font-weight: 600; + letter-spacing: -0.02em; + color: var(--text); + margin-bottom: 6px; + overflow-wrap: break-word; + word-break: break-word; +} +.card-desc { + font-size: 13px; + color: var(--text-muted); + display: -webkit-box; + -webkit-line-clamp: 2; + -webkit-box-orient: vertical; + overflow: hidden; +} +.card-meta { + display: flex; + gap: 8px; + margin-top: 10px; + flex-wrap: wrap; 
+} + +/* Homepage */ +.hero { + padding: 64px 0 48px; + text-align: center; +} +.hero h1 { + font-size: 36px; + font-weight: 200; + letter-spacing: -0.04em; + margin-bottom: 12px; +} +.hero p { + color: var(--text-muted); + font-size: 18px; + max-width: 600px; + margin: 0 auto; +} +.hero-actions { + display: flex; + justify-content: center; + gap: 10px; + margin-bottom: 16px; + flex-wrap: wrap; +} +.hero-btn { + display: inline-flex; + align-items: center; + gap: 6px; + padding: 8px 16px; + border-radius: 20px; + font-size: 13px; + font-weight: 500; + background: var(--bg-card); + border: 1px solid var(--border); + color: var(--text); + transition: border-color 0.2s, background 0.2s; +} +.hero-btn:hover { + border-color: var(--accent); + background: var(--bg-hover); + text-decoration: none; +} +.hero-btn svg { width: 16px; height: 16px; flex-shrink: 0; } +.hero-btn-star { border-color: var(--orange); color: var(--orange); } +.hero-btn-star:hover { background: rgba(208, 162, 125, 0.1); } +.hero-btn-fork { border-color: var(--blue); color: var(--blue); } +.hero-btn-fork:hover { background: rgba(142, 140, 233, 0.1); } +.hero-stats { + display: flex; + justify-content: center; + flex-wrap: wrap; + gap: 16px 32px; + margin-top: 32px; +} +.hero-stat { text-align: center; } +.hero-stat .num { + font-size: 28px; + font-weight: 300; + letter-spacing: -0.02em; + color: var(--accent-light); +} +.hero-stat .label { + font-size: 13px; + color: var(--text-muted); +} +.hero-stat-link { + padding: 12px 16px; + border-radius: var(--radius); + border: 1px solid transparent; + transition: border-color 0.2s, background 0.2s; +} +.hero-stat-link:hover { + border-color: var(--border); + background: var(--bg-card); + text-decoration: none; +} + +.section-title { + font-size: 22px; + font-weight: 300; + letter-spacing: -0.02em; + margin-bottom: 16px; +} +.section-subtitle { + font-size: 14px; + color: var(--text-muted); + margin-bottom: 20px; +} +.section { margin-bottom: 48px; } + +/* 
Hub / Taxonomy */ +.hub-header { + padding: 32px 0; + border-bottom: 1px solid var(--border); + margin-bottom: 32px; +} +.hub-header h1 { + font-size: 28px; + font-weight: 300; + letter-spacing: -0.02em; +} +.hub-desc { + color: var(--text-muted); + margin-top: 8px; + font-size: 15px; + max-width: 700px; +} +.hub-meta { + color: var(--text-muted); + margin-top: 6px; + font-size: 13px; +} + +/* Pagination */ +.pagination { + display: flex; + justify-content: center; + align-items: center; + gap: 8px; + margin-top: 32px; + padding-top: 24px; + border-top: 1px solid var(--border); +} +.pagination a, .pagination span { + padding: 8px 14px; + border-radius: 6px; + font-size: 14px; + font-weight: 500; +} +.pagination a { + background: var(--bg-card); + border: 1px solid var(--border); + color: var(--text); +} +.pagination a:hover { + border-color: var(--accent); + text-decoration: none; +} +.pagination .current { + background: var(--accent); + color: #000; +} + +/* Taxonomy Index */ +.tax-grid { + display: grid; + grid-template-columns: repeat(auto-fill, minmax(240px, 1fr)); + gap: 12px; +} +.tax-entry { + display: flex; + justify-content: space-between; + align-items: center; + padding: 14px 16px; + background: var(--bg-card); + border: 1px solid var(--border); + border-radius: var(--radius); + color: var(--text); + font-size: 14px; + font-weight: 500; + transition: border-color 0.2s; +} +.tax-entry:hover { + border-color: var(--accent); + text-decoration: none; +} +.tax-entry-left { + display: flex; + align-items: center; + gap: 8px; + min-width: 0; + flex: 1; + overflow: hidden; +} +.tax-entry-left > span:first-child { + overflow: hidden; + text-overflow: ellipsis; + white-space: nowrap; +} +.tax-domain-tag { + font-size: 10px; + color: var(--text-muted); + background: var(--bg); + padding: 2px 8px; + border-radius: 10px; + white-space: nowrap; + flex-shrink: 0; +} +.tax-count { + font-size: 12px; + color: var(--text-muted); + background: var(--bg); + padding: 2px 
8px; + border-radius: 10px; + flex-shrink: 0; +} + +/* Visuals Panel */ +.visuals-panel { + background: var(--bg-card); + border: 1px solid var(--border); + border-radius: var(--radius); + padding: 16px; + margin-bottom: 32px; + display: flex; + flex-direction: column; + gap: 12px; +} +.visuals-panel h3 { + font-size: 10px; + font-weight: 400; + font-family: 'Lexend Peta', sans-serif; + color: var(--text-muted); + text-transform: uppercase; + letter-spacing: 0em; + margin: 0 0 8px 0; + padding: 0; + border: none; +} +.visuals-archmap { + padding-bottom: 12px; + border-bottom: 1px solid var(--border); +} +.arch-map-svg { min-height: 40px; } +.arch-map-svg svg { display: block; width: 100%; } +.arch-map-fallback { + display: flex; + align-items: center; + gap: 8px; + flex-wrap: wrap; + font-size: 14px; + color: var(--text-muted); +} +.arch-sep { opacity: 0.4; } +.visuals-row { + display: grid; + grid-template-columns: 1fr 1fr; + gap: 12px; +} +.visuals-row > .visuals-cell:only-child { + grid-column: 1 / -1; +} +@media (max-width: 768px) { + .visuals-row { grid-template-columns: 1fr; } +} + +/* Mermaid Diagram */ +.mermaid-container { + overflow-x: auto; + min-height: 0; +} +.mermaid { background: transparent !important; } + +/* Force Graph */ +.force-graph-container { + overflow: hidden; + min-height: 0; +} +#force-graph { height: 320px; width: 100%; } +#force-graph svg { width: 100%; height: 100%; } + +/* FAQ Accordion */ +.entity-faqs { margin-bottom: 32px; } +.faq-item { + background: var(--bg-card); + border: 1px solid var(--border); + border-radius: var(--radius); + margin-bottom: 8px; + overflow: hidden; +} +.faq-question { + padding: 14px 18px; + font-size: 15px; + font-weight: 600; + cursor: pointer; + list-style: none; + display: flex; + align-items: center; + justify-content: space-between; + gap: 12px; +} +.faq-question::-webkit-details-marker { display: none; } +.faq-question::after { + content: "+"; + font-size: 18px; + font-weight: 400; + color: 
var(--text-muted); + flex-shrink: 0; + transition: transform 0.2s; +} +.faq-item[open] .faq-question::after { content: "\2212"; } +.faq-answer { + padding: 0 18px 14px; + font-size: 14px; + color: var(--text-muted); + line-height: 1.7; +} + +/* CTA Banner */ +.cta-section { + background: linear-gradient(135deg, var(--bg-card) 0%, #08191C 100%); + border: 1px solid var(--accent); + border-radius: var(--radius); + padding: 40px 32px; + text-align: center; + max-width: 720px; + margin: 48px auto 32px; +} +.cta-heading { + font-size: 22px; + font-weight: 300; + letter-spacing: -0.02em; + margin-bottom: 8px; + border-bottom: none !important; + padding-bottom: 0 !important; +} +.cta-description { + color: var(--text-muted); + font-size: 15px; + max-width: 540px; + margin: 0 auto 20px; +} +.cta-button { + display: inline-block; + padding: 12px 28px; + background: var(--accent); + color: #000; + font-size: 15px; + font-weight: 600; + border-radius: 6px; + transition: background 0.2s; +} +.cta-button:hover { + background: var(--accent-light); + text-decoration: none; +} + +/* Chart Panel */ +.chart-panel { + background: var(--bg-card); + border: 1px solid var(--border); + border-radius: var(--radius); + padding: 20px; + margin-bottom: 32px; +} +.chart-panel h3 { + font-size: 10px; + font-weight: 400; + font-family: 'Lexend Peta', sans-serif; + color: var(--text-muted); + text-transform: uppercase; + letter-spacing: 0em; + margin: 0 0 12px 0; +} +.chart-panel-compact { + padding: 16px; + margin-top: 16px; +} +.hub-charts { + display: flex; + gap: 24px; + align-items: flex-start; +} +.hub-chart-cell { + flex: 1; + min-width: 0; +} +.hub-chart-cell h4 { + font-size: 11px; + font-weight: 600; + color: var(--text-muted); + text-transform: uppercase; + letter-spacing: 0.04em; + margin: 0 0 8px 0; +} +.entity-profile-panel { + margin-top: 24px; +} +.source-code-section { + position: relative; +} +.source-location { + font-size: 12px; + color: var(--text-muted); + font-family: 
var(--mono); + margin: 0 0 8px 0; +} +.source-code { + background: #161616; + border: 1px solid var(--border); + border-radius: var(--radius); + padding: 16px; + overflow-x: auto; + max-height: 600px; + overflow-y: auto; + margin: 0; + line-height: 1.5; +} +.source-code code { + font-family: var(--mono); + font-size: 13px; + color: #FFFFFF; + white-space: pre; + tab-size: 2; + margin-bottom: 0; +} +/* Homepage Composition (flexbox) */ +.hp-composition { + display: flex; + flex-wrap: wrap; + gap: 3px; + height: 200px; +} +.hp-comp-item { + display: flex; + flex-direction: column; + justify-content: flex-end; + padding: 10px 12px; + text-decoration: none; + opacity: 0.85; + transition: opacity 0.2s; + box-sizing: border-box; + align-self: stretch; +} +.hp-comp-item:hover { opacity: 1; text-decoration: none; } +.hp-comp-name { + color: #fff; + font-size: 13px; + font-weight: 600; + font-family: var(--font); + white-space: nowrap; +} +.hp-comp-count { + color: rgba(255,255,255,0.7); + font-size: 12px; + font-family: var(--mono); + margin-top: 2px; +} + +#homepage-chart, #hub-chart, #all-entities-chart { width: 100%; } +#homepage-chart svg, #hub-chart svg, #taxonomy-chart svg, #all-entities-chart svg, #letter-chart svg { + width: 100%; + display: block; +} + +/* Architecture Overview */ +.arch-map-panel { padding: 20px 20px 12px; } +#arch-overview { width: 100%; min-height: 360px; } +#arch-overview svg { width: 100%; height: 100%; display: block; } + +/* Letter Navigation */ +.letter-nav { + display: flex; + flex-wrap: wrap; + gap: 6px; + margin-bottom: 24px; +} +.letter-link { + display: inline-flex; + align-items: center; + justify-content: center; + width: 36px; + height: 36px; + background: var(--bg-card); + border: 1px solid var(--border); + border-radius: 6px; + color: var(--text-muted); + font-size: 14px; + font-weight: 600; +} +.letter-link:hover { + border-color: var(--accent); + text-decoration: none; +} +.letter-active { + background: var(--accent); + color: 
#000; + border-color: var(--accent); +} + +/* JSON-LD script tags */ +.jsonld { display: none; } + +/* Search toggle button */ +.search-toggle { background: none; border: 1px solid var(--border); border-radius: 6px; color: var(--text-muted); cursor: pointer; display: flex; align-items: center; gap: 6px; padding: 5px 10px; font-size: 14px; } +.search-toggle:hover { border-color: var(--accent); color: var(--text); } +.search-toggle svg { width: 16px; height: 16px; } +.search-kbd { font-size: 11px; background: var(--bg-card); border: 1px solid var(--border); border-radius: 3px; padding: 1px 5px; font-family: inherit; } + +/* Search overlay */ +.search-overlay { position: fixed; inset: 0; z-index: 1000; background: rgba(0,0,0,0.6); backdrop-filter: blur(4px); display: flex; align-items: flex-start; justify-content: center; padding-top: 15vh; } +.search-overlay[hidden] { display: none; } +.search-modal { background: var(--bg-card); border: 1px solid var(--border); border-radius: 12px; width: 100%; max-width: 640px; max-height: 70vh; display: flex; flex-direction: column; box-shadow: 0 20px 60px rgba(0,0,0,0.5); } + +/* Search input */ +.search-input-wrap { display: flex; align-items: center; gap: 8px; padding: 12px 16px; border-bottom: 1px solid var(--border); } +.search-icon { width: 20px; height: 20px; flex-shrink: 0; color: var(--text-muted); } +.search-input { flex: 1; background: none; border: none; color: var(--text); font-size: 16px; font-family: inherit; outline: none; } +.search-input::placeholder { color: var(--text-muted); } +.search-esc { font-size: 11px; background: var(--bg); border: 1px solid var(--border); border-radius: 3px; padding: 1px 6px; color: var(--text-muted); } + +/* Search results */ +.search-results { overflow-y: auto; flex: 1; } +.search-result { display: block; padding: 10px 16px; color: var(--text); text-decoration: none; border-bottom: 1px solid var(--border); } +.search-result:hover, .search-result.active { background: var(--bg); } 
+.search-result-title { font-weight: 600; font-size: 14px; } +.search-result-desc { font-size: 12px; color: var(--text-muted); margin-top: 2px; white-space: nowrap; overflow: hidden; text-overflow: ellipsis; } +.search-result-meta { display: flex; gap: 6px; margin-top: 4px; } +.search-result-meta .pill { font-size: 11px; padding: 1px 6px; } +.search-no-results { padding: 24px 16px; text-align: center; color: var(--text-muted); font-size: 14px; } + +/* Search footer */ +.search-footer { display: flex; gap: 16px; padding: 8px 16px; border-top: 1px solid var(--border); font-size: 11px; color: var(--text-muted); } +.search-footer kbd { font-size: 10px; background: var(--bg); border: 1px solid var(--border); border-radius: 3px; padding: 0 4px; } + +/* Responsive */ +@media (max-width: 768px) { + .container { padding: 0 16px; } + .site-header { padding: 12px 0; } + .site-brand { font-size: 16px; } + .site-brand svg { width: 20px; height: 20px; } + .site-nav { gap: 12px; } + .site-nav a { font-size: 13px; } + + .hero { padding: 40px 0 32px; } + .hero h1 { font-size: 24px; } + .hero p { font-size: 15px; } + .hero-actions { gap: 8px; } + .hero-btn { padding: 6px 12px; font-size: 12px; } + .hero-stats { gap: 8px 16px; } + .hero-stat .num { font-size: 22px; } + .hero-stat .label { font-size: 12px; } + .hero-stat-link { padding: 8px 12px; } + + .entity-page { padding: 20px 0; } + .entity-header { margin-bottom: 20px; } + .entity-title { font-size: 20px; } + .entity-desc { font-size: 14px; } + .entity-meta { gap: 6px; } + .pill { font-size: 11px; padding: 3px 8px; } + .entity-summary { padding: 12px 14px; margin-top: 12px; } + .entity-summary p { font-size: 14px; } + + .entity-section { margin-bottom: 24px; } + .entity-section h2 { font-size: 16px; margin-bottom: 10px; } + .entity-section li { font-size: 13px; padding: 5px 10px; } + + .card-grid { grid-template-columns: 1fr; } + .card { padding: 14px; } + .card-title { font-size: 14px; } + .card-desc { font-size: 12px; } + + 
.tax-grid { grid-template-columns: 1fr; } + .tax-entry { padding: 12px 14px; font-size: 13px; } + + .hub-header { padding: 20px 0; margin-bottom: 24px; } + .hub-header h1 { font-size: 22px; } + .hub-desc { font-size: 14px; } + + .visuals-panel { padding: 12px; gap: 10px; } + .visuals-panel h3 { font-size: 12px; margin-bottom: 6px; } + .visuals-archmap { padding-bottom: 10px; } + #force-graph { height: 260px; } + + .faq-question { padding: 12px 14px; font-size: 14px; } + .faq-answer { padding: 0 14px 12px; font-size: 13px; } + + .cta-section { padding: 28px 20px; } + .cta-heading { font-size: 18px; } + .cta-description { font-size: 14px; } + .cta-button { padding: 10px 22px; font-size: 14px; } + + .pagination a, .pagination span { padding: 6px 10px; font-size: 13px; } + + .chart-panel { padding: 14px; margin-bottom: 24px; } + .chart-panel h3 { font-size: 12px; margin-bottom: 8px; } + .chart-panel-compact { padding: 12px; margin-top: 12px; } + .hub-charts { flex-direction: column; gap: 16px; } + .letter-nav { gap: 4px; margin-bottom: 16px; } + .letter-link { width: 32px; height: 32px; font-size: 12px; } + + .section-title { font-size: 18px; } + .section { margin-bottom: 36px; } + .site-footer { margin-top: 40px; padding: 24px 0; } + .search-kbd { display: none; } + .search-overlay { padding-top: 0; align-items: stretch; } + .search-modal { max-width: 100%; max-height: 100vh; border-radius: 0; } + .search-footer { display: none; } +} + +@media (max-width: 380px) { + .container { padding: 0 12px; } + .entity-title { font-size: 18px; } + .hero h1 { font-size: 20px; } + .hero-stat .num { font-size: 18px; } +} diff --git a/internal/archdocs/templates/all_entities.html b/internal/archdocs/templates/all_entities.html new file mode 100644 index 0000000..6cb1577 --- /dev/null +++ b/internal/archdocs/templates/all_entities.html @@ -0,0 +1,83 @@ + + + +{{template "_head.html"}} +All Entities{{if gt .Pagination.CurrentPage 1}} — Page {{.Pagination.CurrentPage}}{{end}} | 
{{.Site.Name}} + +{{$curPage := index .Pagination.PageURLs (sub .Pagination.CurrentPage 1)}} +{{template "_og.html" .}} + +{{.JsonLD}} + + +{{template "_header.html" .}} + +
+
+
+
+ Home + / + All Entities +
+

All Entities

+

Browse all {{.TotalEntities | formatNumber}} entities in the {{.Site.Name}} architecture documentation — every file, function, class, type, domain, and directory.

+

{{.TotalEntities | formatNumber}} entities · Page {{.Pagination.CurrentPage}} of {{.Pagination.TotalPages}}

+
+ + {{if eq .Pagination.CurrentPage 1}} +
+

Entity Types

+
+ +
+ {{end}} + + + + {{if gt .Pagination.TotalPages 1}} + + {{end}} +
+ + {{if .CTA.Enabled}} +
+

{{.CTA.Heading}}

+

{{.CTA.Description}}

+ {{.CTA.ButtonText}} +
+ {{end}} +
+ +{{template "_footer.html"}} + + + + diff --git a/internal/archdocs/templates/entity.html b/internal/archdocs/templates/entity.html new file mode 100644 index 0000000..d6a868d --- /dev/null +++ b/internal/archdocs/templates/entity.html @@ -0,0 +1,235 @@ + + + +{{template "_head.html"}} +{{.Entity.GetString "title"}} | {{.Site.Name}} + + +{{template "_og.html" .}} + +{{.JsonLD}} + + +{{template "_header.html" .}} + +
+
+
+
+ Home + / + {{if .Entity.GetString "node_type"}}{{.Entity.GetString "node_type"}}/{{end}} + {{.Entity.GetString "title"}} +
+

{{.Entity.GetString "title"}}

+ {{if not (.Entity.GetString "summary")}}

{{.Entity.GetString "description"}}

{{end}} + +
+ {{if .Entity.GetString "node_type"}}{{.Entity.GetString "node_type"}}{{end}} + {{if .Entity.GetString "language"}}{{.Entity.GetString "language"}}{{end}} + {{if .Entity.GetString "domain"}}{{.Entity.GetString "domain"}}{{end}} + {{if .Entity.GetString "subdomain"}}{{.Entity.GetString "subdomain"}}{{end}} + {{if .Entity.GetInt "import_count"}}{{.Entity.GetInt "import_count"}} imports{{end}} + {{if .Entity.GetInt "imported_by_count"}}{{.Entity.GetInt "imported_by_count"}} dependents{{end}} + {{if .Entity.GetInt "call_count"}}calls {{.Entity.GetInt "call_count"}}{{end}} + {{if .Entity.GetInt "called_by_count"}}called by {{.Entity.GetInt "called_by_count"}}{{end}} + {{if .Entity.GetInt "function_count"}}{{.Entity.GetInt "function_count"}} functions{{end}} + {{if .Entity.GetInt "class_count"}}{{.Entity.GetInt "class_count"}} classes{{end}} + {{if .Entity.GetInt "file_count"}}{{.Entity.GetInt "file_count"}} files{{end}} +
+ + {{if .Entity.GetString "summary"}} +
+

{{.Entity.GetString "summary"}}

+
+ {{end}} +
+ +
+

Entity Profile

+
+ +
+ + {{if or (.Entity.GetString "arch_map") (.Entity.GetString "mermaid_diagram") (.Entity.GetString "graph_data")}} +
+ {{if .Entity.GetString "arch_map"}} +
+
+ + +
+ {{end}} + + {{if or (.Entity.GetString "mermaid_diagram") (.Entity.GetString "graph_data")}} +
+ {{if .Entity.GetString "mermaid_diagram"}} +
+

Dependency Diagram

+
{{.Entity.GetString "mermaid_diagram" | safeHTML}}
+
+ {{end}} + + {{if .Entity.GetString "graph_data"}} +
+

Relationship Graph

+
+ + +
+ {{end}} +
+ {{end}} +
+ {{end}} + + {{if .SourceCode}} +
+

Source Code

+ {{if .Entity.GetInt "start_line"}}

{{.Entity.GetString "file_path"}} lines {{.Entity.GetInt "start_line"}}–{{.Entity.GetInt "end_line"}}

{{end}} +
{{.SourceCode}}
+
+ {{end}} + + {{$sections := .Entity.Sections}} + + {{with index $sections "Domain"}} +
+

Domain

+
    {{range .}}
  • {{. | safeHTML}}
  • {{end}}
+
+ {{end}} + + {{with index $sections "Subdomains"}} +
+

Subdomains

+
    {{range .}}
  • {{. | safeHTML}}
  • {{end}}
+
+ {{end}} + + {{with index $sections "Defined In"}} +
+

Defined In

+
    {{range .}}
  • {{. | safeHTML}}
  • {{end}}
+
+ {{end}} + + {{with index $sections "Functions"}} +
+

Functions

+
    {{range .}}
  • {{. | safeHTML}}
  • {{end}}
+
+ {{end}} + + {{with index $sections "Classes"}} +
+

Classes

+
    {{range .}}
  • {{. | safeHTML}}
  • {{end}}
+
+ {{end}} + + {{with index $sections "Types"}} +
+

Types

+
    {{range .}}
  • {{. | safeHTML}}
  • {{end}}
+
+ {{end}} + + {{with index $sections "Dependencies"}} +
+

Dependencies

+
    {{range .}}
  • {{. | safeHTML}}
  • {{end}}
+
+ {{end}} + + {{with index $sections "Imported By"}} +
+

Imported By

+
    {{range .}}
  • {{. | safeHTML}}
  • {{end}}
+
+ {{end}} + + {{with index $sections "Calls"}} +
+

Calls

+
    {{range .}}
  • {{. | safeHTML}}
  • {{end}}
+
+ {{end}} + + {{with index $sections "Called By"}} +
+

Called By

+
    {{range .}}
  • {{. | safeHTML}}
  • {{end}}
+
+ {{end}} + + {{with index $sections "Source Files"}} +
+

Source Files

+
    {{range .}}
  • {{. | safeHTML}}
  • {{end}}
+
+ {{end}} + + {{with index $sections "Subdirectories"}} +
+

Subdirectories

+
    {{range .}}
  • {{. | safeHTML}}
  • {{end}}
+
+ {{end}} + + {{with index $sections "Files"}} +
+

Files

+
    {{range .}}
  • {{. | safeHTML}}
  • {{end}}
+
+ {{end}} + + {{with index $sections "Extends"}} +
+

Extends

+
    {{range .}}
  • {{. | safeHTML}}
  • {{end}}
+
+ {{end}} + + {{with index $sections "Source"}} +
+

Source

+
    {{range .}}
  • {{. | safeHTML}}
  • {{end}}
+
+ {{end}} + + {{with .Entity.GetFAQs}} +
+

Frequently Asked Questions

+ {{range .}} +
+ {{.Question}} +
{{.Answer | safeHTML}}
+
+ {{end}} +
+ {{end}} + + {{if .CTA.Enabled}} +
+

{{.CTA.Heading}}

+

{{.CTA.Description}}

+ {{.CTA.ButtonText}} +
+ {{end}} + +
+
+ +{{template "_footer.html"}} +{{if .Entity.GetString "mermaid_diagram"}}{{end}} + + + + diff --git a/internal/archdocs/templates/hub.html b/internal/archdocs/templates/hub.html new file mode 100644 index 0000000..9956469 --- /dev/null +++ b/internal/archdocs/templates/hub.html @@ -0,0 +1,83 @@ + + + +{{template "_head.html"}} +{{.Entry.Name}} — {{.Taxonomy.Label}} | {{.Site.Name}} + +{{$curPage := index .Pagination.PageURLs (sub .Pagination.CurrentPage 1)}} + +{{template "_og.html" .}} + +{{.JsonLD}} + + +{{template "_header.html" .}} + +
+
+
+
+ Home + / + {{.Taxonomy.Label}} + / + {{.Entry.Name}} +
+

{{.Entry.Name}}

+

Browse all {{len .Entry.Entities}} {{.Taxonomy.LabelSingular | lower}} entities categorized under {{.Entry.Name}} in the {{.Site.Name}} architecture documentation.

+

{{len .Entry.Entities}} entities · Page {{.Pagination.CurrentPage}} of {{.Pagination.TotalPages}}

+
+
+
+ +
+
+ + + + {{if gt .Pagination.TotalPages 1}} + + {{end}} +
+ + {{if .CTA.Enabled}} +
+

{{.CTA.Heading}}

+

{{.CTA.Description}}

+ {{.CTA.ButtonText}} +
+ {{end}} +
+ +{{template "_footer.html"}} + + + + diff --git a/internal/archdocs/templates/index.html b/internal/archdocs/templates/index.html new file mode 100644 index 0000000..4cd0203 --- /dev/null +++ b/internal/archdocs/templates/index.html @@ -0,0 +1,94 @@ + + + +{{template "_head.html"}} +{{.Site.Name}} — Architecture Documentation + + +{{template "_og.html" .}} + +{{.JsonLD}} + + +{{template "_header.html" .}} + +
+
+
+

{{.Site.Name}}

+ {{if .Site.RepoURL}} + + {{end}} +

Architecture documentation generated from code analysis. Explore every file, function, class, and domain.

+ +
+ +
+

Architecture Overview

+
+ +
+ +
+

Codebase Composition

+
+ +
+ + {{range .Taxonomies}} + {{$taxName := .Name}} + + {{end}} +
+ + {{if .CTA.Enabled}} +
+

{{.CTA.Heading}}

+

{{.CTA.Description}}

+ {{.CTA.ButtonText}} +
+ {{end}} +
+ +{{template "_footer.html"}} + + + + diff --git a/internal/archdocs/templates/letter.html b/internal/archdocs/templates/letter.html new file mode 100644 index 0000000..f517c2c --- /dev/null +++ b/internal/archdocs/templates/letter.html @@ -0,0 +1,65 @@ + + + +{{template "_head.html"}} +{{.Taxonomy.Label}} — {{.Letter}} | {{.Site.Name}} + + +{{template "_og.html" .}} + +{{.JsonLD}} + + +{{template "_header.html" .}} + +
+
+
+
+ Home + / + {{.Taxonomy.Label}} + / + {{.Letter}} +
+

{{.Taxonomy.Label}} — {{.Letter}}

+

{{len .Entries}} entries starting with {{.Letter}}

+
+ +
+ {{range .Letters}} + {{if eq . $.Letter}}{{.}} + {{else}}{{.}}{{end}} + {{end}} +
+ +
+

Entry Sizes

+
+ +
+ +
+ {{range .Entries}} + + {{.Name}} + {{len .Entities}} + + {{end}} +
+
+ + {{if .CTA.Enabled}} +
+

{{.CTA.Heading}}

+

{{.CTA.Description}}

+ {{.CTA.ButtonText}} +
+ {{end}} +
+ +{{template "_footer.html"}} + + + + diff --git a/internal/archdocs/templates/taxonomy_index.html b/internal/archdocs/templates/taxonomy_index.html new file mode 100644 index 0000000..a12621c --- /dev/null +++ b/internal/archdocs/templates/taxonomy_index.html @@ -0,0 +1,56 @@ + + + +{{template "_head.html"}} +{{.Taxonomy.Label}} — {{.Site.Name}} + + +{{template "_og.html" .}} + +{{.JsonLD}} + + +{{template "_header.html" .}} + +
+
+
+
+ Home + / + {{.Taxonomy.Label}} +
+

{{.Taxonomy.Label}}

+

{{.Taxonomy.Config.IndexDescription}} · {{len .Taxonomy.Entries}} categories

+
+ +
+

Distribution

+
+ +
+ +
+ {{range .Taxonomy.Entries}} + + {{.Name}} + {{len .Entities}} + + {{end}} +
+
+ + {{if .CTA.Enabled}} +
+

{{.CTA.Heading}}

+

{{.CTA.Description}}

+ {{.CTA.ButtonText}} +
+ {{end}} +
+ +{{template "_footer.html"}} + + + + diff --git a/internal/archdocs/zip.go b/internal/archdocs/zip.go new file mode 100644 index 0000000..07066de --- /dev/null +++ b/internal/archdocs/zip.go @@ -0,0 +1,111 @@ +package archdocs + +import ( + "archive/zip" + "fmt" + "io" + "os" + "os/exec" + "path/filepath" + "strings" +) + +// skipDirs are directory names that should never be included in the archive. +var skipDirs = map[string]bool{ + ".git": true, + "node_modules": true, + "vendor": true, + "__pycache__": true, + ".venv": true, + "venv": true, + "dist": true, + "build": true, + "target": true, + ".next": true, + ".nuxt": true, + "coverage": true, + ".terraform": true, + ".tox": true, +} + +// createZip archives the repository at dir into a temporary ZIP file and +// returns its path. The caller is responsible for removing the file. +// +// Strategy: use git archive when inside a Git repo (respects .gitignore, +// deterministic output). Falls back to a manual directory walk otherwise. +func createZip(dir string) (string, error) { + f, err := os.CreateTemp("", "supermodel-*.zip") + if err != nil { + return "", fmt.Errorf("create temp file: %w", err) + } + dest := f.Name() + f.Close() + + if isGitRepo(dir) { + if err := gitArchive(dir, dest); err == nil { + return dest, nil + } + } + + if err := walkZip(dir, dest); err != nil { + os.Remove(dest) + return "", err + } + return dest, nil +} + +func isGitRepo(dir string) bool { + cmd := exec.Command("git", "-C", dir, "rev-parse", "--git-dir") + cmd.Stdout = io.Discard + cmd.Stderr = io.Discard + return cmd.Run() == nil +} + +func gitArchive(dir, dest string) error { + cmd := exec.Command("git", "-C", dir, "archive", "--format=zip", "-o", dest, "HEAD") + cmd.Stderr = os.Stderr + return cmd.Run() +} + +// walkZip creates a ZIP of dir, excluding skipDirs, hidden files, and +// files larger than 10 MB. 
+func walkZip(dir, dest string) error { + out, err := os.Create(dest) + if err != nil { + return err + } + defer out.Close() + + zw := zip.NewWriter(out) + defer zw.Close() + + return filepath.Walk(dir, func(path string, info os.FileInfo, err error) error { + if err != nil { + return err + } + rel, err := filepath.Rel(dir, path) + if err != nil { + return err + } + if info.IsDir() { + if skipDirs[info.Name()] { + return filepath.SkipDir + } + return nil + } + if strings.HasPrefix(info.Name(), ".") || info.Size() > 10<<20 { + return nil + } + w, err := zw.Create(filepath.ToSlash(rel)) + if err != nil { + return err + } + f, err := os.Open(path) + if err != nil { + return err + } + defer f.Close() + _, err = io.Copy(w, f) + return err + }) +}