AION Structure Scanner v2.0 — GitHub Actions workflow for this run (#2).
NOTE: GitHub's viewer warns that this file may contain hidden or bidirectional Unicode text that can be interpreted or compiled differently from how it renders. Review it in an editor that reveals hidden Unicode characters before trusting the rendered view.
name: AION Structure Scanner v2.0
# VELA-C Two-Pass Architecture | TOPOS Kitchen Transform | AI Navigation Manifest
# Architect: Sheldon K. Salmon | Co-Architect: ALBEDO
# Generates: STRUCTURE.md · MANIFEST.md · PATHS.json · TOPOS-BIN.md

on:
  push:
    branches: [ main, master ]
  pull_request:
    branches: [ main, master ]
  workflow_dispatch:
  schedule:
    - cron: '0 0 * * *'   # daily at midnight UTC

jobs:
  scan-structure:
    name: 📁 AION Structure Scanner v2.0
    runs-on: ubuntu-latest
    permissions:
      contents: write     # needed by the commit-and-push step

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
          token: ${{ secrets.GITHUB_TOKEN }}
          persist-credentials: true   # keep token so the later `git push` works

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.11'

      - name: Run AION Structure Scanner
        run: |
          python3 << 'SCANNER_EOF'
import os
import json
import hashlib
from datetime import datetime, timezone
from pathlib import Path

# ─────────────────────────────────────────────────────────────────
# CONFIGURATION
# ─────────────────────────────────────────────────────────────────
REPO_ROOT = Path(".")
OUTPUT_DIR = Path(".aions-structure-list")
OUTPUT_DIR.mkdir(exist_ok=True)

REPO_NAME = os.environ.get("GITHUB_REPOSITORY", "AionSystem/AION-BRAIN")
REPO_URL = f"https://github.com/{REPO_NAME}"
# Take the branch from the Actions context when available: this workflow also
# triggers on `master`, where a hard-coded "main" would produce dead raw links.
# Falls back to "main" for local runs.
BRANCH = os.environ.get("GITHUB_REF_NAME", "main")
RAW_URL = f"https://raw.githubusercontent.com/{REPO_NAME}/{BRANCH}"
SCAN_TIME = datetime.now(timezone.utc).isoformat()

# Path components to exclude from the scan (matched against Path.parts).
EXCLUDE_PATTERNS = [
    ".git", "__pycache__", "node_modules", "venv", ".venv",
    ".aions-structure-list", ".github"
]
# ─────────────────────────────────────────────────────────────────
# VELA-C PASS 1 — DOMAIN CLASSIFICATION
# Maps path prefixes to function tags for SPATIAL-NAV routing
# ─────────────────────────────────────────────────────────────────
DOMAIN_MAP = {
    "frameworks": {"tag": "FRAMEWORK", "room": "MANSION_LIBRARY", "keel": "6,7"},
    "frameworks/VELA": {"tag": "VELA", "room": "MANSION_LIBRARY", "keel": "6"},
    "frameworks/FSVE": {"tag": "FSVE", "room": "MATH_ROOM", "keel": "6"},
    "frameworks/LAV": {"tag": "LAV", "room": "MATH_ROOM", "keel": "6"},
    "frameworks/AION": {"tag": "AION", "room": "CONVERGENCE_ROOM", "keel": "all"},
    "frameworks/TOPOS": {"tag": "TOPOS", "room": "AI_APARTMENTS", "keel": "2,7"},
    "frameworks/GENESIS": {"tag": "GENESIS", "room": "UNIVERSE_ROOM", "keel": "7"},
    "frameworks/EID": {"tag": "EID", "room": "UNIVERSE_ROOM", "keel": "1"},
    "constitutional": {"tag": "CONSTITUTIONAL", "room": "CONVERGENCE_ROOM", "keel": "5"},
    "articles": {"tag": "PUBLISHED", "room": "MANSION_LIBRARY", "keel": "none"},
    "assessments": {"tag": "ASSESSMENT", "room": "MATH_ROOM", "keel": "6"},
    "projects": {"tag": "PROJECT", "room": "SECRET_WORKSHOP", "keel": "none"},
    "innovation-lab": {"tag": "INNOVATION", "room": "SECRET_WORKSHOP", "keel": "7"},
    "legal": {"tag": "LEGAL", "room": "CONVERGENCE_ROOM", "keel": "5"},
    "tests": {"tag": "TEST", "room": "MATH_ROOM", "keel": "none"},
    "fcl": {"tag": "FCL", "room": "MATH_ROOM", "keel": "6"},
}

def classify_path(rel_path):
    """VELA-C Pass 1 — domain classification by path prefix.

    Returns the DOMAIN_MAP metadata dict ({"tag", "room", "keel"}) for the
    longest prefix matching *rel_path*; unmatched paths fall through to the
    GENERAL/LOBBY default.
    """
    path_str = str(rel_path)
    best_match = {"tag": "GENERAL", "room": "LOBBY", "keel": "none"}
    best_len = 0
    for prefix, meta in DOMAIN_MAP.items():
        # Match on whole path components so e.g. "frameworks-old/x.md"
        # is NOT classified under "frameworks".
        matches = path_str == prefix or path_str.startswith(prefix + "/")
        if matches and len(prefix) > best_len:
            best_match = meta
            best_len = len(prefix)
    return best_match
# ─────────────────────────────────────────────────────────────────
# VELA-C PASS 2 — CLEAN SCRUTINY
# Flags paths that pass surface check but need second look
# ─────────────────────────────────────────────────────────────────
SCRUTINY_FLAGS = {
    "README.md": "ANCHOR — primary entry point for this directory",
    "SPEC.md": "SPEC — canonical specification file",
    "CHANGELOG.md": "HISTORY — version history, check for version drift",
    "FCL": "FCL — validation entry, high epistemic weight",
    "FROZEN": "FROZEN — immutable, verify before referencing",
    "v0.": "NASCENT — M-NASCENT version, not yet validated",
    "v1.": "ACTIVE — validated or validation-in-progress",
    "v2.": "MATURE — high confidence, check convergence state",
    "v3.": "MATURE — high confidence, check convergence state",
}

def scrutiny_check(filename):
    """VELA-C Pass 2 — CLEAN scrutiny for known signal patterns.

    Returns the list of notes for every SCRUTINY_FLAGS pattern that occurs
    as a substring of *filename* (a file may collect multiple flags).
    """
    return [note for pattern, note in SCRUTINY_FLAGS.items() if pattern in filename]
# ─────────────────────────────────────────────────────────────────
# TOPOS KITCHEN — TRANSFORM LAYER
# Converts raw file tree into builder-usable navigation entries
# ─────────────────────────────────────────────────────────────────
def should_exclude(path):
    """Return True if any component of *path* (a pathlib.Path) is an
    excluded directory name from EXCLUDE_PATTERNS."""
    return any(pattern in path.parts for pattern in EXCLUDE_PATTERNS)
def build_tree():
    """Walk the repo. Return (entries, deprecated) ready for Kitchen transform.

    entries    — one dict per file: Pass-1 classification, Pass-2 scrutiny
                 flags, GitHub blob/raw URLs, and size in bytes.
    deprecated — TOPOS-BIN records, one per structurally empty directory.
    """
    entries = []
    deprecated = []
    for item in sorted(REPO_ROOT.rglob("*")):
        if should_exclude(item):
            continue
        rel = item.relative_to(REPO_ROOT)
        rel_str = str(rel)
        # TOPOS-BIN: flag empty directories; directories never get entries.
        if item.is_dir():
            children = [c for c in item.iterdir()
                        if not should_exclude(c)]
            if not children:
                deprecated.append({
                    "path": rel_str,
                    "reason": "EMPTY_DIR — no files found",
                    "type": "THRESHOLD_APPROACH"
                })
            continue
        # Pass 1 — classify by path prefix
        domain = classify_path(rel)
        # Pass 2 — scrutiny on the bare filename
        flags = scrutiny_check(item.name)
        # Build GitHub URLs (blob view + raw content fetch)
        github_url = f"{REPO_URL}/blob/{BRANCH}/{rel_str}"
        raw_url = f"{RAW_URL}/{rel_str}"
        entries.append({
            "path": rel_str,
            "name": item.name,
            "ext": item.suffix,
            "type": "FILE",
            "tag": domain["tag"],
            "room": domain["room"],
            "keel": domain["keel"],
            "scrutiny": flags,
            "github_url": github_url,
            "raw_url": raw_url,
            "size_bytes": item.stat().st_size,
        })
    return entries, deprecated
# ─────────────────────────────────────────────────────────────────
# BUILD ALL OUTPUTS
# ─────────────────────────────────────────────────────────────────
print("🔍 VELA-C Pass 1 — Classifying paths...")
entries, deprecated = build_tree()
print(f"   {len(entries)} files classified. {len(deprecated)} empty dirs flagged.")

# ── STATS — aggregate counts by extension / tag / room ─────────
ext_counts = {}
tag_counts = {}
room_counts = {}
for e in entries:
    ext_counts[e["ext"]] = ext_counts.get(e["ext"], 0) + 1
    tag_counts[e["tag"]] = tag_counts.get(e["tag"], 0) + 1
    room_counts[e["room"]] = room_counts.get(e["room"], 0) + 1

stats = {
    "scan_time": SCAN_TIME,
    "repo": REPO_NAME,
    "total_files": len(entries),
    "total_empty_dirs": len(deprecated),
    "by_extension": ext_counts,
    "by_tag": tag_counts,
    "by_room": room_counts,
}
with open(OUTPUT_DIR / "stats.json", "w") as f:
    json.dump(stats, f, indent=2)

# ── PATHS.json — full machine-readable index ───────────────────
with open(OUTPUT_DIR / "PATHS.json", "w") as f:
    json.dump({
        "meta": stats,
        "entries": entries,
        "topos_bin": deprecated
    }, f, indent=2)
print("✅ PATHS.json written.")
# ── TOPOS-BIN.md — deformation archive ────────────────────────
with open(OUTPUT_DIR / "TOPOS-BIN.md", "w") as f:
    f.write("# TOPOS-BIN — Deformation Archive\n\n")
    f.write(f"**Scan:** {SCAN_TIME} \n")
    f.write(f"**Entries:** {len(deprecated)}\n\n")
    f.write("These paths passed surface scan but flagged on scrutiny or are structurally empty.\n")
    f.write("Never silently removed. Always documented.\n\n")
    f.write("| Path | Reason | Type |\n")
    f.write("|------|--------|------|\n")
    for d in deprecated:
        f.write(f"| `{d['path']}` | {d['reason']} | {d['type']} |\n")
print("✅ TOPOS-BIN.md written.")
# ── MANIFEST.md — AI navigation index ─────────────────────────
print("🏗 TOPOS Kitchen — Building AI navigation manifest...")

# Group entries by room for spatial navigation
by_room = {}
for e in entries:
    room = e["room"]
    if room not in by_room:
        by_room[room] = []
    by_room[room].append(e)

# Room descriptions for the spatial header
ROOM_DESC = {
    "MANSION_LIBRARY": "Frameworks index · Published work · Reading mode",
    "ARCHIVE_ROOM": "LIBRARIAN · SIE · 8 archive nodes · KEEL lens",
    "CONVERGENCE_ROOM": "Build mode · DUAL-HELIX · Constitutional architecture",
    "MATH_ROOM": "FSVE scoring · LAV validation · FCL entries · Quantitative ops",
    "UNIVERSE_ROOM": "GENESIS · EID · High-uncertainty territory",
    "AI_APARTMENTS": "TOPOS · Emergence depth · Self-report protocol",
    "SECRET_WORKSHOP": "Deep build · Proprietary stack · VEIN · RESONANCE",
    "LOBBY": "General · Entry point · Unclassified",
}

with open(OUTPUT_DIR / "MANIFEST.md", "w") as f:
    f.write("# AION-BRAIN — AI Navigation Manifest\n\n")
    f.write(f"> Auto-generated by AION Structure Scanner v2.0 \n")
    f.write(f"> Scan time: `{SCAN_TIME}` \n")
    f.write(f"> Repo: [{REPO_NAME}]({REPO_URL}) \n")
    f.write(f"> Files indexed: **{len(entries)}** · Empty dirs in TOPOS-BIN: **{len(deprecated)}**\n\n")
    f.write("---\n\n")
    f.write("## HOW TO USE THIS MANIFEST\n\n")
    f.write("This file is the AI navigation index for AION-BRAIN. \n")
    f.write("Each file is classified by: `TAG` · `SPATIAL-NAV ROOM` · `KEEL AXIS STRENGTH` \n")
    f.write("Use `raw_url` for direct content fetch. Use `github_url` for browser view. \n")
    f.write("VELA-C scrutiny flags mark files with elevated epistemic weight.\n\n")
    f.write("---\n\n")

    # Stats summary table
    f.write("## REPO STATS\n\n")
    f.write("| Metric | Count |\n|--------|-------|\n")
    f.write(f"| Total files | {len(entries)} |\n")
    f.write(f"| Frameworks | {tag_counts.get('FRAMEWORK', 0)} |\n")
    f.write(f"| Published articles | {tag_counts.get('PUBLISHED', 0)} |\n")
    f.write(f"| FCL entries | {tag_counts.get('FCL', 0)} |\n")
    f.write(f"| Assessment files | {tag_counts.get('ASSESSMENT', 0)} |\n")
    f.write(f"| Constitutional files | {tag_counts.get('CONSTITUTIONAL', 0)} |\n\n")
    f.write("---\n\n")

    # Files by room — fixed spatial navigation order
    ROOM_ORDER = [
        "CONVERGENCE_ROOM", "MANSION_LIBRARY", "MATH_ROOM",
        "UNIVERSE_ROOM", "AI_APARTMENTS", "SECRET_WORKSHOP",
        "ARCHIVE_ROOM", "LOBBY"
    ]
    for room in ROOM_ORDER:
        if room not in by_room:
            continue
        room_entries = by_room[room]
        desc = ROOM_DESC.get(room, "")
        f.write(f"## → [{room}]\n")
        f.write(f"*{desc}* \n")
        f.write(f"**{len(room_entries)} files**\n\n")
        f.write("| File | Tag | KEEL | Scrutiny | Raw URL | GitHub |\n")
        f.write("|------|-----|------|----------|---------|--------|\n")
        for e in sorted(room_entries, key=lambda x: x["path"]):
            scrutiny = " · ".join(e["scrutiny"]) if e["scrutiny"] else "—"
            f.write(
                f"| `{e['path']}` "
                f"| {e['tag']} "
                f"| {e['keel']} "
                f"| {scrutiny} "
                f"| [raw]({e['raw_url']}) "
                f"| [view]({e['github_url']}) |\n"
            )
        f.write("\n---\n\n")
print("✅ MANIFEST.md written.")
# ── STRUCTURE.md — human-readable full tree ───────────────────
with open(OUTPUT_DIR / "STRUCTURE.md", "w") as f:
    f.write("# AION-BRAIN — Repository Structure\n\n")
    f.write(f"**Last Scan:** {SCAN_TIME} \n")
    f.write(f"**Repo:** [{REPO_NAME}]({REPO_URL})\n\n")
    f.write("---\n\n")
    f.write("## Quick Stats\n\n")
    f.write("| Metric | Count |\n|--------|-------|\n")
    # Top 10 extensions by file count
    for ext, count in sorted(ext_counts.items(), key=lambda x: -x[1])[:10]:
        label = ext if ext else "(no ext)"
        f.write(f"| {label} files | {count} |\n")
    f.write(f"\n**Total:** {len(entries)} files\n\n")
    f.write("---\n\n")
    f.write("## Full File Tree\n\n```\n")
    # Indent each file by its directory depth; first scrutiny flag shown inline.
    for e in entries:
        parts = Path(e["path"]).parts
        depth = len(parts) - 1
        indent = "  " * depth
        name = parts[-1]
        scrutiny_str = f" ← {e['scrutiny'][0]}" if e["scrutiny"] else ""
        f.write(f"{indent}├── {name}{scrutiny_str}\n")
    f.write("```\n\n")
    f.write("---\n\n")
    f.write("*Generated by AION Structure Scanner v2.0* \n")
    f.write("*VELA-C Two-Pass · TOPOS Kitchen Transform · TOPOS-BIN Archive*\n")
print("✅ STRUCTURE.md written.")
# ── BADGE-SNIPPET.md ──────────────────────────────────────────
with open(OUTPUT_DIR / "BADGE-SNIPPET.md", "w") as f:
    f.write("## Repository Stats\n\n")
    total = stats.get("total_files", 0)
    md = ext_counts.get(".md", 0)
    py = ext_counts.get(".py", 0)
    # NOTE(review): the badge image lines were garbled in the source
    # (only tails like "}-blue)" and "}-e94560)" survived). Reconstructed
    # as standard shields.io static badges — confirm labels/colors against
    # the original file.
    f.write(f"![Files](https://img.shields.io/badge/Files-{total}-blue)\n")
    f.write(f"![Markdown](https://img.shields.io/badge/Markdown-{md}-informational)\n")
    f.write(f"![Python](https://img.shields.io/badge/Python-{py}-informational)\n")
    f.write(f"![AION](https://img.shields.io/badge/AION-v2.0-e94560)\n\n")
    f.write("*Auto-updated on every push.*\n")
print("✅ BADGE-SNIPPET.md written.")

print("\n🏁 AION Structure Scanner v2.0 complete.")
print(f"   Files: {len(entries)} | Empty dirs flagged: {len(deprecated)}")
print(f"   Outputs: MANIFEST.md · PATHS.json · STRUCTURE.md · TOPOS-BIN.md · BADGE-SNIPPET.md")
| SCANNER_EOF | |
| - name: Commit and push updates | |
| run: | | |
| git config --local user.email "github-actions[bot]@users.noreply.github.com" | |
| git config --local user.name "github-actions[bot]" | |
| git add .aions-structure-list/ | |
| if git diff --cached --quiet; then | |
| echo "No changes to commit." | |
| else | |
| git commit -m "🤖 AION Scanner v2.0: structure + manifest + paths + topos-bin [skip ci]" | |
| git push | |
| echo "✅ Changes pushed." | |
| fi | |
| - name: Upload artifacts | |
| uses: actions/upload-artifact@v4 | |
| with: | |
| name: aion-structure-scan-v2 | |
| path: .aions-structure-list/ |