diff --git a/orchestrator/README.md b/orchestrator/README.md new file mode 100644 index 000000000..d7be9972f --- /dev/null +++ b/orchestrator/README.md @@ -0,0 +1,12 @@ +Orchestrator (MVP) +- api: FastAPI service for projects/repos/branches/PRs/runs/analyses, SSE streams +- workers: simple polling workers for Codegen logs and Graph-sitter findings +- web: minimal React/Vite UI for project cards (proxy to /api) + +Run API +- see orchestrator/api/README.md + +Run Web +- cd orchestrator/web && npm install && npm run dev +- proxy routes to API at http://127.0.0.1:8001 + diff --git a/orchestrator/__init__.py b/orchestrator/__init__.py new file mode 100644 index 000000000..8b1378917 --- /dev/null +++ b/orchestrator/__init__.py @@ -0,0 +1 @@ + diff --git a/orchestrator/api/README.md b/orchestrator/api/README.md new file mode 100644 index 000000000..425cae183 --- /dev/null +++ b/orchestrator/api/README.md @@ -0,0 +1,30 @@ +Project Orchestrator API (MVP) + +Quick start +1) Python 3.12+ recommended +2) Install deps: + pip install -r orchestrator/api/requirements.txt +3) Set env: + export ORCH_DATABASE_URL=sqlite:///./orchestrator.db + export CODEGEN_API_URL=https://api.codegen.com + export CODEGEN_API_TOKEN=[REDACTED] + export CODEGEN_ORG_ID=[YOUR_ORG_ID] + export GITHUB_TOKEN=[REDACTED] + export GRAPH_SITTER_URL=http://localhost:8080 +4) Run server: + uvicorn orchestrator.api.app:app --reload --port 8001 + +Endpoints (selection) +- GET /health +- POST /projects, GET /projects +- POST /repos, GET /repos, POST /repos/pin, POST /repos/unpin +- GET /branches?repository_id=1, POST /branches +- POST /prs +- POST /runs, POST /runs/{id}/poll, GET /runs/{id}/events/stream (SSE) +- POST /analyses, POST /analyses/{id}/poll, GET /analyses/{id}/stream (SSE) + +Notes +- SQLite used for MVP; swap ORCH_DATABASE_URL to Postgres in production. +- SSE endpoints poll DB periodically; wire a worker to call /runs/{id}/poll and /analyses/{id}/poll on intervals. 
+- Graph-sitter API contract assumed; adapt adapter if your server exposes different routes. + diff --git a/orchestrator/api/__init__.py b/orchestrator/api/__init__.py new file mode 100644 index 000000000..8b1378917 --- /dev/null +++ b/orchestrator/api/__init__.py @@ -0,0 +1 @@ + diff --git a/orchestrator/api/adapters/analysis_adapter.py b/orchestrator/api/adapters/analysis_adapter.py new file mode 100644 index 000000000..f3bd9f3d5 --- /dev/null +++ b/orchestrator/api/adapters/analysis_adapter.py @@ -0,0 +1,31 @@ +from __future__ import annotations + +import os +from typing import Dict, Any, Optional + +import httpx + +GRAPH_SITTER_URL = os.getenv("GRAPH_SITTER_URL") # e.g., http://graph-sitter:8000 + + +class AnalysisAdapter: + def __init__(self): + if not GRAPH_SITTER_URL: + raise RuntimeError("GRAPH_SITTER_URL is not set") + self._client = httpx.Client(base_url=GRAPH_SITTER_URL, timeout=60.0) + + def start_analysis(self, repo_full_name: str, branch: str, snapshot_digest: Optional[str] = None) -> Dict[str, Any]: + payload = { + "repo": repo_full_name, + "branch": branch, + "snapshot": snapshot_digest, + } + r = self._client.post("/analyze", json=payload) + r.raise_for_status() + return r.json() + + def get_findings(self, analysis_id: str, skip: int = 0, limit: int = 100) -> Dict[str, Any]: + r = self._client.get(f"/analyses/{analysis_id}/findings", params={"skip": skip, "limit": limit}) + r.raise_for_status() + return r.json() + diff --git a/orchestrator/api/adapters/codegen_adapter.py b/orchestrator/api/adapters/codegen_adapter.py new file mode 100644 index 000000000..b63a61433 --- /dev/null +++ b/orchestrator/api/adapters/codegen_adapter.py @@ -0,0 +1,31 @@ +from __future__ import annotations + +import os +from typing import Any, Dict, List, Optional + +import httpx + +CODEGEN_API_URL = os.getenv("CODEGEN_API_URL", "https://api.codegen.com") +CODEGEN_API_TOKEN = os.getenv("CODEGEN_API_TOKEN") + + +class CodegenAdapter: + def __init__(self, org_id: str): + 
self.org_id = org_id + if not CODEGEN_API_TOKEN: + raise RuntimeError("CODEGEN_API_TOKEN is not set") + self._client = httpx.Client(base_url=CODEGEN_API_URL, headers={ + "Authorization": f"Bearer {CODEGEN_API_TOKEN}", + "Content-Type": "application/json", + }, timeout=30.0) + + def get_run_logs(self, agent_run_id: str, skip: int = 0, limit: int = 100) -> Dict[str, Any]: + url = f"/v1/organizations/{self.org_id}/agent/run/{agent_run_id}/logs" + params = {"skip": skip, "limit": limit} + resp = self._client.get(url, params=params) + resp.raise_for_status() + return resp.json() + + # Placeholder for potential run start / PR creation via Codegen + # def create_pr(...): pass + diff --git a/orchestrator/api/adapters/github_adapter.py b/orchestrator/api/adapters/github_adapter.py new file mode 100644 index 000000000..edb371889 --- /dev/null +++ b/orchestrator/api/adapters/github_adapter.py @@ -0,0 +1,56 @@ +from __future__ import annotations + +import os +from typing import Optional, Dict, Any + +import httpx + +GITHUB_API_URL = os.getenv("GITHUB_API_URL", "https://api.github.com") +GITHUB_TOKEN = os.getenv("GITHUB_TOKEN") + + +class GitHubAdapter: + def __init__(self): + if not GITHUB_TOKEN: + raise RuntimeError("GITHUB_TOKEN is not set") + self._client = httpx.Client(base_url=GITHUB_API_URL, headers={ + "Authorization": f"Bearer {GITHUB_TOKEN}", + "Accept": "application/vnd.github+json", + }, timeout=30.0) + + def get_repo(self, org: str, name: str) -> Dict[str, Any]: + r = self._client.get(f"/repos/{org}/{name}") + r.raise_for_status() + return r.json() + + def get_branch(self, org: str, name: str, branch: str) -> Dict[str, Any]: + r = self._client.get(f"/repos/{org}/{name}/git/ref/heads/{branch}") + r.raise_for_status() + return r.json() + + def create_branch(self, org: str, name: str, branch: str, from_ref: Optional[str] = None) -> Dict[str, Any]: + # Determine base SHA + base_branch = from_ref or self.get_repo(org, name).get("default_branch", "main") + ref_data = 
self.get_branch(org, name, base_branch) + base_sha = ref_data["object"]["sha"] + # Create new ref + payload = {"ref": f"refs/heads/{branch}", "sha": base_sha} + r = self._client.post(f"/repos/{org}/{name}/git/refs", json=payload) + r.raise_for_status() + return r.json() + + def create_pr(self, org: str, name: str, head_branch: str, base_branch: str, title: str, body: Optional[str] = None) -> Dict[str, Any]: + payload = {"title": title, "head": head_branch, "base": base_branch, "body": body or ""} + r = self._client.post(f"/repos/{org}/{name}/pulls", json=payload) + r.raise_for_status() + return r.json() + + def get_pr_checks(self, org: str, name: str, pr_number: int) -> Dict[str, Any]: + # Use the Checks API indirectly via PR HEAD SHA + pr = self._client.get(f"/repos/{org}/{name}/pulls/{pr_number}") + pr.raise_for_status() + head_sha = pr.json()["head"]["sha"] + checks = self._client.get(f"/repos/{org}/{name}/commits/{head_sha}/check-runs") + checks.raise_for_status() + return checks.json() + diff --git a/orchestrator/api/app.py b/orchestrator/api/app.py new file mode 100644 index 000000000..3c54a7ffa --- /dev/null +++ b/orchestrator/api/app.py @@ -0,0 +1,45 @@ +from __future__ import annotations + +import asyncio +from typing import AsyncGenerator + +from fastapi import FastAPI, Depends, HTTPException +from fastapi.middleware.cors import CORSMiddleware +from starlette.responses import JSONResponse +from starlette.requests import Request +from starlette.background import BackgroundTasks +from sse_starlette.sse import EventSourceResponse + +from .db import Base, engine, get_session +from .routes import projects, repos, branches, prs, runs, analyses + +app = FastAPI(title="Project Orchestrator API", version="0.1.0") + +# CORS for local dev; tighten in production +app.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + + +@app.on_event("startup") +def on_startup() -> None: + 
Base.metadata.create_all(bind=engine) + + +# Routers +app.include_router(projects.router, prefix="/projects", tags=["projects"]) +app.include_router(repos.router, prefix="/repos", tags=["repos"]) +app.include_router(branches.router, prefix="/branches", tags=["branches"]) +app.include_router(prs.router, prefix="/prs", tags=["prs"]) +app.include_router(runs.router, prefix="/runs", tags=["runs"]) +app.include_router(analyses.router, prefix="/analyses", tags=["analyses"]) + + +@app.get("/health") +async def health() -> JSONResponse: + return JSONResponse({"status": "ok"}) + diff --git a/orchestrator/api/db.py b/orchestrator/api/db.py new file mode 100644 index 000000000..c178b3c33 --- /dev/null +++ b/orchestrator/api/db.py @@ -0,0 +1,31 @@ +from __future__ import annotations + +import os +from contextlib import contextmanager +from typing import Generator + +from sqlalchemy import create_engine +from sqlalchemy.orm import sessionmaker, declarative_base, Session + +# Default to SQLite for MVP; swap to Postgres via DATABASE_URL env +DATABASE_URL = os.getenv("ORCH_DATABASE_URL", "sqlite:///./orchestrator.db") + +connect_args = {"check_same_thread": False} if DATABASE_URL.startswith("sqlite") else {} +engine = create_engine(DATABASE_URL, future=True, pool_pre_ping=True, connect_args=connect_args) +SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine, class_=Session, future=True) + +Base = declarative_base() + + +@contextmanager +def get_session() -> Generator[Session, None, None]: + session = SessionLocal() + try: + yield session + session.commit() + except Exception: + session.rollback() + raise + finally: + session.close() + diff --git a/orchestrator/api/models.py b/orchestrator/api/models.py new file mode 100644 index 000000000..e128e57e9 --- /dev/null +++ b/orchestrator/api/models.py @@ -0,0 +1,154 @@ +from __future__ import annotations + +from datetime import datetime +from typing import Optional + +from sqlalchemy import ( + Column, + DateTime, 
+ ForeignKey, + Integer, + String, + Text, + UniqueConstraint, + Boolean, +) +from sqlalchemy.orm import relationship, Mapped, mapped_column + +from .db import Base + + +class Project(Base): + __tablename__ = "projects" + id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True) + name: Mapped[str] = mapped_column(String(255), unique=True, nullable=False) + description: Mapped[Optional[str]] = mapped_column(Text, nullable=True) + created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow) + + repositories = relationship("ProjectRepository", back_populates="project", cascade="all, delete-orphan") + + +class Repository(Base): + __tablename__ = "repositories" + id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True) + provider: Mapped[str] = mapped_column(String(32), default="github") + org: Mapped[str] = mapped_column(String(255)) + name: Mapped[str] = mapped_column(String(255)) + default_branch: Mapped[str] = mapped_column(String(128), default="main") + install_id: Mapped[Optional[str]] = mapped_column(String(255), nullable=True) + visibility: Mapped[str] = mapped_column(String(32), default="private") + + __table_args__ = (UniqueConstraint("provider", "org", "name", name="uq_repo"),) + + projects = relationship("ProjectRepository", back_populates="repository", cascade="all, delete-orphan") + + +class ProjectRepository(Base): + __tablename__ = "project_repositories" + id: Mapped[int] = mapped_column(Integer, primary_key=True) + project_id: Mapped[int] = mapped_column(ForeignKey("projects.id", ondelete="CASCADE")) + repository_id: Mapped[int] = mapped_column(ForeignKey("repositories.id", ondelete="CASCADE")) + + project = relationship(Project, back_populates="repositories") + repository = relationship(Repository, back_populates="projects") + + __table_args__ = (UniqueConstraint("project_id", "repository_id", name="uq_proj_repo"),) + + +class Pin(Base): + __tablename__ = "pins" + id: Mapped[int] = 
class Pin(Base):
    """A user's pin of a repository within a project."""

    __tablename__ = "pins"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    project_id: Mapped[int] = mapped_column(ForeignKey("projects.id", ondelete="CASCADE"))
    repository_id: Mapped[int] = mapped_column(ForeignKey("repositories.id", ondelete="CASCADE"))
    user_id: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)

    __table_args__ = (UniqueConstraint("project_id", "repository_id", "user_id", name="uq_pin"),)


class Branch(Base):
    """A locally tracked branch of a repository."""

    __tablename__ = "branches"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    repository_id: Mapped[int] = mapped_column(ForeignKey("repositories.id", ondelete="CASCADE"))
    name: Mapped[str] = mapped_column(String(255))
    head_sha: Mapped[Optional[str]] = mapped_column(String(64), nullable=True)
    status: Mapped[Optional[str]] = mapped_column(String(64), nullable=True)

    __table_args__ = (UniqueConstraint("repository_id", "name", name="uq_branch"),)


class PullRequest(Base):
    """A pull request opened from a tracked branch."""

    __tablename__ = "pull_requests"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    repository_id: Mapped[int] = mapped_column(ForeignKey("repositories.id", ondelete="CASCADE"))
    number: Mapped[int] = mapped_column(Integer)
    head_branch_id: Mapped[int] = mapped_column(ForeignKey("branches.id", ondelete="SET NULL"))
    base_branch: Mapped[str] = mapped_column(String(255))
    state: Mapped[str] = mapped_column(String(64))
    checks_status: Mapped[Optional[str]] = mapped_column(String(64), nullable=True)
    web_url: Mapped[Optional[str]] = mapped_column(String(2048), nullable=True)

    __table_args__ = (UniqueConstraint("repository_id", "number", name="uq_pr"),)
class AgentRun(Base):
    """An externally executed agent run (Codegen) tracked locally."""

    __tablename__ = "agent_runs"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    provider: Mapped[str] = mapped_column(String(32), default="codegen")
    external_id: Mapped[str] = mapped_column(String(255))
    repository_id: Mapped[int] = mapped_column(ForeignKey("repositories.id", ondelete="CASCADE"))
    branch_id: Mapped[Optional[int]] = mapped_column(ForeignKey("branches.id", ondelete="SET NULL"))
    status: Mapped[str] = mapped_column(String(64), default="queued")
    web_url: Mapped[Optional[str]] = mapped_column(String(2048), nullable=True)
    created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)

    __table_args__ = (UniqueConstraint("provider", "external_id", name="uq_run_external"),)


class AgentRunEvent(Base):
    """One log event pulled from the provider for an agent run."""

    __tablename__ = "agent_run_events"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    agent_run_id: Mapped[int] = mapped_column(ForeignKey("agent_runs.id", ondelete="CASCADE"))
    created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
    type: Mapped[str] = mapped_column(String(64))  # ACTION, PLAN_EVALUATION, ERROR, FINAL_ANSWER
    tool_name: Mapped[Optional[str]] = mapped_column(String(128), nullable=True)
    thought: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
    observation: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
    tool_input: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
    tool_output: Mapped[Optional[str]] = mapped_column(Text, nullable=True)


class Snapshot(Base):
    """A point-in-time digest of a repository branch."""

    __tablename__ = "snapshots"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    repository_id: Mapped[int] = mapped_column(ForeignKey("repositories.id", ondelete="CASCADE"))
    branch_id: Mapped[Optional[int]] = mapped_column(ForeignKey("branches.id", ondelete="SET NULL"))
    digest: Mapped[str] = mapped_column(String(255))
    created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
    meta: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
class Analysis(Base):
    """One analysis run over a repository branch/snapshot."""

    __tablename__ = "analyses"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    repository_id: Mapped[int] = mapped_column(ForeignKey("repositories.id", ondelete="CASCADE"))
    branch_id: Mapped[Optional[int]] = mapped_column(ForeignKey("branches.id", ondelete="SET NULL"))
    snapshot_id: Mapped[Optional[int]] = mapped_column(ForeignKey("snapshots.id", ondelete="SET NULL"))
    status: Mapped[str] = mapped_column(String(32), default="queued")
    started_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True)
    external_id: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    finished_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True)


class Finding(Base):
    """A single analyzer finding attached to an analysis."""

    __tablename__ = "findings"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    analysis_id: Mapped[int] = mapped_column(ForeignKey("analyses.id", ondelete="CASCADE"))
    type: Mapped[str] = mapped_column(String(64))  # dead_code | relationship | lint | etc.
    file: Mapped[Optional[str]] = mapped_column(String(1024), nullable=True)
    start_line: Mapped[Optional[int]] = mapped_column(Integer, nullable=True)
    end_line: Mapped[Optional[int]] = mapped_column(Integer, nullable=True)
    symbol: Mapped[Optional[str]] = mapped_column(String(512), nullable=True)
    context: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
    # BUG FIX: was mapped_column(Integer, ...) for a float attribute, which
    # would truncate confidence scores. Let SQLAlchemy 2.0 derive the column
    # type (Float) from the Mapped[Optional[float]] annotation instead.
    confidence: Mapped[Optional[float]] = mapped_column(nullable=True)
    status: Mapped[str] = mapped_column(String(32), default="open")
asyncio +from datetime import datetime +from typing import Any, Dict + +from fastapi import APIRouter, Depends, HTTPException +from sqlalchemy.orm import Session +from sse_starlette.sse import EventSourceResponse + +from ..db import get_session +from .. import models, schemas +from ..adapters.analysis_adapter import AnalysisAdapter + +router = APIRouter() + + +@router.post("/", response_model=schemas.AnalysisOut) +def start_analysis(payload: schemas.AnalysisStart, session: Session = Depends(get_session)): + repo = session.get(models.Repository, payload.repository_id) + if not repo: + raise HTTPException(status_code=404, detail="Repository not found") + branch = session.get(models.Branch, payload.branch_id) if payload.branch_id else None + branch_name = branch.name if branch else repo.default_branch + + # Call analysis server + try: + adapter = AnalysisAdapter() + res = adapter.start_analysis(f"{repo.org}/{repo.name}", branch_name, None) + external_id = str(res.get("id") or res.get("analysis_id") or "") + except Exception as e: + external_id = "" + + analysis = models.Analysis( + repository_id=repo.id, + branch_id=payload.branch_id, + snapshot_id=payload.snapshot_id, + status="running", + started_at=datetime.utcnow(), + external_id=external_id or None, + ) + session.add(analysis) + session.flush() + return analysis + + +@router.post("/{analysis_id}/poll", response_model=dict) +def poll_findings(analysis_id: int, skip: int = 0, limit: int = 100, session: Session = Depends(get_session)): + analysis = session.get(models.Analysis, analysis_id) + if not analysis: + raise HTTPException(status_code=404, detail="Analysis not found") + if not analysis.external_id: + return {"stored": 0, "status": analysis.status} + + adapter = AnalysisAdapter() + try: + data = adapter.get_findings(analysis.external_id, skip=skip, limit=limit) + except Exception as e: + raise HTTPException(status_code=500, detail=f"Analysis fetch failed: {e}") + + stored = 0 + for f in data.get("findings", 
[]): + finding = models.Finding( + analysis_id=analysis.id, + type=str(f.get("type") or "unknown"), + file=f.get("file"), + start_line=f.get("start_line"), + end_line=f.get("end_line"), + symbol=f.get("symbol"), + context=str(f.get("context") or ""), + confidence=f.get("confidence"), + status="open", + ) + session.add(finding) + stored += 1 + session.flush() + + return {"stored": stored, "status": analysis.status} + + +@router.get("/{analysis_id}/stream") +async def stream_findings(analysis_id: int, session: Session = Depends(get_session)): + analysis = session.get(models.Analysis, analysis_id) + if not analysis: + raise HTTPException(status_code=404, detail="Analysis not found") + + async def gen(): + last_id = 0 + while True: + records = ( + session.query(models.Finding) + .filter(models.Finding.analysis_id == analysis.id, models.Finding.id > last_id) + .order_by(models.Finding.id.asc()) + .all() + ) + for f in records: + last_id = f.id + yield { + "event": f.type, + "id": str(f.id), + "data": { + "file": f.file, + "range": [f.start_line, f.end_line], + "symbol": f.symbol, + "context": f.context, + "confidence": f.confidence, + "status": f.status, + }, + } + await asyncio.sleep(1.0) + + return EventSourceResponse(gen()) + diff --git a/orchestrator/api/routes/branches.py b/orchestrator/api/routes/branches.py new file mode 100644 index 000000000..87dcc77c3 --- /dev/null +++ b/orchestrator/api/routes/branches.py @@ -0,0 +1,51 @@ +from __future__ import annotations + +from fastapi import APIRouter, Depends, HTTPException +from sqlalchemy.orm import Session + +from ..db import get_session +from .. 
from __future__ import annotations

from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.orm import Session

from ..db import get_session
from .. import models, schemas
from ..adapters.github_adapter import GitHubAdapter

router = APIRouter()


@router.get("/", response_model=list[schemas.BranchOut])
def list_branches(repository_id: int, session: Session = Depends(get_session)):
    """List locally tracked branches for a repository."""
    return session.query(models.Branch).filter(models.Branch.repository_id == repository_id).all()


@router.post("/", response_model=schemas.BranchOut)
def create_branch(payload: schemas.BranchCreate, session: Session = Depends(get_session)):
    """Create the branch on GitHub (best effort) and persist a tracking row."""
    repo = session.get(models.Repository, payload.repository_id)
    if not repo:
        raise HTTPException(status_code=404, detail="Repository not found")

    head_sha = None
    try:
        gh = GitHubAdapter()
        gh.create_branch(repo.org, repo.name, payload.name, payload.from_ref or repo.default_branch)
        # Fetch the head SHA for the freshly created branch.
        head_sha = gh.get_branch(repo.org, repo.name, payload.name)["object"]["sha"]
    except Exception:
        # GitHub failure is tolerated: keep a local record without head_sha.
        pass

    existing = (
        session.query(models.Branch)
        .filter(models.Branch.repository_id == repo.id, models.Branch.name == payload.name)
        .first()
    )
    if existing:
        if head_sha:
            existing.head_sha = head_sha
        return existing

    record = models.Branch(repository_id=repo.id, name=payload.name, head_sha=head_sha, status=None)
    session.add(record)
    session.flush()
    return record
from __future__ import annotations

from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.orm import Session

from ..db import get_session
from .. import models, schemas

router = APIRouter()


@router.post("/", response_model=schemas.ProjectOut)
def create_project(payload: schemas.ProjectCreate, session: Session = Depends(get_session)):
    """Create a project; names are unique."""
    duplicate = session.query(models.Project).filter(models.Project.name == payload.name).first()
    if duplicate:
        raise HTTPException(status_code=400, detail="Project name already exists")
    project = models.Project(name=payload.name, description=payload.description)
    session.add(project)
    session.flush()
    return project


@router.get("/", response_model=list[schemas.ProjectOut])
def list_projects(session: Session = Depends(get_session)):
    """List projects, newest first."""
    return session.query(models.Project).order_by(models.Project.created_at.desc()).all()


@router.post("/{project_id}/link", response_model=dict)
def link_repository(project_id: int, link: schemas.ProjectRepositoryLink, session: Session = Depends(get_session)):
    """Attach a repository to a project (idempotent)."""
    project = session.get(models.Project, project_id)
    repo = session.get(models.Repository, link.repository_id)
    if not project or not repo:
        raise HTTPException(status_code=404, detail="Project or repository not found")
    already = (
        session.query(models.ProjectRepository)
        .filter(
            models.ProjectRepository.project_id == project_id,
            models.ProjectRepository.repository_id == link.repository_id,
        )
        .first()
    )
    if already:
        return {"status": "already_linked"}
    session.add(models.ProjectRepository(project_id=project_id, repository_id=link.repository_id))
    return {"status": "linked"}
from __future__ import annotations

from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.orm import Session

from ..db import get_session
from .. import models, schemas
from ..adapters.github_adapter import GitHubAdapter

router = APIRouter()


@router.post("/", response_model=schemas.PROut)
def open_pr(payload: schemas.PROpenRequest, session: Session = Depends(get_session)):
    """Open a PR on GitHub and record it (plus its head branch) locally."""
    repo = session.get(models.Repository, payload.repository_id)
    if not repo:
        raise HTTPException(status_code=404, detail="Repository not found")

    try:
        gh = GitHubAdapter()
        pr_json = gh.create_pr(repo.org, repo.name, payload.head_branch, payload.base_branch, payload.title, payload.body)
        pr_number = pr_json["number"]
        web_url = pr_json.get("html_url")
        # Map head_branch -> local branch row, creating one if missing.
        branch = (
            session.query(models.Branch)
            .filter(models.Branch.repository_id == repo.id, models.Branch.name == payload.head_branch)
            .first()
        )
        if not branch:
            branch = models.Branch(repository_id=repo.id, name=payload.head_branch)
            session.add(branch)
            session.flush()
        record = models.PullRequest(
            repository_id=repo.id,
            number=pr_number,
            head_branch_id=branch.id,
            base_branch=payload.base_branch,
            state=pr_json.get("state", "open"),
            checks_status=None,
            web_url=web_url,
        )
        session.add(record)
        session.flush()
        return record
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to open PR: {e}")
from __future__ import annotations

from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.orm import Session

from ..db import get_session
from .. import models, schemas

router = APIRouter()


@router.post("/", response_model=schemas.RepositoryOut)
def create_repo(payload: schemas.RepositoryCreate, session: Session = Depends(get_session)):
    """Register a repository; returns the existing row when already present."""
    existing = (
        session.query(models.Repository)
        .filter(
            models.Repository.provider == payload.provider,
            models.Repository.org == payload.org,
            models.Repository.name == payload.name,
        )
        .first()
    )
    if existing:
        return existing
    repo = models.Repository(
        provider=payload.provider,
        org=payload.org,
        name=payload.name,
        default_branch=payload.default_branch,
        visibility=payload.visibility,
    )
    session.add(repo)
    session.flush()
    return repo


@router.get("/", response_model=list[schemas.RepositoryOut])
def list_repos(session: Session = Depends(get_session)):
    """List all registered repositories."""
    return session.query(models.Repository).all()


def _find_pin(pin: schemas.PinCreate, session: Session):
    # Shared lookup for pin/unpin: one pin per (project, repo, user).
    return (
        session.query(models.Pin)
        .filter(
            models.Pin.project_id == pin.project_id,
            models.Pin.repository_id == pin.repository_id,
            models.Pin.user_id == pin.user_id,
        )
        .first()
    )


@router.post("/pin", response_model=dict)
def pin_repo(pin: schemas.PinCreate, session: Session = Depends(get_session)):
    """Pin a repository for a user within a project (idempotent)."""
    if _find_pin(pin, session):
        return {"status": "already_pinned"}
    session.add(models.Pin(project_id=pin.project_id, repository_id=pin.repository_id, user_id=pin.user_id))
    return {"status": "pinned"}


@router.post("/unpin", response_model=dict)
def unpin_repo(pin: schemas.PinCreate, session: Session = Depends(get_session)):
    """Remove a user's pin if present (idempotent)."""
    existing = _find_pin(pin, session)
    if not existing:
        return {"status": "not_pinned"}
    session.delete(existing)
    return {"status": "unpinned"}
b/orchestrator/api/routes/runs.py @@ -0,0 +1,99 @@ +from __future__ import annotations + +import asyncio +import os +from typing import Any, Dict + +from fastapi import APIRouter, Depends, HTTPException +from sqlalchemy.orm import Session +from sse_starlette.sse import EventSourceResponse + +from ..db import get_session +from .. import models, schemas +from ..adapters.codegen_adapter import CodegenAdapter + +router = APIRouter() + +CODEGEN_ORG_ID = os.getenv("CODEGEN_ORG_ID") + + +@router.post("/", response_model=schemas.AgentRunOut) +def register_run(payload: schemas.AgentRunCreate, session: Session = Depends(get_session)): + repo = session.get(models.Repository, payload.repository_id) + if not repo: + raise HTTPException(status_code=404, detail="Repository not found") + run = models.AgentRun( + provider=payload.provider, + external_id=payload.external_id, + repository_id=repo.id, + branch_id=payload.branch_id, + status="registered", + web_url=None, + ) + session.add(run) + session.flush() + return run + + +@router.post("/{run_id}/poll", response_model=dict) +def poll_run_logs(run_id: int, skip: int = 0, limit: int = 100, session: Session = Depends(get_session)): + run = session.get(models.AgentRun, run_id) + if not run: + raise HTTPException(status_code=404, detail="Run not found") + if run.provider != "codegen": + raise HTTPException(status_code=400, detail="Only codegen runs supported") + org = CODEGEN_ORG_ID + if not org: + raise HTTPException(status_code=400, detail="CODEGEN_ORG_ID env not set") + adapter = CodegenAdapter(org) + data = adapter.get_run_logs(run.external_id, skip=skip, limit=limit) + total = 0 + for log in data.get("logs", []): + evt = models.AgentRunEvent( + agent_run_id=run.id, + type=log.get("message_type", "ACTION"), + tool_name=log.get("tool_name"), + thought=log.get("thought"), + observation=str(log.get("observation")), + tool_input=str(log.get("tool_input")), + tool_output=str(log.get("tool_output")), + ) + session.add(evt) + total += 1 
+ session.flush() + return {"stored": total, "status": data.get("status"), "total_logs": data.get("total_logs")} + + +@router.get("/{run_id}/events/stream") +async def stream_run_events(run_id: int, session: Session = Depends(get_session)): + run = session.get(models.AgentRun, run_id) + if not run: + raise HTTPException(status_code=404, detail="Run not found") + + async def gen(): + last_id = 0 + while True: + # Poll DB for new events + events = ( + session.query(models.AgentRunEvent) + .filter(models.AgentRunEvent.agent_run_id == run.id, models.AgentRunEvent.id > last_id) + .order_by(models.AgentRunEvent.id.asc()) + .all() + ) + for e in events: + last_id = e.id + yield { + "event": e.type.lower(), + "id": str(e.id), + "data": { + "tool": e.tool_name, + "thought": e.thought, + "observation": e.observation, + "tool_input": e.tool_input, + "tool_output": e.tool_output, + }, + } + await asyncio.sleep(1.0) + + return EventSourceResponse(gen()) + diff --git a/orchestrator/api/schemas.py b/orchestrator/api/schemas.py new file mode 100644 index 000000000..2b4d02dbd --- /dev/null +++ b/orchestrator/api/schemas.py @@ -0,0 +1,148 @@ +from __future__ import annotations + +from datetime import datetime +from typing import Optional, List + +from pydantic import BaseModel, Field + + +# Projects +class ProjectCreate(BaseModel): + name: str + description: Optional[str] = None + + +class ProjectOut(BaseModel): + id: int + name: str + description: Optional[str] = None + created_at: datetime + + class Config: + from_attributes = True + + +# Repositories +class RepositoryCreate(BaseModel): + provider: str = "github" + org: str + name: str + default_branch: str = "main" + visibility: str = "private" + + +class RepositoryOut(BaseModel): + id: int + provider: str + org: str + name: str + default_branch: str + visibility: str + + class Config: + from_attributes = True + + +class ProjectRepositoryLink(BaseModel): + project_id: int + repository_id: int + + +class PinCreate(BaseModel): + 
class PinCreate(BaseModel):
    """Pin/unpin request: one pin per (project, repository, user)."""
    project_id: int
    repository_id: int
    user_id: Optional[str] = None


class BranchCreate(BaseModel):
    repository_id: int
    name: str
    from_ref: Optional[str] = None  # default branch if None


class BranchOut(BaseModel):
    id: int
    repository_id: int
    name: str
    head_sha: Optional[str] = None
    status: Optional[str] = None

    class Config:
        from_attributes = True


class PROpenRequest(BaseModel):
    repository_id: int
    head_branch: str
    base_branch: str
    title: str
    body: Optional[str] = None


class PROut(BaseModel):
    id: int
    repository_id: int
    number: int
    head_branch_id: int
    base_branch: str
    state: str
    checks_status: Optional[str] = None
    web_url: Optional[str] = None

    class Config:
        from_attributes = True


class AgentRunCreate(BaseModel):
    provider: str = "codegen"
    external_id: str
    repository_id: int
    branch_id: Optional[int] = None


class AgentRunOut(BaseModel):
    id: int
    provider: str
    external_id: str
    repository_id: int
    branch_id: Optional[int]
    status: str
    web_url: Optional[str]
    created_at: datetime

    class Config:
        from_attributes = True


class AnalysisStart(BaseModel):
    repository_id: int
    branch_id: Optional[int] = None
    snapshot_id: Optional[int] = None


class AnalysisOut(BaseModel):
    id: int
    repository_id: int
    branch_id: Optional[int]
    snapshot_id: Optional[int]
    status: str

    class Config:
        from_attributes = True


class FindingOut(BaseModel):
    id: int
    analysis_id: int
    type: str
    file: Optional[str] = None
    start_line: Optional[int] = None
    end_line: Optional[int] = None
    symbol: Optional[str] = None
    context: Optional[str] = None
    confidence: Optional[float] = None
    status: str

    class Config:
        from_attributes = True
+ + +{project.description || 'No description'}
+