Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -3,3 +3,4 @@ npm/bin/
*.exe
supermodel
cli
.supermodel/
24 changes: 21 additions & 3 deletions internal/analyze/handler.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,11 +2,14 @@ package analyze

import (
"context"
"encoding/json"
"fmt"
"io"
"os"
"path/filepath"

"github.com/supermodeltools/cli/internal/api"
"github.com/supermodeltools/cli/internal/build"
"github.com/supermodeltools/cli/internal/cache"
"github.com/supermodeltools/cli/internal/config"
"github.com/supermodeltools/cli/internal/ui"
Expand Down Expand Up @@ -38,7 +41,7 @@ func GetGraph(ctx context.Context, cfg *config.Config, dir string, force bool) (
if !force {
fingerprint, err := cache.RepoFingerprint(dir)
if err == nil {
key := cache.AnalysisKey(fingerprint, "graph")
key := cache.AnalysisKey(fingerprint, "graph", build.Version)
if g, _ := cache.Get(key); g != nil {
ui.Success("Using cached analysis (repoId: %s)", g.RepoID())
return g, key, nil
Expand Down Expand Up @@ -70,16 +73,18 @@ func GetGraph(ctx context.Context, cfg *config.Config, dir string, force bool) (

client := api.New(cfg)
spin = ui.Start("Uploading and analyzing repository…")
g, err := client.Analyze(ctx, zipPath, "analyze-"+hash[:16])
ir, err := client.AnalyzeSidecars(ctx, zipPath, "analyze-"+hash[:16])
spin.Stop()
if err != nil {
return nil, hash, err
}

g := api.GraphFromSidecarIR(ir)

// Cache under both keys: fingerprint (fast lookup) and zip hash (fallback).
fingerprint, fpErr := cache.RepoFingerprint(dir)
if fpErr == nil {
fpKey := cache.AnalysisKey(fingerprint, "graph")
fpKey := cache.AnalysisKey(fingerprint, "graph", build.Version)
if err := cache.Put(fpKey, g); err != nil {
ui.Warn("could not write cache: %v", err)
}
Expand All @@ -88,6 +93,19 @@ func GetGraph(ctx context.Context, cfg *config.Config, dir string, force bool) (
ui.Warn("could not write cache: %v", err)
}

// Also populate the sidecar cache (.supermodel/graph.json) so that
// files.Generate() called after analyze reuses this result without a
// second API upload.
sidecarCacheFile := filepath.Join(dir, ".supermodel", "graph.json")
if irJSON, marshalErr := json.MarshalIndent(ir, "", " "); marshalErr == nil {
if mkErr := os.MkdirAll(filepath.Dir(sidecarCacheFile), 0o755); mkErr == nil {
tmp := sidecarCacheFile + ".tmp"
if writeErr := os.WriteFile(tmp, irJSON, 0o644); writeErr == nil {
_ = os.Rename(tmp, sidecarCacheFile)
}
}
}

ui.Success("Analysis complete (repoId: %s)", g.RepoID())
return g, hash, nil
}
Expand Down
13 changes: 13 additions & 0 deletions internal/api/types.go
Original file line number Diff line number Diff line change
Expand Up @@ -179,6 +179,19 @@ type SidecarSubdomain struct {
KeyFiles []string `json:"keyFiles"`
}

// GraphFromSidecarIR builds a display Graph from a SidecarIR response.
// SidecarIR uses the same Node/Relationship types, so this is a zero-copy
// extraction (the slices are shared, not cloned) that also populates the
// repoId metadata field from ir.Repo.
//
// A nil ir yields a nil Graph instead of a panic, so callers that skipped
// their error check cannot crash here.
func GraphFromSidecarIR(ir *SidecarIR) *Graph {
	if ir == nil {
		return nil
	}
	return &Graph{
		Nodes:         ir.Graph.Nodes,
		Relationships: ir.Graph.Relationships,
		Metadata: map[string]any{
			"repoId": ir.Repo,
		},
	}
}

// JobResponse is the async envelope returned by the API for long-running jobs.
type JobResponse struct {
Status string `json:"status"`
Expand Down
33 changes: 32 additions & 1 deletion internal/blastradius/handler.go
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ import (
"strings"

"github.com/supermodeltools/cli/internal/api"
"github.com/supermodeltools/cli/internal/build"
"github.com/supermodeltools/cli/internal/cache"
"github.com/supermodeltools/cli/internal/config"
"github.com/supermodeltools/cli/internal/ui"
Expand All @@ -22,6 +23,25 @@ type Options struct {

// Run uploads the repo and runs impact analysis via the dedicated API endpoint.
func Run(ctx context.Context, cfg *config.Config, dir string, targets []string, opts Options) error {
targetStr := strings.Join(targets, ",")

// Fast-path: check cache by git fingerprint (skip when a diff is supplied,
// since the diff changes the result independently of repo state).
if !opts.Force && opts.Diff == "" {
if fp, err := cache.RepoFingerprint(dir); err == nil {
analysisType := "impact"
if targetStr != "" {
analysisType += ":" + targetStr
}
key := cache.AnalysisKey(fp, analysisType, build.Version)
var cached api.ImpactResult
if hit, _ := cache.GetJSON(key, &cached); hit {
ui.Success("Using cached impact analysis")
return printResults(os.Stdout, &cached, ui.ParseFormat(opts.Output))
}
}
}

spin := ui.Start("Creating repository archive…")
zipPath, err := createZip(dir)
spin.Stop()
Expand All @@ -36,7 +56,6 @@ func Run(ctx context.Context, cfg *config.Config, dir string, targets []string,
}

idempotencyKey := "impact-" + hash[:16]
targetStr := strings.Join(targets, ",")
if targetStr != "" {
idempotencyKey += "-" + targetStr
}
Expand All @@ -49,6 +68,18 @@ func Run(ctx context.Context, cfg *config.Config, dir string, targets []string,
return err
}

// Store result in cache.
if opts.Diff == "" {
if fp, err := cache.RepoFingerprint(dir); err == nil {
analysisType := "impact"
if targetStr != "" {
analysisType += ":" + targetStr
}
key := cache.AnalysisKey(fp, analysisType, build.Version)
_ = cache.PutJSON(key, result)
}
}

return printResults(os.Stdout, result, ui.ParseFormat(opts.Output))
}

Expand Down
33 changes: 33 additions & 0 deletions internal/cache/cache.go
Original file line number Diff line number Diff line change
Expand Up @@ -77,3 +77,36 @@ func Evict(hash string) error {
}
return err
}

// PutJSON serialises v as JSON and stores it under hash. Unlike Put, it works
// with any value type — useful for dead-code and blast-radius results.
//
// The entry is written to a temp file and renamed into place so readers never
// observe a partially-written cache file. On rename failure the temp file is
// removed (best-effort) rather than being left behind in the cache dir.
func PutJSON(hash string, v any) error {
	if err := os.MkdirAll(dir(), 0o700); err != nil {
		return fmt.Errorf("create cache dir: %w", err)
	}
	data, err := json.Marshal(v)
	if err != nil {
		return err
	}
	tmp := filepath.Join(dir(), hash+".json.tmp")
	if err := os.WriteFile(tmp, data, 0o600); err != nil {
		return fmt.Errorf("write cache: %w", err)
	}
	if err := os.Rename(tmp, filepath.Join(dir(), hash+".json")); err != nil {
		// Don't leak the temp file on failure; the Remove error is
		// deliberately ignored since the Rename error is what matters.
		_ = os.Remove(tmp)
		return fmt.Errorf("store cache: %w", err)
	}
	return nil
}

// GetJSON reads the cached JSON for hash and unmarshals it into v.
// Returns (true, nil) on hit, (false, nil) on miss, (false, err) on error.
func GetJSON(hash string, v any) (bool, error) {
	path := filepath.Join(dir(), hash+".json")
	data, err := os.ReadFile(path)
	switch {
	case os.IsNotExist(err):
		// A missing file is a plain cache miss, not an error.
		return false, nil
	case err != nil:
		return false, fmt.Errorf("read cache: %w", err)
	}
	if err := json.Unmarshal(data, v); err != nil {
		return false, fmt.Errorf("parse cache: %w", err)
	}
	return true, nil
}
6 changes: 4 additions & 2 deletions internal/cache/fingerprint.go
Original file line number Diff line number Diff line change
Expand Up @@ -48,8 +48,10 @@ func gitOutput(dir string, args ...string) (string, error) {
}

// AnalysisKey builds a cache key for a specific analysis type on a repo state.
func AnalysisKey(fingerprint, analysisType string) string {
// version is the CLI version string and is included so the cache is invalidated
// automatically after an upgrade.
func AnalysisKey(fingerprint, analysisType, version string) string {
h := sha256.New()
fmt.Fprintf(h, "%s\x00%s", fingerprint, analysisType)
fmt.Fprintf(h, "%s\x00%s\x00%s", fingerprint, analysisType, version)
return hex.EncodeToString(h.Sum(nil))
}
8 changes: 4 additions & 4 deletions internal/cache/fingerprint_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -98,16 +98,16 @@ func TestRepoFingerprint_NotGitRepo(t *testing.T) {

func TestAnalysisKey_DifferentTypes(t *testing.T) {
	const fp = "abc123"
	graphKey := AnalysisKey(fp, "graph", "dev")
	deadCodeKey := AnalysisKey(fp, "dead-code", "dev")
	if graphKey == deadCodeKey {
		t.Error("different analysis types should produce different keys")
	}
}

func TestAnalysisKey_Stable(t *testing.T) {
k1 := AnalysisKey("abc", "graph")
k2 := AnalysisKey("abc", "graph")
k1 := AnalysisKey("abc", "graph", "dev")
k2 := AnalysisKey("abc", "graph", "dev")
if k1 != k2 {
t.Error("same inputs should produce same key")
}
Expand Down
22 changes: 22 additions & 0 deletions internal/deadcode/handler.go
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ import (
"os"

"github.com/supermodeltools/cli/internal/api"
"github.com/supermodeltools/cli/internal/build"
"github.com/supermodeltools/cli/internal/cache"
"github.com/supermodeltools/cli/internal/config"
"github.com/supermodeltools/cli/internal/ui"
Expand All @@ -24,6 +25,21 @@ type Options struct {

// Run uploads the repo and runs dead code analysis via the dedicated API endpoint.
func Run(ctx context.Context, cfg *config.Config, dir string, opts *Options) error {
// Fast-path: check cache by git fingerprint before creating the zip.
if !opts.Force {
if fp, err := cache.RepoFingerprint(dir); err == nil {
key := cache.AnalysisKey(fp, "dead-code", build.Version)
var cached api.DeadCodeResult
if hit, _ := cache.GetJSON(key, &cached); hit {
ui.Success("Using cached dead-code analysis")
if len(opts.Ignore) > 0 {
cached.DeadCodeCandidates = filterIgnored(cached.DeadCodeCandidates, opts.Ignore)
}
return printResults(os.Stdout, &cached, ui.ParseFormat(opts.Output))
}
}
}

spin := ui.Start("Creating repository archive…")
zipPath, err := createZip(dir)
spin.Stop()
Expand All @@ -45,6 +61,12 @@ func Run(ctx context.Context, cfg *config.Config, dir string, opts *Options) err
return err
}

// Store result in cache for subsequent calls.
if fp, err := cache.RepoFingerprint(dir); err == nil {
key := cache.AnalysisKey(fp, "dead-code", build.Version)
_ = cache.PutJSON(key, result)
}

if len(opts.Ignore) > 0 {
result.DeadCodeCandidates = filterIgnored(result.DeadCodeCandidates, opts.Ignore)
}
Expand Down
44 changes: 44 additions & 0 deletions internal/files/daemon.go
Original file line number Diff line number Diff line change
Expand Up @@ -127,6 +127,8 @@ func (d *Daemon) Run(ctx context.Context) error {
debounceTimer.Stop()
}
d.logf("Shutting down...")
d.logf("Cleaning sidecar files...")
_ = Clean(context.Background(), nil, d.cfg.RepoDir, false)
return nil

case filePath, ok := <-d.notifyCh:
Expand Down Expand Up @@ -191,6 +193,11 @@ func (d *Daemon) fullGenerate(ctx context.Context) error {
d.logf("Fetching full graph from Supermodel API...")
idemKey := newUUID()

if fileList, listErr := DryRunList(d.cfg.RepoDir); listErr == nil {
stats := LanguageStats(fileList)
PrintLanguageBarChart(stats, len(fileList))
}

zipPath, err := CreateZipFile(d.cfg.RepoDir, nil)
if err != nil {
return fmt.Errorf("creating zip: %w", err)
Expand Down Expand Up @@ -506,11 +513,48 @@ func (d *Daemon) mergeGraph(incremental *api.SidecarIR, changedFiles []string) {
// contain domains classified from only the changed files, which are
// incorrect for the repo as a whole. Domains only refresh on full generate.

// Assign new files to existing domains by directory-prefix matching.
d.assignNewFilesToDomains(newNodes)

if len(extRemap) > 0 {
d.logf("Resolved %d external references to internal nodes", len(extRemap))
}
}

// assignNewFilesToDomains assigns newly merged File nodes to the best-matching
// existing domain, chosen by the longest directory prefix shared between the
// file and any of the domain's KeyFiles. Files matching no domain are left
// unassigned; domains only fully refresh on the next full generate.
//
// NOTE(review): assumes filePath props are slash-separated; the "+\"/\""
// joins would not match filepath.Dir output on Windows — confirm upstream.
func (d *Daemon) assignNewFilesToDomains(newNodes []api.Node) {
	if len(d.ir.Domains) == 0 {
		return
	}

	for _, n := range newNodes {
		if !n.HasLabel("File") {
			continue
		}
		fp := n.Prop("filePath")
		if fp == "" {
			continue
		}
		dir := filepath.Dir(fp)

		// Longest-prefix match. The trailing "/" prevents partial segment
		// matches (e.g. "internal/api" matching "internal/apiserver").
		bestDomain := -1
		bestLen := -1
		for i, domain := range d.ir.Domains {
			for _, kf := range domain.KeyFiles {
				prefix := filepath.Dir(kf)
				if strings.HasPrefix(dir+"/", prefix+"/") && len(prefix) > bestLen {
					bestLen = len(prefix)
					bestDomain = i
				}
			}
		}
		if bestDomain < 0 {
			continue
		}
		// Skip files already recorded: repeated merges of the same file
		// previously appended duplicates to KeyFiles without bound.
		duplicate := false
		for _, kf := range d.ir.Domains[bestDomain].KeyFiles {
			if kf == fp {
				duplicate = true
				break
			}
		}
		if !duplicate {
			d.ir.Domains[bestDomain].KeyFiles = append(d.ir.Domains[bestDomain].KeyFiles, fp)
		}
	}
}

// computeAffectedFiles returns changed files plus their 1-hop dependents.
func (d *Daemon) computeAffectedFiles(changedFiles []string) []string {
affected := make(map[string]bool)
Expand Down
7 changes: 6 additions & 1 deletion internal/files/handler.go
Original file line number Diff line number Diff line change
Expand Up @@ -80,6 +80,11 @@ func Generate(ctx context.Context, cfg *config.Config, dir string, opts Generate
}
}

if fileList, listErr := DryRunList(repoDir); listErr == nil {
stats := LanguageStats(fileList)
PrintLanguageBarChart(stats, len(fileList))
}

spin := ui.Start("Creating repository archive…")
zipPath, err := CreateZipFile(repoDir, nil)
spin.Stop()
Expand Down Expand Up @@ -314,7 +319,7 @@ func Hook(port int) error {
addr := fmt.Sprintf("127.0.0.1:%d", port)
conn, err := net.Dial("udp", addr)
if err != nil {
// Daemon not running — silently exit
fmt.Fprintf(os.Stderr, "[supermodel] watch daemon not running on :%d — run `supermodel watch` to enable live updates\n", port)
return nil
}
defer conn.Close()
Expand Down
Loading
Loading