diff --git a/compact/compact_test.go b/compact/compact_test.go new file mode 100644 index 0000000..f03e762 --- /dev/null +++ b/compact/compact_test.go @@ -0,0 +1,225 @@ +package compact + +import ( + "context" + "path/filepath" + "testing" + + "github.com/google/uuid" + "github.com/GrayCodeAI/yaad/storage" +) + +func setupStore(t *testing.T) storage.Storage { + t.Helper() + dir := t.TempDir() + store, err := storage.NewStore(filepath.Join(dir, "test.db")) + if err != nil { + t.Fatal(err) + } + t.Cleanup(func() { store.Close() }) + return store +} + +func TestNeedsCompaction_EmptyProject(t *testing.T) { + store := setupStore(t) + c := New(store, 100) + ctx := context.Background() + + needs, tokens := c.NeedsCompaction(ctx, "proj1") + if needs { + t.Error("empty project should not need compaction") + } + if tokens != 0 { + t.Errorf("expected 0 tokens, got %d", tokens) + } +} + +func TestNeedsCompaction_OverBudget(t *testing.T) { + store := setupStore(t) + ctx := context.Background() + + // Create nodes with enough content to exceed a low token budget + for i := 0; i < 5; i++ { + content := make([]byte, 200) + for j := range content { + content[j] = 'a' + } + _ = store.CreateNode(ctx, &storage.Node{ + ID: uuid.New().String(), + Type: "convention", + Content: string(content), + ContentHash: uuid.New().String(), + Scope: "project", + Project: "proj1", + Confidence: 0.3, + Version: 1, + }) + } + + // 5 nodes * 200 chars / 4 = 250 tokens. Budget = 100. 
+ c := New(store, 100) + needs, tokens := c.NeedsCompaction(ctx, "proj1") + if !needs { + t.Errorf("expected compaction needed, tokens=%d", tokens) + } + if tokens < 200 { + t.Errorf("expected tokens >= 200, got %d", tokens) + } +} + +func TestCompact_NoNodesNoError(t *testing.T) { + store := setupStore(t) + c := New(store, 100) + ctx := context.Background() + + count, err := c.Compact(ctx, "empty-project") + if err != nil { + t.Fatalf("Compact on empty project: %v", err) + } + if count != 0 { + t.Errorf("expected 0 compacted, got %d", count) + } +} + +func TestCompact_SkipsHighConfidenceNodes(t *testing.T) { + store := setupStore(t) + ctx := context.Background() + + // Create high-confidence nodes - should not be compacted + for i := 0; i < 5; i++ { + _ = store.CreateNode(ctx, &storage.Node{ + ID: uuid.New().String(), + Type: "convention", + Content: "high confidence content", + ContentHash: uuid.New().String(), + Scope: "project", + Project: "proj1", + Confidence: 0.9, + AccessCount: 10, + Version: 1, + }) + } + + c := New(store, 10) + count, err := c.Compact(ctx, "proj1") + if err != nil { + t.Fatalf("Compact: %v", err) + } + if count != 0 { + t.Errorf("high-confidence nodes should not be compacted, got %d", count) + } +} + +func TestCompact_CompactsLowConfidenceNodes(t *testing.T) { + store := setupStore(t) + ctx := context.Background() + + // Create 4 low-confidence convention nodes (> 3 threshold) + var ids []string + for i := 0; i < 4; i++ { + id := uuid.New().String() + ids = append(ids, id) + _ = store.CreateNode(ctx, &storage.Node{ + ID: id, + Type: "convention", + Content: "low confidence content item", + ContentHash: uuid.New().String(), + Scope: "project", + Project: "proj1", + Confidence: 0.3, + AccessCount: 1, + Version: 1, + }) + } + + c := New(store, 10) + count, err := c.Compact(ctx, "proj1") + if err != nil { + t.Fatalf("Compact: %v", err) + } + if count != 4 { + t.Errorf("expected 4 compacted nodes, got %d", count) + } + + // Verify compacted 
nodes have confidence 0 + for _, id := range ids { + n, _ := store.GetNode(ctx, id) + if n != nil && n.Confidence != 0 { + t.Errorf("node %s should have confidence 0 after compaction, got %f", id, n.Confidence) + } + } +} + +func TestCompact_SkipsAnchorTypes(t *testing.T) { + store := setupStore(t) + ctx := context.Background() + + // Create low-confidence "file" nodes - should NOT be compacted (anchor type) + for i := 0; i < 5; i++ { + _ = store.CreateNode(ctx, &storage.Node{ + ID: uuid.New().String(), + Type: "file", + Content: "file anchor content", + ContentHash: uuid.New().String(), + Scope: "project", + Project: "proj1", + Confidence: 0.1, + AccessCount: 0, + Version: 1, + }) + } + + c := New(store, 10) + count, err := c.Compact(ctx, "proj1") + if err != nil { + t.Fatalf("Compact: %v", err) + } + if count != 0 { + t.Errorf("file/entity/session types should be skipped, got %d", count) + } +} + +func TestCompact_CancelledContext(t *testing.T) { + store := setupStore(t) + c := New(store, 100) + ctx, cancel := context.WithCancel(context.Background()) + cancel() + + _, err := c.Compact(ctx, "proj1") + if err == nil { + t.Error("expected error from cancelled context") + } +} + +func TestDefaultSummarizer(t *testing.T) { + s := DefaultSummarizer{} + ctx := context.Background() + + contents := []string{"item one", "item two", "item three"} + summary, err := s.Summarize(ctx, "convention", contents) + if err != nil { + t.Fatalf("Summarize: %v", err) + } + if summary == "" { + t.Error("expected non-empty summary") + } + if len(summary) == 0 { + t.Error("summary should contain content") + } +} + +func TestNew_DefaultMaxTokens(t *testing.T) { + store := setupStore(t) + c := New(store, 0) + if c.maxTokens != 50000 { + t.Errorf("expected default maxTokens=50000, got %d", c.maxTokens) + } +} + +func TestNew_NegativeMaxTokens(t *testing.T) { + store := setupStore(t) + c := New(store, -10) + if c.maxTokens != 50000 { + t.Errorf("expected default maxTokens=50000 for negative 
input, got %d", c.maxTokens) + } +} diff --git a/exportimport/export_test.go b/exportimport/export_test.go new file mode 100644 index 0000000..5c8a787 --- /dev/null +++ b/exportimport/export_test.go @@ -0,0 +1,264 @@ +package exportimport + +import ( + "context" + "encoding/json" + "os" + "path/filepath" + "strings" + "testing" + + "github.com/google/uuid" + "github.com/GrayCodeAI/yaad/storage" +) + +func setupStore(t *testing.T) storage.Storage { + t.Helper() + dir := t.TempDir() + store, err := storage.NewStore(filepath.Join(dir, "test.db")) + if err != nil { + t.Fatal(err) + } + t.Cleanup(func() { store.Close() }) + return store +} + +func TestExportJSON_EmptyProject(t *testing.T) { + store := setupStore(t) + ctx := context.Background() + + data, err := ExportJSON(ctx, store, "empty-project") + if err != nil { + t.Fatalf("ExportJSON: %v", err) + } + + var export GraphExport + if err := json.Unmarshal(data, &export); err != nil { + t.Fatalf("unmarshal: %v", err) + } + if export.Version != "1.0" { + t.Errorf("expected version 1.0, got %s", export.Version) + } + if len(export.Nodes) != 0 { + t.Errorf("expected 0 nodes, got %d", len(export.Nodes)) + } +} + +func TestExportJSON_WithNodes(t *testing.T) { + store := setupStore(t) + ctx := context.Background() + + node := &storage.Node{ + ID: uuid.New().String(), + Type: "decision", + Content: "Use SQLite for persistence", + ContentHash: "hash1", + Scope: "project", + Project: "myproj", + Confidence: 0.9, + Version: 1, + } + if err := store.CreateNode(ctx, node); err != nil { + t.Fatal(err) + } + + data, err := ExportJSON(ctx, store, "myproj") + if err != nil { + t.Fatalf("ExportJSON: %v", err) + } + + var export GraphExport + if err := json.Unmarshal(data, &export); err != nil { + t.Fatalf("unmarshal: %v", err) + } + if len(export.Nodes) != 1 { + t.Fatalf("expected 1 node, got %d", len(export.Nodes)) + } + if export.Nodes[0].Content != "Use SQLite for persistence" { + t.Errorf("unexpected content: %s", 
export.Nodes[0].Content) + } +} + +func TestImportJSON_RoundTrip(t *testing.T) { + store := setupStore(t) + ctx := context.Background() + + // Create a node and export + node := &storage.Node{ + ID: uuid.New().String(), + Type: "convention", + Content: "Always use gofmt", + ContentHash: "hash2", + Scope: "project", + Project: "proj1", + Confidence: 0.8, + Version: 1, + } + _ = store.CreateNode(ctx, node) + + data, _ := ExportJSON(ctx, store, "proj1") + + // Import into a fresh store + store2 := setupStore(t) + nodes, edges, err := ImportJSON(ctx, store2, data) + if err != nil { + t.Fatalf("ImportJSON: %v", err) + } + if nodes != 1 { + t.Errorf("expected 1 node imported, got %d", nodes) + } + if edges != 0 { + t.Errorf("expected 0 edges imported, got %d", edges) + } + + // Verify node exists in store2 + got, err := store2.GetNode(ctx, node.ID) + if err != nil { + t.Fatalf("GetNode: %v", err) + } + if got.Content != "Always use gofmt" { + t.Errorf("unexpected content: %s", got.Content) + } +} + +func TestImportJSON_InvalidJSON(t *testing.T) { + store := setupStore(t) + ctx := context.Background() + + _, _, err := ImportJSON(ctx, store, []byte("not json")) + if err == nil { + t.Error("expected error for invalid JSON") + } +} + +func TestImportJSON_DuplicateSkipped(t *testing.T) { + store := setupStore(t) + ctx := context.Background() + + node := &storage.Node{ + ID: uuid.New().String(), + Type: "decision", + Content: "duplicate test", + ContentHash: "hash3", + Scope: "project", + Project: "proj1", + Version: 1, + } + _ = store.CreateNode(ctx, node) + + data, _ := ExportJSON(ctx, store, "proj1") + + // Import the same data again - duplicates should be skipped + nodes, _, err := ImportJSON(ctx, store, data) + if err != nil { + t.Fatalf("ImportJSON: %v", err) + } + if nodes != 0 { + t.Errorf("expected 0 nodes on re-import (duplicates), got %d", nodes) + } +} + +func TestExportMarkdown_EmptyProject(t *testing.T) { + store := setupStore(t) + ctx := context.Background() + + 
md, err := ExportMarkdown(ctx, store, "empty") + if err != nil { + t.Fatalf("ExportMarkdown: %v", err) + } + if !strings.Contains(md, "# Yaad Memory Export") { + t.Error("expected header in markdown output") + } + if !strings.Contains(md, "empty") { + t.Error("expected project name in markdown output") + } +} + +func TestExportMarkdown_WithNodes(t *testing.T) { + store := setupStore(t) + ctx := context.Background() + + _ = store.CreateNode(ctx, &storage.Node{ + ID: uuid.New().String(), + Type: "convention", + Content: "Use tabs not spaces", + ContentHash: "hash4", + Scope: "project", + Project: "proj1", + Confidence: 0.75, + Tags: "style,format", + Version: 1, + }) + + md, err := ExportMarkdown(ctx, store, "proj1") + if err != nil { + t.Fatalf("ExportMarkdown: %v", err) + } + if !strings.Contains(md, "Conventions") { + t.Error("expected Conventions section header") + } + if !strings.Contains(md, "Use tabs not spaces") { + t.Error("expected node content in markdown") + } + if !strings.Contains(md, "style,format") { + t.Error("expected tags in markdown") + } +} + +func TestExportObsidian_CreatesFiles(t *testing.T) { + store := setupStore(t) + ctx := context.Background() + + _ = store.CreateNode(ctx, &storage.Node{ + ID: uuid.New().String(), + Type: "decision", + Content: "Use Obsidian for notes", + ContentHash: "hash5", + Scope: "project", + Project: "proj1", + Confidence: 0.8, + Version: 1, + }) + + vaultDir := filepath.Join(t.TempDir(), "vault") + written, err := ExportObsidian(ctx, store, "proj1", vaultDir) + if err != nil { + t.Fatalf("ExportObsidian: %v", err) + } + if written != 1 { + t.Errorf("expected 1 file written, got %d", written) + } + + // Verify the vault directory was created + entries, err := os.ReadDir(vaultDir) + if err != nil { + t.Fatalf("ReadDir: %v", err) + } + if len(entries) != 1 { + t.Errorf("expected 1 file in vault, got %d", len(entries)) + } +} + +func TestExportObsidian_RelativePathError(t *testing.T) { + store := setupStore(t) + ctx := 
context.Background() + + _, err := ExportObsidian(ctx, store, "proj1", "relative/path") + if err == nil { + t.Error("expected error for relative path") + } +} + +func TestExportObsidian_EmptyProject(t *testing.T) { + store := setupStore(t) + ctx := context.Background() + + vaultDir := filepath.Join(t.TempDir(), "vault") + written, err := ExportObsidian(ctx, store, "empty", vaultDir) + if err != nil { + t.Fatalf("ExportObsidian: %v", err) + } + if written != 0 { + t.Errorf("expected 0 files for empty project, got %d", written) + } +} diff --git a/git/watcher_test.go b/git/watcher_test.go new file mode 100644 index 0000000..03d6b32 --- /dev/null +++ b/git/watcher_test.go @@ -0,0 +1,177 @@ +package git + +import ( + "context" + "os" + "os/exec" + "path/filepath" + "testing" + "time" + + "github.com/google/uuid" + "github.com/GrayCodeAI/yaad/graph" + "github.com/GrayCodeAI/yaad/storage" +) + +func setupTestRepo(t *testing.T) (string, storage.Storage, graph.Graph) { + t.Helper() + dir := t.TempDir() + + // Initialize a git repo + cmd := exec.Command("git", "init", dir) + if out, err := cmd.CombinedOutput(); err != nil { + t.Fatalf("git init: %s: %v", out, err) + } + + // Configure git user for commits + cmd = exec.Command("git", "-C", dir, "config", "user.email", "test@test.com") + cmd.Run() + cmd = exec.Command("git", "-C", dir, "config", "user.name", "Test") + cmd.Run() + + // Create a file and initial commit + if err := os.WriteFile(filepath.Join(dir, "main.go"), []byte("package main\n"), 0644); err != nil { + t.Fatal(err) + } + exec.Command("git", "-C", dir, "add", ".").Run() + exec.Command("git", "-C", dir, "commit", "-m", "init").Run() + + dbDir := t.TempDir() + store, err := storage.NewStore(filepath.Join(dbDir, "test.db")) + if err != nil { + t.Fatal(err) + } + t.Cleanup(func() { store.Close() }) + + g := graph.New(store, store.DB()) + return dir, store, g +} + +func TestNew_ValidDirectory(t *testing.T) { + dir, store, g := setupTestRepo(t) + + w, err := 
New(store, g, dir) + if err != nil { + t.Fatalf("New: %v", err) + } + if w == nil { + t.Error("expected non-nil watcher") + } +} + +func TestNew_NonExistentDirectory(t *testing.T) { + dir := t.TempDir() + store, _ := storage.NewStore(filepath.Join(dir, "test.db")) + defer store.Close() + g := graph.New(store, store.DB()) + + _, err := New(store, g, "/nonexistent/path/xyz123") + if err == nil { + t.Error("expected error for non-existent directory") + } +} + +func TestNew_FileNotDirectory(t *testing.T) { + dir := t.TempDir() + filePath := filepath.Join(dir, "file.txt") + os.WriteFile(filePath, []byte("hello"), 0644) + + store, _ := storage.NewStore(filepath.Join(dir, "test.db")) + defer store.Close() + g := graph.New(store, store.DB()) + + _, err := New(store, g, filePath) + if err == nil { + t.Error("expected error for file (not directory)") + } +} + +func TestCurrentHash_ValidRepo(t *testing.T) { + dir, _, _ := setupTestRepo(t) + + hash := CurrentHash(dir) + if hash == "" { + t.Error("expected non-empty hash for valid git repo") + } + if len(hash) != 40 { + t.Errorf("expected 40-char git hash, got %d chars: %s", len(hash), hash) + } +} + +func TestCurrentHash_NonGitDir(t *testing.T) { + dir := t.TempDir() + hash := CurrentHash(dir) + if hash != "" { + t.Errorf("expected empty hash for non-git dir, got %s", hash) + } +} + +func TestWatchFile(t *testing.T) { + dir, store, g := setupTestRepo(t) + + w, err := New(store, g, dir) + if err != nil { + t.Fatal(err) + } + + ctx := context.Background() + nodeID := uuid.New().String() + err = w.WatchFile(ctx, "main.go", nodeID, "abc123") + if err != nil { + t.Fatalf("WatchFile: %v", err) + } +} + +func TestStalesSince_NoChanges(t *testing.T) { + dir, store, g := setupTestRepo(t) + + w, err := New(store, g, dir) + if err != nil { + t.Fatal(err) + } + + ctx := context.Background() + // Use a time in the future so no commits qualify + reports, err := w.StalesSince(ctx, time.Now().Add(time.Hour)) + if err != nil { + 
t.Fatalf("StalesSince: %v", err) + } + if len(reports) != 0 { + t.Errorf("expected 0 stale reports for future time, got %d", len(reports)) + } +} + +func TestStalesSince_WithRecentChange(t *testing.T) { + dir, store, g := setupTestRepo(t) + + ctx := context.Background() + + // Add a node related to main.go + node := &storage.Node{ + ID: uuid.New().String(), + Type: "file", + Content: "main.go", + ContentHash: "fhash1", + Scope: "project", + Project: "proj1", + Version: 1, + } + _ = store.CreateNode(ctx, node) + _ = g.AddNode(ctx, node) + + w, err := New(store, g, dir) + if err != nil { + t.Fatal(err) + } + + // Check stale since before the initial commit + since := time.Now().Add(-time.Hour) + reports, err := w.StalesSince(ctx, since) + if err != nil { + t.Fatalf("StalesSince: %v", err) + } + // Reports depend on graph.Impact matching file names to nodes + // The test verifies the function runs without error + _ = reports +} + diff --git a/go.mod b/go.mod index 87794db..32971e7 100644 --- a/go.mod +++ b/go.mod @@ -1,6 +1,6 @@ module github.com/GrayCodeAI/yaad -go 1.25.5 +go 1.26.1 require ( github.com/BurntSushi/toml v1.5.0 diff --git a/ingest/dual_stream_test.go b/ingest/dual_stream_test.go new file mode 100644 index 0000000..21d4fe4 --- /dev/null +++ b/ingest/dual_stream_test.go @@ -0,0 +1,194 @@ +package ingest + +import ( + "context" + "path/filepath" + "testing" + "time" + + "github.com/GrayCodeAI/yaad/engine" + "github.com/GrayCodeAI/yaad/graph" + "github.com/GrayCodeAI/yaad/storage" +) + +func setupEngine(t *testing.T) (*engine.Engine, storage.Storage) { + t.Helper() + dir := t.TempDir() + store, err := storage.NewStore(filepath.Join(dir, "test.db")) + if err != nil { + t.Fatal(err) + } + t.Cleanup(func() { store.Close() }) + + g := graph.New(store, store.DB()) + eng := engine.New(store, g) + t.Cleanup(func() { eng.Close() }) + return eng, store +} + +func TestNew_CreatesStream(t *testing.T) { + eng, _ := setupEngine(t) + ds := New(eng) + if ds == nil { + 
t.Fatal("expected non-nil DualStream") + } + ds.Stop() +} + +func TestRemember_FastPath(t *testing.T) { + eng, store := setupEngine(t) + ds := New(eng) + defer ds.Stop() + + ctx := context.Background() + node, err := ds.Remember(ctx, engine.RememberInput{ + Type: "decision", + Content: "Use PostgreSQL for production", + Scope: "project", + Project: "proj1", + }) + if err != nil { + t.Fatalf("Remember: %v", err) + } + if node == nil { + t.Fatal("expected non-nil node") + } + if node.ID == "" { + t.Error("expected non-empty node ID") + } + + // Verify node was stored + got, err := store.GetNode(ctx, node.ID) + if err != nil { + t.Fatalf("GetNode: %v", err) + } + if got.Content != "Use PostgreSQL for production" { + t.Errorf("unexpected content: %s", got.Content) + } +} + +func TestRemember_TemporalEdge(t *testing.T) { + eng, store := setupEngine(t) + ds := New(eng) + defer ds.Stop() + + ctx := context.Background() + + // First node - no temporal edge expected + node1, err := ds.Remember(ctx, engine.RememberInput{ + Type: "decision", + Content: "First decision", + Scope: "project", + Project: "proj1", + }) + if err != nil { + t.Fatalf("Remember first: %v", err) + } + + // Second node - temporal edge from node1 → node2 expected + node2, err := ds.Remember(ctx, engine.RememberInput{ + Type: "convention", + Content: "Second convention", + Scope: "project", + Project: "proj1", + }) + if err != nil { + t.Fatalf("Remember second: %v", err) + } + + // Verify temporal edge exists + edges, err := store.GetEdgesFrom(ctx, node1.ID) + if err != nil { + t.Fatalf("GetEdgesFrom: %v", err) + } + + found := false + for _, e := range edges { + if e.ToID == node2.ID && e.Type == "learned_in" { + found = true + break + } + } + if !found { + t.Error("expected temporal backbone edge (learned_in) from node1 to node2") + } +} + +func TestRemember_MultipleProjects(t *testing.T) { + eng, _ := setupEngine(t) + ds := New(eng) + defer ds.Stop() + + ctx := context.Background() + + // Nodes in 
different projects should have independent temporal chains + n1, _ := ds.Remember(ctx, engine.RememberInput{ + Type: "decision", Content: "proj1 first", Scope: "project", Project: "proj1", + }) + n2, _ := ds.Remember(ctx, engine.RememberInput{ + Type: "decision", Content: "proj2 first", Scope: "project", Project: "proj2", + }) + n3, _ := ds.Remember(ctx, engine.RememberInput{ + Type: "decision", Content: "proj1 second", Scope: "project", Project: "proj1", + }) + + if n1 == nil || n2 == nil || n3 == nil { + t.Fatal("expected all nodes to be created") + } + + // proj1's second node should link from proj1's first node, not proj2's + ds.mu.Lock() + lastProj1 := ds.lastNode["proj1"] + lastProj2 := ds.lastNode["proj2"] + ds.mu.Unlock() + + if lastProj1 != n3.ID { + t.Errorf("proj1 last node should be n3, got %s", lastProj1) + } + if lastProj2 != n2.ID { + t.Errorf("proj2 last node should be n2, got %s", lastProj2) + } +} + +func TestStop_GracefulShutdown(t *testing.T) { + eng, _ := setupEngine(t) + ds := New(eng) + + ctx := context.Background() + // Add some work + _, _ = ds.Remember(ctx, engine.RememberInput{ + Type: "decision", Content: "test stop", Scope: "project", Project: "proj1", + }) + + // Stop should not hang + done := make(chan struct{}) + go func() { + ds.Stop() + close(done) + }() + + select { + case <-done: + // OK + case <-time.After(5 * time.Second): + t.Fatal("Stop() timed out") + } +} + +func TestRemember_EmptyContent(t *testing.T) { + eng, _ := setupEngine(t) + ds := New(eng) + defer ds.Stop() + + ctx := context.Background() + // Engine may reject empty content + _, err := ds.Remember(ctx, engine.RememberInput{ + Type: "decision", + Content: "", + Scope: "project", + Project: "proj1", + }) + // This may either error or succeed depending on engine validation + // Just ensure no panic + _ = err +} diff --git a/internal/server/mcp.go b/internal/server/mcp.go index 373717e..c8ffae9 100644 --- a/internal/server/mcp.go +++ b/internal/server/mcp.go @@ -30,8 
+30,11 @@ func NewMCPServer(eng *engine.Engine, _ string) *MCPServer { s.server = mcpserver.NewMCPServer("yaad", "0.1.0", mcpserver.WithToolCapabilities(true), mcpserver.WithResourceCapabilities(true, false), + mcpserver.WithPromptCapabilities(true), ) s.registerTools() + s.registerResources() + s.registerPrompts() return s } @@ -524,6 +527,147 @@ func (s *MCPServer) handleSessionRecap(ctx context.Context, req mcp.CallToolRequ return mcp.NewToolResultText(engine.FormatContext(nodes)), nil } +// --- resources --- + +func (s *MCPServer) registerResources() { + s.server.AddResource(mcp.Resource{ + URI: "yaad://context", + Name: "context", + Description: "Hot-tier context nodes for session injection", + MIMEType: "application/json", + }, s.handleResourceContext) + + s.server.AddResource(mcp.Resource{ + URI: "yaad://graph/stats", + Name: "graph_stats", + Description: "Node/edge counts and health metrics", + MIMEType: "application/json", + }, s.handleResourceStats) + + s.server.AddResource(mcp.Resource{ + URI: "yaad://stale", + Name: "stale", + Description: "Potentially stale memory nodes based on git changes", + MIMEType: "application/json", + }, s.handleResourceStale) +} + +func (s *MCPServer) handleResourceContext(_ context.Context, _ mcp.ReadResourceRequest) ([]mcp.ResourceContents, error) { + ctx := context.Background() + result, err := s.eng.Context(ctx, "") + if err != nil { + return nil, err + } + b, err := json.MarshalIndent(result, "", " ") + if err != nil { + return nil, err + } + return []mcp.ResourceContents{ + mcp.TextResourceContents{URI: "yaad://context", MIMEType: "application/json", Text: string(b)}, + }, nil +} + +func (s *MCPServer) handleResourceStats(_ context.Context, _ mcp.ReadResourceRequest) ([]mcp.ResourceContents, error) { + ctx := context.Background() + status, err := s.eng.Status(ctx, "") + if err != nil { + return nil, err + } + b, err := json.MarshalIndent(status, "", " ") + if err != nil { + return nil, err + } + return 
[]mcp.ResourceContents{ + mcp.TextResourceContents{URI: "yaad://graph/stats", MIMEType: "application/json", Text: string(b)}, + }, nil +} + +func (s *MCPServer) handleResourceStale(_ context.Context, _ mcp.ReadResourceRequest) ([]mcp.ResourceContents, error) { + ctx := context.Background() + watcher, err := gitwatch.New(s.eng.Store(), s.eng.Graph(), "") + if err != nil { + return []mcp.ResourceContents{ + mcp.TextResourceContents{URI: "yaad://stale", MIMEType: "application/json", Text: "[]"}, + }, nil + } + stale, err := watcher.StalesSince(ctx, time.Now().Add(-24*time.Hour)) + if err != nil { + return []mcp.ResourceContents{ + mcp.TextResourceContents{URI: "yaad://stale", MIMEType: "application/json", Text: "[]"}, + }, nil + } + b, err := json.MarshalIndent(stale, "", " ") + if err != nil { + return nil, err + } + return []mcp.ResourceContents{ + mcp.TextResourceContents{URI: "yaad://stale", MIMEType: "application/json", Text: string(b)}, + }, nil +} + +// --- prompts --- + +func (s *MCPServer) registerPrompts() { + s.server.AddPrompt(mcp.NewPrompt("recall_context", + mcp.WithPromptDescription("Search memory and format as injectable context"), + mcp.WithArgument("query", mcp.ArgumentDescription("Search query"), mcp.RequiredArgument()), + mcp.WithArgument("project", mcp.ArgumentDescription("Project path filter")), + mcp.WithArgument("depth", mcp.ArgumentDescription("Graph expansion depth (default 2)")), + ), s.handlePromptRecallContext) + + s.server.AddPrompt(mcp.NewPrompt("session_handoff", + mcp.WithPromptDescription("Generate handoff summary for session continuation"), + mcp.WithArgument("project", mcp.ArgumentDescription("Project path")), + ), s.handlePromptSessionHandoff) +} + +func (s *MCPServer) handlePromptRecallContext(ctx context.Context, req mcp.GetPromptRequest) (*mcp.GetPromptResult, error) { + query := req.Params.Arguments["query"] + project := req.Params.Arguments["project"] + depth := 2 + if d, ok := req.Params.Arguments["depth"]; ok && d != "" { + 
fmt.Sscanf(d, "%d", &depth) + } + + result, err := s.eng.Recall(ctx, engine.RecallOpts{ + Query: query, + Depth: depth, + Limit: 10, + Project: project, + }) + if err != nil { + return nil, err + } + + text := engine.FormatContext(result.Nodes) + return &mcp.GetPromptResult{ + Description: "Retrieved context for: " + query, + Messages: []mcp.PromptMessage{ + mcp.NewPromptMessage(mcp.RoleUser, mcp.NewTextContent(text)), + }, + }, nil +} + +func (s *MCPServer) handlePromptSessionHandoff(ctx context.Context, req mcp.GetPromptRequest) (*mcp.GetPromptResult, error) { + project := req.Params.Arguments["project"] + + result, err := s.eng.Context(ctx, project) + if err != nil { + return nil, err + } + + text := "## Session Handoff\n\n" + text += "Here is the relevant context from previous sessions:\n\n" + text += engine.FormatContext(result.Nodes) + + return &mcp.GetPromptResult{ + Description: "Session handoff context", + Messages: []mcp.PromptMessage{ + mcp.NewPromptMessage(mcp.RoleUser, mcp.NewTextContent(text)), + }, + }, nil +} + // --- helpers --- func strArg(req mcp.CallToolRequest, key string) string { diff --git a/skill/skill_test.go b/skill/skill_test.go new file mode 100644 index 0000000..165a96e --- /dev/null +++ b/skill/skill_test.go @@ -0,0 +1,200 @@ +package skill + +import ( + "context" + "path/filepath" + "strings" + "testing" + + "github.com/GrayCodeAI/yaad/engine" + "github.com/GrayCodeAI/yaad/graph" + "github.com/GrayCodeAI/yaad/storage" +) + +func setupEngine(t *testing.T) (*engine.Engine, storage.Storage) { + t.Helper() + dir := t.TempDir() + store, err := storage.NewStore(filepath.Join(dir, "test.db")) + if err != nil { + t.Fatal(err) + } + t.Cleanup(func() { store.Close() }) + + g := graph.New(store, store.DB()) + eng := engine.New(store, g) + t.Cleanup(func() { eng.Close() }) + return eng, store +} + +func TestStore_CreatesSkillNode(t *testing.T) { + eng, store := setupEngine(t) + ctx := context.Background() + + s := &Skill{ + Name: "deploy", + 
Description: "Deploy the application", + Steps: []Step{ + {Order: 1, Description: "Build binary", Command: "go build ./..."}, + {Order: 2, Description: "Run tests", Command: "go test ./..."}, + {Order: 3, Description: "Push to registry"}, + }, + Tags: "ops", + } + + node, err := Store(ctx, eng, s, "proj1") + if err != nil { + t.Fatalf("Store: %v", err) + } + if node == nil { + t.Fatal("expected non-nil node") + } + if node.Type != "skill" { + t.Errorf("expected type=skill, got %s", node.Type) + } + + // Verify stored in DB + got, err := store.GetNode(ctx, node.ID) + if err != nil { + t.Fatalf("GetNode: %v", err) + } + if !strings.Contains(got.Content, "deploy") { + t.Error("expected skill name in content") + } +} + +func TestLoad_FindsSkillByName(t *testing.T) { + eng, store := setupEngine(t) + ctx := context.Background() + + s := &Skill{ + Name: "test-skill", + Description: "A test skill", + Steps: []Step{ + {Order: 1, Description: "Step one"}, + }, + } + + _, err := Store(ctx, eng, s, "proj1") + if err != nil { + t.Fatalf("Store: %v", err) + } + + loaded, err := Load(ctx, store, "test-skill", "proj1") + if err != nil { + t.Fatalf("Load: %v", err) + } + if loaded.Name != "test-skill" { + t.Errorf("expected name=test-skill, got %s", loaded.Name) + } + if loaded.Description != "A test skill" { + t.Errorf("unexpected description: %s", loaded.Description) + } + if len(loaded.Steps) != 1 { + t.Errorf("expected 1 step, got %d", len(loaded.Steps)) + } +} + +func TestLoad_NotFound(t *testing.T) { + _, store := setupEngine(t) + ctx := context.Background() + + _, err := Load(ctx, store, "nonexistent", "proj1") + if err == nil { + t.Error("expected error for non-existent skill") + } + if !strings.Contains(err.Error(), "not found") { + t.Errorf("expected 'not found' in error, got: %v", err) + } +} + +func TestListSkills_Empty(t *testing.T) { + _, store := setupEngine(t) + ctx := context.Background() + + skills, err := ListSkills(ctx, store, "empty-proj") + if err != nil { + 
t.Fatalf("ListSkills: %v", err)
+	}
+	if len(skills) != 0 {
+		t.Errorf("expected 0 skills, got %d", len(skills))
+	}
+}
+
+func TestListSkills_MultipleSkills(t *testing.T) {
+	eng, store := setupEngine(t)
+	ctx := context.Background()
+
+	skills := []*Skill{
+		{Name: "build", Description: "Build project", Steps: []Step{{Order: 1, Description: "compile"}}},
+		{Name: "deploy", Description: "Deploy project", Steps: []Step{{Order: 1, Description: "push"}}},
+		{Name: "test", Description: "Test project", Steps: []Step{{Order: 1, Description: "run tests"}}},
+	}
+
+	for _, s := range skills {
+		if _, err := Store(ctx, eng, s, "proj1"); err != nil {
+			t.Fatalf("Store %s: %v", s.Name, err)
+		}
+	}
+
+	loaded, err := ListSkills(ctx, store, "proj1")
+	if err != nil {
+		t.Fatalf("ListSkills: %v", err)
+	}
+	if len(loaded) != 3 {
+		t.Errorf("expected 3 skills, got %d", len(loaded))
+	}
+}
+
+func TestReplay_FormatsCorrectly(t *testing.T) {
+	s := &Skill{
+		Name:        "release",
+		Description: "Release a new version",
+		Steps: []Step{
+			{Order: 1, Description: "Update changelog"},
+			{Order: 2, Description: "Tag release", Command: "git tag v1.0.0"},
+			{Order: 3, Description: "Push tags", Command: "git push --tags"},
+		},
+	}
+
+	output := Replay(s)
+	if !strings.Contains(output, "## Skill: release") {
+		t.Error("expected skill header in replay output")
+	}
+	if !strings.Contains(output, "Release a new version") {
+		t.Error("expected description in replay output")
+	}
+	if !strings.Contains(output, "1. Update changelog") {
+		t.Error("expected step 1 in replay output")
+	}
+	if !strings.Contains(output, "git tag v1.0.0") {
+		t.Error("expected command in replay output")
+	}
+}
+
+func TestReplay_EmptySteps(t *testing.T) {
+	s := &Skill{
+		Name:        "empty",
+		Description: "No steps",
+		Steps:       []Step{},
+	}
+	output := Replay(s)
+	if !strings.Contains(output, "## Skill: empty") {
+		t.Error("expected header even with no steps")
+	}
+}
+
+func TestAddTag(t *testing.T) {
+	tests := []struct {
+		tags, tag, want string
+	}{
+		{"", "new", "new"},
+		{"existing", "new", "existing,new"},
+		{"a,b", "c", "a,b,c"},
+	}
+	for _, tt := range tests {
+		got := addTag(tt.tags, tt.tag)
+		if got != tt.want {
+			t.Errorf("addTag(%q, %q) = %q, want %q", tt.tags, tt.tag, got, tt.want)
+		}
+	}
+}
diff --git a/temporal/backbone_test.go b/temporal/backbone_test.go
new file mode 100644
index 0000000..0be1880
--- /dev/null
+++ b/temporal/backbone_test.go
@@ -0,0 +1,245 @@
+package temporal
+
+import (
+	"context"
+	"path/filepath"
+	"testing"
+
+	"github.com/google/uuid"
+	"github.com/GrayCodeAI/yaad/storage"
+)
+
+func setupStore(t *testing.T) storage.Storage {
+	t.Helper()
+	dir := t.TempDir()
+	store, err := storage.NewStore(filepath.Join(dir, "test.db"))
+	if err != nil {
+		t.Fatal(err)
+	}
+	t.Cleanup(func() { store.Close() })
+	return store
+}
+
+func createNode(t *testing.T, store storage.Storage, project string) *storage.Node {
+	t.Helper()
+	ctx := context.Background()
+	node := &storage.Node{
+		ID:          uuid.New().String(),
+		Type:        "decision",
+		Content:     "test node " + uuid.New().String()[:8],
+		ContentHash: uuid.New().String(),
+		Scope:       "project",
+		Project:     project,
+		Confidence:  0.8,
+		Version:     1,
+	}
+	if err := store.CreateNode(ctx, node); err != nil {
+		t.Fatalf("CreateNode: %v", err)
+	}
+	return node
+}
+
+func TestNew(t *testing.T) {
+	store := setupStore(t)
+	b := New(store)
+	if b == nil {
+		t.Fatal("expected non-nil Backbone")
+	}
+}
+
+func TestLink_FirstNode(t *testing.T) {
+	store := setupStore(t)
+	b := New(store)
+	ctx := context.Background()
+
+	node := createNode(t, store, "proj1")
+	err := b.Link(ctx, node.ID, "proj1")
+	if err != nil {
+		t.Fatalf("Link first node: %v", err)
+	}
+
+	// No edge expected for the first node (no predecessor)
+	edges, _ := store.GetEdgesTo(ctx, node.ID)
+	learnedIn := 0
+	for _, e := range edges {
+		if e.Type == "learned_in" {
+			learnedIn++
+		}
+	}
+	if learnedIn != 0 {
+		t.Errorf("first node should have no inbound learned_in edges, got %d", learnedIn)
+	}
+}
+
+func TestLink_ChainCreatesEdges(t *testing.T) {
+	store := setupStore(t)
+	b := New(store)
+	ctx := context.Background()
+
+	node1 := createNode(t, store, "proj1")
+	node2 := createNode(t, store, "proj1")
+	node3 := createNode(t, store, "proj1")
+
+	if err := b.Link(ctx, node1.ID, "proj1"); err != nil {
+		t.Fatalf("Link node1: %v", err)
+	}
+	if err := b.Link(ctx, node2.ID, "proj1"); err != nil {
+		t.Fatalf("Link node2: %v", err)
+	}
+	if err := b.Link(ctx, node3.ID, "proj1"); err != nil {
+		t.Fatalf("Link node3: %v", err)
+	}
+
+	// Verify edge from node1 → node2
+	edges, _ := store.GetEdgesFrom(ctx, node1.ID)
+	found12 := false
+	for _, e := range edges {
+		if e.ToID == node2.ID && e.Type == "learned_in" {
+			found12 = true
+		}
+	}
+	if !found12 {
+		t.Error("expected learned_in edge from node1 to node2")
+	}
+
+	// Verify edge from node2 → node3
+	edges, _ = store.GetEdgesFrom(ctx, node2.ID)
+	found23 := false
+	for _, e := range edges {
+		if e.ToID == node3.ID && e.Type == "learned_in" {
+			found23 = true
+		}
+	}
+	if !found23 {
+		t.Error("expected learned_in edge from node2 to node3")
+	}
+}
+
+func TestLink_IndependentProjects(t *testing.T) {
+	store := setupStore(t)
+	b := New(store)
+	ctx := context.Background()
+
+	nodeA1 := createNode(t, store, "projA")
+	nodeB1 := createNode(t, store, "projB")
+	nodeA2 := createNode(t, store, "projA")
+
+	_ = b.Link(ctx, nodeA1.ID, "projA")
+	_ = b.Link(ctx, nodeB1.ID, "projB")
+	_ = b.Link(ctx, nodeA2.ID, "projA")
+
+	// Edge from nodeA1 → nodeA2 (same project)
+	edges, _ := store.GetEdgesFrom(ctx, nodeA1.ID)
+	found := false
+	for _, e := range edges {
+		if e.ToID == nodeA2.ID && e.Type == "learned_in" {
+			found = true
+		}
+	}
+	if !found {
+		t.Error("expected temporal edge within projA")
+	}
+
+	// No edge from nodeA1 → nodeB1 (different projects)
+	for _, e := range edges {
+		if e.ToID == nodeB1.ID {
+			t.Error("unexpected cross-project temporal edge")
+		}
+	}
+}
+
+func TestLink_CancelledContext(t *testing.T) {
+	store := setupStore(t)
+	b := New(store)
+	ctx, cancel := context.WithCancel(context.Background())
+	cancel()
+
+	err := b.Link(ctx, "some-id", "proj1")
+	if err == nil {
+		t.Error("expected error from cancelled context")
+	}
+}
+
+func TestTimeline_Forward(t *testing.T) {
+	store := setupStore(t)
+	b := New(store)
+	ctx := context.Background()
+
+	node1 := createNode(t, store, "proj1")
+	node2 := createNode(t, store, "proj1")
+	node3 := createNode(t, store, "proj1")
+
+	_ = b.Link(ctx, node1.ID, "proj1")
+	_ = b.Link(ctx, node2.ID, "proj1")
+	_ = b.Link(ctx, node3.ID, "proj1")
+
+	nodes, err := b.Timeline(ctx, node1.ID, "forward", 10)
+	if err != nil {
+		t.Fatalf("Timeline: %v", err)
+	}
+	if len(nodes) < 2 {
+		t.Errorf("expected at least 2 nodes in forward timeline, got %d", len(nodes))
+	}
+	// First node should be node1
+	if nodes[0].ID != node1.ID {
+		t.Errorf("first timeline node should be node1, got %s", nodes[0].ID)
+	}
+}
+
+func TestTimeline_Backward(t *testing.T) {
+	store := setupStore(t)
+	b := New(store)
+	ctx := context.Background()
+
+	node1 := createNode(t, store, "proj1")
+	node2 := createNode(t, store, "proj1")
+	node3 := createNode(t, store, "proj1")
+
+	_ = b.Link(ctx, node1.ID, "proj1")
+	_ = b.Link(ctx, node2.ID, "proj1")
+	_ = b.Link(ctx, node3.ID, "proj1")
+
+	nodes, err := b.Timeline(ctx, node3.ID, "backward", 10)
+	if err != nil {
+		t.Fatalf("Timeline backward: %v", err)
+	}
+	if len(nodes) < 2 {
+		t.Errorf("expected at least 2 nodes in backward timeline, got %d", len(nodes))
+	}
+	if nodes[0].ID != node3.ID {
+		t.Errorf("first backward timeline node should be node3, got %s", nodes[0].ID)
+	}
+}
+
+func TestTimeline_NonexistentNode(t *testing.T) {
+	store := setupStore(t)
+	b := New(store)
+	ctx := context.Background()
+
+	nodes, err := b.Timeline(ctx, "nonexistent-id", "forward", 10)
+	if err != nil {
+		t.Fatalf("Timeline: %v", err)
+	}
+	if len(nodes) != 0 {
+		t.Errorf("expected 0 nodes for nonexistent start, got %d", len(nodes))
+	}
+}
+
+func TestTimeline_DefaultLimit(t *testing.T) {
+	store := setupStore(t)
+	b := New(store)
+	ctx := context.Background()
+
+	node := createNode(t, store, "proj1")
+	_ = b.Link(ctx, node.ID, "proj1")
+
+	// limit 0 should default to 20
+	nodes, err := b.Timeline(ctx, node.ID, "forward", 0)
+	if err != nil {
+		t.Fatalf("Timeline: %v", err)
+	}
+	// Should still return at least the start node
+	if len(nodes) < 1 {
+		t.Error("expected at least 1 node with default limit")
+	}
+}