From 87e92d6f0c6d341666c7ab923f325d10fbe2472b Mon Sep 17 00:00:00 2001 From: javanhut Date: Sat, 14 Feb 2026 17:47:59 +0000 Subject: [PATCH 01/12] updates for faster scanning and commits --- cli/diff.go | 7 + cli/fuse.go | 22 +-- cli/gather.go | 173 +----------------------- cli/gather_test.go | 40 ++---- cli/seal.go | 24 +--- cli/status.go | 69 +++------- cli/timeline.go | 5 + cli/utils.go | 3 + internal/ignore/ignore.go | 152 +++++++++++++++++++++ internal/ignore/ignore_test.go | 191 +++++++++++++++++++++++++++ internal/workspace/workspace.go | 108 +++++++++++++-- internal/workspace/workspace_test.go | 155 ++++++++++++++++++++++ 12 files changed, 653 insertions(+), 296 deletions(-) create mode 100644 internal/ignore/ignore.go create mode 100644 internal/ignore/ignore_test.go diff --git a/cli/diff.go b/cli/diff.go index 6652c3f..efec2e4 100644 --- a/cli/diff.go +++ b/cli/diff.go @@ -11,6 +11,7 @@ import ( "github.com/javanhut/Ivaldi-vcs/internal/commit" "github.com/javanhut/Ivaldi-vcs/internal/diffmerge" "github.com/javanhut/Ivaldi-vcs/internal/filechunk" + "github.com/javanhut/Ivaldi-vcs/internal/ignore" "github.com/javanhut/Ivaldi-vcs/internal/refs" "github.com/javanhut/Ivaldi-vcs/internal/workspace" "github.com/javanhut/Ivaldi-vcs/internal/wsindex" @@ -85,6 +86,8 @@ func diffWorkingOrStaged(casStore cas.CAS, ivaldiDir, workDir string) error { // Show working directory vs staged (or HEAD if nothing staged) materializer := workspace.NewMaterializer(casStore, ivaldiDir, workDir) + ignoreCache, _ := ignore.LoadPatternCache(workDir) + materializer.SetIgnorePatterns(ignoreCache) currentIndex, err := materializer.ScanWorkspace() if err != nil { return fmt.Errorf("failed to scan workspace: %w", err) @@ -153,6 +156,8 @@ func diffStagedVsHead(casStore cas.CAS, ivaldiDir, workDir string) error { // Scan workspace to get current file data materializer := workspace.NewMaterializer(casStore, ivaldiDir, workDir) + ignoreCache, _ := ignore.LoadPatternCache(workDir) + 
materializer.SetIgnorePatterns(ignoreCache) currentIndex, err := materializer.ScanWorkspace() if err != nil { return fmt.Errorf("failed to scan workspace: %w", err) @@ -207,6 +212,8 @@ func diffWorkingVsCommit(casStore cas.CAS, ivaldiDir, workDir, commitRef string) // Get working directory index materializer := workspace.NewMaterializer(casStore, ivaldiDir, workDir) + ignoreCache, _ := ignore.LoadPatternCache(workDir) + materializer.SetIgnorePatterns(ignoreCache) workingIndex, err := materializer.ScanWorkspace() if err != nil { return fmt.Errorf("failed to scan workspace: %w", err) diff --git a/cli/fuse.go b/cli/fuse.go index a20e5a9..57afaa9 100644 --- a/cli/fuse.go +++ b/cli/fuse.go @@ -735,33 +735,19 @@ func continueMerge(ivaldiDir, workDir string) error { return fmt.Errorf("no files staged. Stage resolved files with 'ivaldi gather ...'") } - // Scan workspace for staged files + // Scan only the staged files (not the entire workspace) materializer := workspace.NewMaterializer(casStore, ivaldiDir, workDir) - - wsIndex, err := materializer.ScanWorkspace() + wsIndex, err := materializer.ScanSpecificFiles(stagedFiles) if err != nil { - return fmt.Errorf("failed to scan workspace: %w", err) + return fmt.Errorf("failed to scan staged files: %w", err) } wsLoader := wsindex.NewLoader(casStore) - allFiles, err := wsLoader.ListAll(wsIndex) + mergedFiles, err := wsLoader.ListAll(wsIndex) if err != nil { return fmt.Errorf("failed to list files: %w", err) } - // Filter to staged files - var mergedFiles []wsindex.FileMetadata - stagedMap := make(map[string]bool) - for _, f := range stagedFiles { - stagedMap[f] = true - } - - for _, file := range allFiles { - if stagedMap[file.Path] { - mergedFiles = append(mergedFiles, file) - } - } - // Initialize MMR mmr, err := history.NewPersistentMMR(casStore, ivaldiDir) if err != nil { diff --git a/cli/gather.go b/cli/gather.go index 065257c..0fd3f89 100644 --- a/cli/gather.go +++ b/cli/gather.go @@ -1,7 +1,6 @@ package cli import ( - 
"bufio" "fmt" "log" "os" @@ -11,90 +10,10 @@ import ( "sync" "github.com/javanhut/Ivaldi-vcs/internal/colors" + "github.com/javanhut/Ivaldi-vcs/internal/ignore" "github.com/spf13/cobra" ) -// PatternCache holds pre-compiled ignore patterns for fast matching -type PatternCache struct { - patterns []string - dirPatterns []string // Patterns ending with / - globPatterns []string // Patterns with wildcards - literalMatches map[string]bool -} - -// NewPatternCache creates a pattern cache from a list of patterns -func NewPatternCache(patterns []string) *PatternCache { - cache := &PatternCache{ - patterns: patterns, - literalMatches: make(map[string]bool), - } - - for _, pattern := range patterns { - if strings.HasSuffix(pattern, "/") { - cache.dirPatterns = append(cache.dirPatterns, strings.TrimSuffix(pattern, "/")) - } else if strings.ContainsAny(pattern, "*?[") { - cache.globPatterns = append(cache.globPatterns, pattern) - } else { - cache.literalMatches[pattern] = true - } - } - - return cache -} - -// IsIgnored checks if a path matches any cached pattern -func (pc *PatternCache) IsIgnored(path string) bool { - if path == ".ivaldiignore" || filepath.Base(path) == ".ivaldiignore" { - return false - } - - baseName := filepath.Base(path) - - // Fast literal match check - if pc.literalMatches[path] || pc.literalMatches[baseName] { - return true - } - - // Check directory patterns - for _, dirPattern := range pc.dirPatterns { - if strings.HasPrefix(path, dirPattern+"/") || path == dirPattern { - return true - } - } - - // Check glob patterns - for _, pattern := range pc.globPatterns { - // Try matching the full path - if matched, _ := filepath.Match(pattern, path); matched { - return true - } - // Try matching just the basename - if matched, _ := filepath.Match(pattern, baseName); matched { - return true - } - // Handle ** patterns - if strings.Contains(pattern, "**") { - parts := strings.Split(pattern, "**") - if len(parts) == 2 { - prefix := strings.TrimPrefix(parts[0], 
"/") - suffix := strings.TrimPrefix(parts[1], "/") - - if prefix != "" && !strings.HasPrefix(path, prefix) { - continue - } - - if suffix != "" { - if matched, _ := filepath.Match(suffix, baseName); matched { - return true - } - } - } - } - } - - return false -} - // fileResult holds a file path discovered during parallel walking type fileResult struct { path string @@ -110,7 +29,7 @@ type dirJob struct { type parallelWalker struct { workDir string allowAll bool - patternCache *PatternCache + patternCache *ignore.PatternCache results chan fileResult jobs chan dirJob wg sync.WaitGroup @@ -120,7 +39,7 @@ type parallelWalker struct { } // newParallelWalker creates a new parallel walker -func newParallelWalker(workDir string, allowAll bool, patternCache *PatternCache) *parallelWalker { +func newParallelWalker(workDir string, allowAll bool, patternCache *ignore.PatternCache) *parallelWalker { workerCount := runtime.NumCPU() if workerCount < 4 { workerCount = 4 @@ -315,11 +234,10 @@ var gatherCmd = &cobra.Command{ } // Load ignore patterns from .ivaldiignore and create pattern cache - ignorePatterns, err := loadIgnorePatternsForGather(workDir) + patternCache, err := ignore.LoadPatternCache(workDir) if err != nil { log.Printf("Warning: Failed to load ignore patterns: %v", err) } - patternCache := NewPatternCache(ignorePatterns) // Create staging area directory stageDir := filepath.Join(ivaldiDir, "stage") @@ -580,86 +498,3 @@ func shouldGatherDotFile(path string) bool { return false } -// loadIgnorePatternsForGather loads patterns from .ivaldiignore file -func loadIgnorePatternsForGather(workDir string) ([]string, error) { - ignoreFile := filepath.Join(workDir, ".ivaldiignore") - if _, err := os.Stat(ignoreFile); os.IsNotExist(err) { - return []string{}, nil // No ignore file - } - - file, err := os.Open(ignoreFile) - if err != nil { - return nil, err - } - defer file.Close() - - var patterns []string - scanner := bufio.NewScanner(file) - for scanner.Scan() { - line := 
strings.TrimSpace(scanner.Text()) - // Skip empty lines and comments - if line != "" && !strings.HasPrefix(line, "#") { - patterns = append(patterns, line) - } - } - - return patterns, scanner.Err() -} - -// isFileIgnored checks if a file path matches any ignore patterns -// IMPORTANT: .ivaldiignore itself is NEVER ignored -func isFileIgnored(path string, patterns []string) bool { - // Never ignore .ivaldiignore itself - if path == ".ivaldiignore" || filepath.Base(path) == ".ivaldiignore" { - return false - } - - for _, pattern := range patterns { - // Handle directory patterns (patterns ending with /) - if strings.HasSuffix(pattern, "/") { - dirPattern := strings.TrimSuffix(pattern, "/") - // Check if the path is within this directory - if strings.HasPrefix(path, dirPattern+"/") || path == dirPattern { - return true - } - } - - // Try matching the full path - if matched, _ := filepath.Match(pattern, path); matched { - return true - } - - // Try matching just the basename - if matched, _ := filepath.Match(pattern, filepath.Base(path)); matched { - return true - } - - // Handle patterns with directory separators - if strings.Contains(pattern, "/") { - if matched, _ := filepath.Match(pattern, path); matched { - return true - } - } - - // Handle wildcards in directory paths (e.g., **/*.log) - if strings.Contains(pattern, "**") { - // Convert ** pattern to a simpler check - parts := strings.Split(pattern, "**") - if len(parts) == 2 { - prefix := strings.TrimPrefix(parts[0], "/") - suffix := strings.TrimPrefix(parts[1], "/") - - if prefix != "" && !strings.HasPrefix(path, prefix) { - continue - } - - if suffix != "" { - if matched, _ := filepath.Match(suffix, filepath.Base(path)); matched { - return true - } - } - } - } - } - return false -} diff --git a/cli/gather_test.go b/cli/gather_test.go index bd4fe72..e4a6771 100644 --- a/cli/gather_test.go +++ b/cli/gather_test.go @@ -2,6 +2,8 @@ package cli import ( "testing" + + "github.com/javanhut/Ivaldi-vcs/internal/ignore" 
) // BenchmarkPatternCache benchmarks the pattern cache matching @@ -39,7 +41,7 @@ func BenchmarkPatternCache(b *testing.B) { } b.Run("PatternCache", func(b *testing.B) { - cache := NewPatternCache(patterns) + cache := ignore.NewPatternCache(patterns) b.ResetTimer() for i := 0; i < b.N; i++ { for _, path := range testPaths { @@ -47,15 +49,6 @@ func BenchmarkPatternCache(b *testing.B) { } } }) - - b.Run("OriginalPatternMatching", func(b *testing.B) { - b.ResetTimer() - for i := 0; i < b.N; i++ { - for _, path := range testPaths { - isFileIgnored(path, patterns) - } - } - }) } // BenchmarkPatternCacheLargePatternSet benchmarks with many patterns @@ -75,7 +68,7 @@ func BenchmarkPatternCacheLargePatternSet(b *testing.B) { } b.Run("PatternCache_100Patterns", func(b *testing.B) { - cache := NewPatternCache(patterns) + cache := ignore.NewPatternCache(patterns) b.ResetTimer() for i := 0; i < b.N; i++ { for _, path := range testPaths { @@ -83,18 +76,9 @@ func BenchmarkPatternCacheLargePatternSet(b *testing.B) { } } }) - - b.Run("Original_100Patterns", func(b *testing.B) { - b.ResetTimer() - for i := 0; i < b.N; i++ { - for _, path := range testPaths { - isFileIgnored(path, patterns) - } - } - }) } -// TestPatternCacheCorrectness verifies pattern cache matches original behavior +// TestPatternCacheCorrectness verifies pattern cache produces correct results func TestPatternCacheCorrectness(t *testing.T) { patterns := []string{ "*.log", @@ -118,18 +102,12 @@ func TestPatternCacheCorrectness(t *testing.T) { {".ivaldiignore", false}, // Should never be ignored } - cache := NewPatternCache(patterns) + cache := ignore.NewPatternCache(patterns) for _, tc := range testCases { - cacheResult := cache.IsIgnored(tc.path) - originalResult := isFileIgnored(tc.path, patterns) - - if cacheResult != originalResult { - t.Errorf("Mismatch for path %q: cache=%v, original=%v", tc.path, cacheResult, originalResult) - } - - if cacheResult != tc.expected { - t.Errorf("Path %q: expected %v, got %v", 
tc.path, tc.expected, cacheResult) + result := cache.IsIgnored(tc.path) + if result != tc.expected { + t.Errorf("Path %q: expected %v, got %v", tc.path, tc.expected, result) } } } diff --git a/cli/seal.go b/cli/seal.go index 99f5e3b..68278f6 100644 --- a/cli/seal.go +++ b/cli/seal.go @@ -80,35 +80,19 @@ var sealCmd = &cobra.Command{ mmr := history.NewMMR() commitBuilder := commit.NewCommitBuilder(casStore, mmr) - // Create materializer to scan workspace + // Scan only the staged files (not the entire workspace) materializer := workspace.NewMaterializer(casStore, ivaldiDir, workDir) - - // Scan the current workspace to create file metadata - wsIndex, err := materializer.ScanWorkspace() + wsIndex, err := materializer.ScanSpecificFiles(stagedFiles) if err != nil { - return fmt.Errorf("failed to scan workspace: %w", err) + return fmt.Errorf("failed to scan staged files: %w", err) } - // Get workspace files wsLoader := wsindex.NewLoader(casStore) - allWorkspaceFiles, err := wsLoader.ListAll(wsIndex) + workspaceFiles, err := wsLoader.ListAll(wsIndex) if err != nil { return fmt.Errorf("failed to list workspace files: %w", err) } - // Filter workspace files to only include staged files - stagedFileMap := make(map[string]bool) - for _, file := range stagedFiles { - stagedFileMap[file] = true - } - - var workspaceFiles []wsindex.FileMetadata - for _, file := range allWorkspaceFiles { - if stagedFileMap[file.Path] { - workspaceFiles = append(workspaceFiles, file) - } - } - fmt.Printf("Found %d files in workspace\n", len(workspaceFiles)) // Get author from config diff --git a/cli/status.go b/cli/status.go index a1a8d7b..42b037f 100644 --- a/cli/status.go +++ b/cli/status.go @@ -1,7 +1,6 @@ package cli import ( - "bufio" "encoding/hex" "fmt" "log" @@ -12,6 +11,7 @@ import ( "github.com/javanhut/Ivaldi-vcs/internal/cas" "github.com/javanhut/Ivaldi-vcs/internal/colors" "github.com/javanhut/Ivaldi-vcs/internal/commit" + "github.com/javanhut/Ivaldi-vcs/internal/ignore" 
"github.com/javanhut/Ivaldi-vcs/internal/objects" "github.com/javanhut/Ivaldi-vcs/internal/refs" "github.com/spf13/cobra" @@ -68,13 +68,13 @@ var statusCmd = &cobra.Command{ } // Load ignore patterns - ignorePatterns, err := loadIgnorePatterns(workDir) + patternCache, err := ignore.LoadPatternCache(workDir) if err != nil { log.Printf("Warning: Failed to load ignore patterns: %v", err) } // Get file statuses - fileStatuses, err := getFileStatuses(workDir, ivaldiDir, ignorePatterns) + fileStatuses, err := getFileStatuses(workDir, ivaldiDir, patternCache) if err != nil { return fmt.Errorf("failed to get file statuses: %w", err) } @@ -194,7 +194,7 @@ func init() { } // getFileStatuses analyzes the working directory and returns file status information -func getFileStatuses(workDir, ivaldiDir string, ignorePatterns []string) ([]FileStatusInfo, error) { +func getFileStatuses(workDir, ivaldiDir string, patternCache *ignore.PatternCache) ([]FileStatusInfo, error) { var fileStatuses []FileStatusInfo // Get staged files @@ -212,27 +212,36 @@ func getFileStatuses(workDir, ivaldiDir string, ignorePatterns []string) ([]File // Walk the working directory err = filepath.Walk(workDir, func(path string, info os.FileInfo, err error) error { if err != nil { + if os.IsNotExist(err) { + return nil + } return err } - // Skip directories - if info.IsDir() { - return nil - } - // Get relative path relPath, err := filepath.Rel(workDir, path) if err != nil { return err } - // Skip .ivaldi directory + // Handle directories: skip .ivaldi and ignored dirs entirely + if info.IsDir() { + if relPath == ".ivaldi" || strings.HasPrefix(relPath, ".ivaldi"+string(filepath.Separator)) { + return filepath.SkipDir + } + if patternCache != nil && patternCache.IsDirIgnored(relPath) { + return filepath.SkipDir + } + return nil + } + + // Skip .ivaldi files (safety fallback) if strings.HasPrefix(relPath, ".ivaldi") { return nil } // Check if file is ignored - if isIgnored(relPath, ignorePatterns) { + if 
patternCache != nil && patternCache.IsIgnored(relPath) { fileStatuses = append(fileStatuses, FileStatusInfo{ Path: relPath, Status: StatusIgnored, @@ -362,30 +371,6 @@ func getStagedFiles(ivaldiDir string) ([]string, error) { return files, nil } -// loadIgnorePatterns loads patterns from .ivaldiignore file -func loadIgnorePatterns(workDir string) ([]string, error) { - ignoreFile := filepath.Join(workDir, ".ivaldiignore") - if _, err := os.Stat(ignoreFile); os.IsNotExist(err) { - return []string{}, nil // No ignore file - } - - file, err := os.Open(ignoreFile) - if err != nil { - return nil, err - } - defer file.Close() - - var patterns []string - scanner := bufio.NewScanner(file) - for scanner.Scan() { - line := strings.TrimSpace(scanner.Text()) - if line != "" && !strings.HasPrefix(line, "#") { - patterns = append(patterns, line) - } - } - - return patterns, scanner.Err() -} // getKnownFiles reads files from the last commit/seal for proper status tracking func getKnownFiles(ivaldiDir string) (map[string][32]byte, error) { @@ -502,17 +487,3 @@ func displayLastSealInfo(refsManager *refs.RefsManager, currentTimeline, ivaldiD return nil } -// isIgnored checks if a file path matches any ignore patterns -func isIgnored(path string, patterns []string) bool { - for _, pattern := range patterns { - // Simple pattern matching - in a full implementation, - // this would support full glob patterns - if matched, _ := filepath.Match(pattern, path); matched { - return true - } - if matched, _ := filepath.Match(pattern, filepath.Base(path)); matched { - return true - } - } - return false -} diff --git a/cli/timeline.go b/cli/timeline.go index d6bb7ae..1a78880 100644 --- a/cli/timeline.go +++ b/cli/timeline.go @@ -10,6 +10,7 @@ import ( "github.com/javanhut/Ivaldi-vcs/internal/cas" "github.com/javanhut/Ivaldi-vcs/internal/commit" "github.com/javanhut/Ivaldi-vcs/internal/history" + "github.com/javanhut/Ivaldi-vcs/internal/ignore" "github.com/javanhut/Ivaldi-vcs/internal/refs" 
"github.com/javanhut/Ivaldi-vcs/internal/seals" "github.com/javanhut/Ivaldi-vcs/internal/shelf" @@ -68,6 +69,8 @@ var createTimelineCmd = &cobra.Command{ // This ensures files like tl1.txt stay with tl1 when we create tl2 shelfManager := shelf.NewShelfManager(casStore, ivaldiDir) materializer := workspace.NewMaterializer(casStore, ivaldiDir, ".") + ignoreCache, _ := ignore.LoadPatternCache(".") + materializer.SetIgnorePatterns(ignoreCache) currentWorkspaceIndex, err := materializer.ScanWorkspace() if err == nil { // Get the current timeline's base (committed) state @@ -387,6 +390,8 @@ func createCommitFromWorkspace(casStore cas.CAS, ivaldiDir string, parentTimelin // Scan current workspace to capture ALL files (both tracked and untracked) // This becomes the initial state of the new timeline materializer := workspace.NewMaterializer(casStore, ivaldiDir, ".") + ignoreCache, _ := ignore.LoadPatternCache(".") + materializer.SetIgnorePatterns(ignoreCache) wsIndex, err := materializer.ScanWorkspace() if err != nil { return fmt.Errorf("failed to scan workspace: %w", err) diff --git a/cli/utils.go b/cli/utils.go index 184fb8f..0b91465 100644 --- a/cli/utils.go +++ b/cli/utils.go @@ -10,6 +10,7 @@ import ( "github.com/javanhut/Ivaldi-vcs/internal/commit" "github.com/javanhut/Ivaldi-vcs/internal/config" "github.com/javanhut/Ivaldi-vcs/internal/history" + "github.com/javanhut/Ivaldi-vcs/internal/ignore" "github.com/javanhut/Ivaldi-vcs/internal/objects" "github.com/javanhut/Ivaldi-vcs/internal/refs" "github.com/javanhut/Ivaldi-vcs/internal/seals" @@ -80,6 +81,8 @@ func createInitialCommit(ivaldiDir, workDir string) (*[32]byte, error) { // Create materializer to scan workspace materializer := workspace.NewMaterializer(casStore, ivaldiDir, workDir) + ignoreCache, _ := ignore.LoadPatternCache(workDir) + materializer.SetIgnorePatterns(ignoreCache) // Scan the current workspace wsIndex, err := materializer.ScanWorkspace() diff --git a/internal/ignore/ignore.go 
b/internal/ignore/ignore.go new file mode 100644 index 0000000..084bbb1 --- /dev/null +++ b/internal/ignore/ignore.go @@ -0,0 +1,152 @@ +// Package ignore provides shared ignore-pattern matching for Ivaldi. +// +// It loads patterns from .ivaldiignore files and provides efficient +// matching via PatternCache, used by both CLI commands and the +// workspace scanner. +package ignore + +import ( + "bufio" + "os" + "path/filepath" + "strings" +) + +// PatternCache holds pre-compiled ignore patterns for fast matching. +type PatternCache struct { + patterns []string + dirPatterns []string // Patterns ending with / + globPatterns []string // Patterns with wildcards + literalMatches map[string]bool // Exact-match patterns +} + +// NewPatternCache creates a PatternCache from a list of patterns. +func NewPatternCache(patterns []string) *PatternCache { + cache := &PatternCache{ + patterns: patterns, + literalMatches: make(map[string]bool), + } + + for _, pattern := range patterns { + if strings.HasSuffix(pattern, "/") { + cache.dirPatterns = append(cache.dirPatterns, strings.TrimSuffix(pattern, "/")) + } else if strings.ContainsAny(pattern, "*?[") { + cache.globPatterns = append(cache.globPatterns, pattern) + } else { + cache.literalMatches[pattern] = true + } + } + + return cache +} + +// IsIgnored checks if a file path matches any cached pattern. +// .ivaldiignore itself is never ignored. 
+func (pc *PatternCache) IsIgnored(path string) bool { + if path == ".ivaldiignore" || filepath.Base(path) == ".ivaldiignore" { + return false + } + + baseName := filepath.Base(path) + + // Fast literal match check + if pc.literalMatches[path] || pc.literalMatches[baseName] { + return true + } + + // Check directory patterns + for _, dirPattern := range pc.dirPatterns { + if strings.HasPrefix(path, dirPattern+"/") || path == dirPattern { + return true + } + } + + // Check glob patterns + for _, pattern := range pc.globPatterns { + if matched, _ := filepath.Match(pattern, path); matched { + return true + } + if matched, _ := filepath.Match(pattern, baseName); matched { + return true + } + // Handle ** patterns + if strings.Contains(pattern, "**") { + parts := strings.Split(pattern, "**") + if len(parts) == 2 { + prefix := strings.TrimPrefix(parts[0], "/") + suffix := strings.TrimPrefix(parts[1], "/") + + if prefix != "" && !strings.HasPrefix(path, prefix) { + continue + } + + if suffix != "" { + if matched, _ := filepath.Match(suffix, baseName); matched { + return true + } + } + } + } + } + + return false +} + +// IsDirIgnored checks if a directory path matches any cached directory pattern. +// This is used for early pruning during directory walks. +func (pc *PatternCache) IsDirIgnored(dirPath string) bool { + baseName := filepath.Base(dirPath) + + // Check literal matches (directory name) + if pc.literalMatches[dirPath] || pc.literalMatches[baseName] { + return true + } + + // Check directory patterns + for _, dirPattern := range pc.dirPatterns { + if dirPath == dirPattern || strings.HasPrefix(dirPath, dirPattern+"/") { + return true + } + // Also match if the directory basename matches + if baseName == dirPattern { + return true + } + } + + return false +} + +// LoadPatterns reads ignore patterns from the .ivaldiignore file in workDir. 
+func LoadPatterns(workDir string) ([]string, error) { + ignoreFile := filepath.Join(workDir, ".ivaldiignore") + if _, err := os.Stat(ignoreFile); os.IsNotExist(err) { + return []string{}, nil + } + + file, err := os.Open(ignoreFile) + if err != nil { + return nil, err + } + defer file.Close() + + var patterns []string + scanner := bufio.NewScanner(file) + for scanner.Scan() { + line := strings.TrimSpace(scanner.Text()) + if line != "" && !strings.HasPrefix(line, "#") { + patterns = append(patterns, line) + } + } + + return patterns, scanner.Err() +} + +// LoadPatternCache is a convenience function that loads patterns and creates a cache. +// Returns a non-nil cache even on error (with empty patterns). +func LoadPatternCache(workDir string) (*PatternCache, error) { + patterns, err := LoadPatterns(workDir) + if err != nil { + return NewPatternCache(nil), err + } + return NewPatternCache(patterns), nil +} diff --git a/internal/ignore/ignore_test.go b/internal/ignore/ignore_test.go new file mode 100644 index 0000000..4ba4d48 --- /dev/null +++ b/internal/ignore/ignore_test.go @@ -0,0 +1,191 @@ +package ignore + +import ( + "os" + "path/filepath" + "testing" +) + +func TestNewPatternCache(t *testing.T) { + patterns := []string{ + "*.log", + "node_modules/", + "target/", + ".DS_Store", + "**/*.bak", + } + + cache := NewPatternCache(patterns) + + if len(cache.patterns) != 5 { + t.Errorf("Expected 5 patterns, got %d", len(cache.patterns)) + } + if len(cache.dirPatterns) != 2 { + t.Errorf("Expected 2 dir patterns, got %d", len(cache.dirPatterns)) + } + if len(cache.globPatterns) != 2 { + t.Errorf("Expected 2 glob patterns, got %d", len(cache.globPatterns)) + } + if len(cache.literalMatches) != 1 { + t.Errorf("Expected 1 literal match, got %d", len(cache.literalMatches)) + } +} + +func TestIsIgnored(t *testing.T) { + patterns := []string{ + "*.log", + "target/", + "node_modules/", + "**/*.bak", + ".DS_Store", + "build/", + } + + cache := NewPatternCache(patterns) + + tests 
:= []struct { + path string + expected bool + }{ + // Literal match + {".DS_Store", true}, + + // Glob patterns + {"app.log", true}, + {"src/debug.log", true}, + + // Directory patterns + {"target/release/deps/foo", true}, + {"target/debug/build/bar", true}, + {"node_modules/express/index.js", true}, + {"build/output.js", true}, + + // ** glob patterns + {"file.bak", true}, + {"src/deep/file.bak", true}, + + // Not ignored + {"src/main.go", false}, + {"README.md", false}, + {"Cargo.toml", false}, + + // .ivaldiignore should never be ignored + {".ivaldiignore", false}, + {"subdir/.ivaldiignore", false}, + } + + for _, tc := range tests { + result := cache.IsIgnored(tc.path) + if result != tc.expected { + t.Errorf("IsIgnored(%q) = %v, want %v", tc.path, result, tc.expected) + } + } +} + +func TestIsDirIgnored(t *testing.T) { + patterns := []string{ + "target/", + "node_modules/", + "build/", + "*.log", + } + + cache := NewPatternCache(patterns) + + tests := []struct { + path string + expected bool + }{ + {"target", true}, + {"node_modules", true}, + {"build", true}, + {"target/release", true}, + {"target/debug/deps", true}, + {"src", false}, + {"internal", false}, + } + + for _, tc := range tests { + result := cache.IsDirIgnored(tc.path) + if result != tc.expected { + t.Errorf("IsDirIgnored(%q) = %v, want %v", tc.path, result, tc.expected) + } + } +} + +func TestLoadPatterns(t *testing.T) { + tempDir := t.TempDir() + + // Test with no .ivaldiignore file + patterns, err := LoadPatterns(tempDir) + if err != nil { + t.Fatalf("LoadPatterns failed with no ignore file: %v", err) + } + if len(patterns) != 0 { + t.Errorf("Expected 0 patterns with no ignore file, got %d", len(patterns)) + } + + // Create .ivaldiignore file + content := `# Build output +target/ +build/ + +# Dependencies +node_modules/ + +# Logs +*.log + +# OS files +.DS_Store +` + ignoreFile := filepath.Join(tempDir, ".ivaldiignore") + if err := os.WriteFile(ignoreFile, []byte(content), 0644); err != nil { + 
t.Fatalf("Failed to write .ivaldiignore: %v", err) + } + + patterns, err = LoadPatterns(tempDir) + if err != nil { + t.Fatalf("LoadPatterns failed: %v", err) + } + + expected := []string{"target/", "build/", "node_modules/", "*.log", ".DS_Store"} + if len(patterns) != len(expected) { + t.Fatalf("Expected %d patterns, got %d: %v", len(expected), len(patterns), patterns) + } + + for i, p := range expected { + if patterns[i] != p { + t.Errorf("Pattern %d: expected %q, got %q", i, p, patterns[i]) + } + } +} + +func TestLoadPatternCache(t *testing.T) { + tempDir := t.TempDir() + + // Should return non-nil cache even with no ignore file + cache, err := LoadPatternCache(tempDir) + if err != nil { + t.Fatalf("LoadPatternCache failed: %v", err) + } + if cache == nil { + t.Fatal("Expected non-nil cache") + } + + // Should not ignore anything with empty patterns + if cache.IsIgnored("anything.go") { + t.Error("Empty cache should not ignore any file") + } +} + +func TestEmptyPatternCache(t *testing.T) { + cache := NewPatternCache(nil) + + if cache.IsIgnored("foo.go") { + t.Error("Nil-patterns cache should not ignore anything") + } + if cache.IsDirIgnored("target") { + t.Error("Nil-patterns cache should not ignore any directory") + } +} diff --git a/internal/workspace/workspace.go b/internal/workspace/workspace.go index c5fced4..e7912cf 100644 --- a/internal/workspace/workspace.go +++ b/internal/workspace/workspace.go @@ -27,6 +27,7 @@ import ( "github.com/javanhut/Ivaldi-vcs/internal/diffmerge" "github.com/javanhut/Ivaldi-vcs/internal/filechunk" "github.com/javanhut/Ivaldi-vcs/internal/hamtdir" + "github.com/javanhut/Ivaldi-vcs/internal/ignore" "github.com/javanhut/Ivaldi-vcs/internal/logging" "github.com/javanhut/Ivaldi-vcs/internal/refs" "github.com/javanhut/Ivaldi-vcs/internal/shelf" @@ -44,9 +45,15 @@ type WorkspaceState struct { // Materializer handles workspace materialization operations. 
type Materializer struct { - CAS cas.CAS - IvaldiDir string - WorkDir string + CAS cas.CAS + IvaldiDir string + WorkDir string + IgnoreCache *ignore.PatternCache +} + +// SetIgnorePatterns sets the ignore-pattern cache used by ScanWorkspace. +func (m *Materializer) SetIgnorePatterns(pc *ignore.PatternCache) { + m.IgnoreCache = pc } // NewMaterializer creates a new Materializer. @@ -100,38 +107,67 @@ func (m *Materializer) GetCurrentState() (*WorkspaceState, error) { } // ScanWorkspace scans the current working directory and creates a workspace index. +// It respects ignore patterns set via SetIgnorePatterns and handles transient files +// (files that disappear between discovery and read) gracefully. func (m *Materializer) ScanWorkspace() (wsindex.IndexRef, error) { var files []wsindex.FileMetadata err := filepath.WalkDir(m.WorkDir, func(path string, d fs.DirEntry, err error) error { if err != nil { + // Handle transient directories/files that disappear during walk + if os.IsNotExist(err) { + logging.Warn("File disappeared during scan, skipping", "path", path) + return nil + } return err } - // Skip directories - if d.IsDir() { - return nil - } - - // Skip .ivaldi directory relPath, err := filepath.Rel(m.WorkDir, path) if err != nil { return err } + // Skip .ivaldi directory entirely + if d.IsDir() { + if relPath == ".ivaldi" || strings.HasPrefix(relPath, ".ivaldi"+string(filepath.Separator)) { + return filepath.SkipDir + } + + // Skip directories matching ignore patterns + if m.IgnoreCache != nil && m.IgnoreCache.IsDirIgnored(relPath) { + return filepath.SkipDir + } + + return nil + } + + // Skip .ivaldi files (in case we get here without SkipDir) if strings.HasPrefix(relPath, ".ivaldi"+string(filepath.Separator)) || relPath == ".ivaldi" { return nil } + // Skip files matching ignore patterns + if m.IgnoreCache != nil && m.IgnoreCache.IsIgnored(relPath) { + return nil + } + // Get file info info, err := d.Info() if err != nil { + if os.IsNotExist(err) { + 
logging.Warn("File disappeared during scan, skipping", "path", relPath) + return nil + } return err } // Read file content content, err := os.ReadFile(path) if err != nil { + if os.IsNotExist(err) { + logging.Warn("File disappeared during read, skipping", "path", relPath) + return nil + } return fmt.Errorf("failed to read file %s: %w", relPath, err) } @@ -165,6 +201,60 @@ func (m *Materializer) ScanWorkspace() (wsindex.IndexRef, error) { return wsBuilder.Build(files) } +// ScanSpecificFiles scans only the specified relative paths and creates a workspace index. +// Files that do not exist or disappear during read are skipped with a warning. +// This is O(len(relativePaths)) instead of O(all workspace files). +func (m *Materializer) ScanSpecificFiles(relativePaths []string) (wsindex.IndexRef, error) { + var files []wsindex.FileMetadata + + for _, relPath := range relativePaths { + fullPath := filepath.Join(m.WorkDir, relPath) + + info, err := os.Stat(fullPath) + if err != nil { + if os.IsNotExist(err) { + logging.Warn("Staged file not found, skipping", "path", relPath) + continue + } + return wsindex.IndexRef{}, fmt.Errorf("failed to stat file %s: %w", relPath, err) + } + + if info.IsDir() { + logging.Warn("Skipping directory in staged files", "path", relPath) + continue + } + + content, err := os.ReadFile(fullPath) + if err != nil { + if os.IsNotExist(err) { + logging.Warn("File disappeared during read, skipping", "path", relPath) + continue + } + return wsindex.IndexRef{}, fmt.Errorf("failed to read file %s: %w", relPath, err) + } + + builder := filechunk.NewBuilder(m.CAS, filechunk.DefaultParams()) + fileRef, err := builder.Build(content) + if err != nil { + return wsindex.IndexRef{}, fmt.Errorf("failed to create file chunks for %s: %w", relPath, err) + } + + fileMetadata := wsindex.FileMetadata{ + Path: relPath, + FileRef: fileRef, + ModTime: info.ModTime(), + Mode: uint32(info.Mode()), + Size: info.Size(), + Checksum: cas.SumB3(content), + } + + files = 
append(files, fileMetadata) + } + + wsBuilder := wsindex.NewBuilder(m.CAS) + return wsBuilder.Build(files) +} + // MaterializeTimeline materializes a timeline's state to the workspace. func (m *Materializer) MaterializeTimeline(timelineName string) error { return m.MaterializeTimelineWithAutoShelf(timelineName, true) diff --git a/internal/workspace/workspace_test.go b/internal/workspace/workspace_test.go index ded321b..9246cd0 100644 --- a/internal/workspace/workspace_test.go +++ b/internal/workspace/workspace_test.go @@ -8,6 +8,7 @@ import ( "github.com/javanhut/Ivaldi-vcs/internal/cas" "github.com/javanhut/Ivaldi-vcs/internal/commit" "github.com/javanhut/Ivaldi-vcs/internal/history" + "github.com/javanhut/Ivaldi-vcs/internal/ignore" "github.com/javanhut/Ivaldi-vcs/internal/refs" "github.com/javanhut/Ivaldi-vcs/internal/wsindex" ) @@ -464,6 +465,160 @@ func TestRemoveEmptyDirectories(t *testing.T) { } } +func TestScanSpecificFiles(t *testing.T) { + _, workDir, materializer, cleanup := setupTestWorkspace(t) + defer cleanup() + + // Create test files + err := os.WriteFile(filepath.Join(workDir, "file1.txt"), []byte("content1"), 0644) + if err != nil { + t.Fatalf("Failed to create file1: %v", err) + } + err = os.WriteFile(filepath.Join(workDir, "file2.txt"), []byte("content2"), 0644) + if err != nil { + t.Fatalf("Failed to create file2: %v", err) + } + err = os.WriteFile(filepath.Join(workDir, "file3.txt"), []byte("content3"), 0644) + if err != nil { + t.Fatalf("Failed to create file3: %v", err) + } + + // Scan only file1 and file3 + index, err := materializer.ScanSpecificFiles([]string{"file1.txt", "file3.txt"}) + if err != nil { + t.Fatalf("ScanSpecificFiles failed: %v", err) + } + + if index.Count != 2 { + t.Errorf("Expected 2 files in index, got %d", index.Count) + } + + // Verify the correct files are in the index + wsLoader := wsindex.NewLoader(materializer.CAS) + files, err := wsLoader.ListAll(index) + if err != nil { + t.Fatalf("Failed to list files: %v", 
err) + } + + fileNames := make(map[string]bool) + for _, f := range files { + fileNames[f.Path] = true + } + + if !fileNames["file1.txt"] { + t.Error("Expected file1.txt in index") + } + if fileNames["file2.txt"] { + t.Error("Did not expect file2.txt in index") + } + if !fileNames["file3.txt"] { + t.Error("Expected file3.txt in index") + } +} + +func TestScanSpecificFilesMissingFile(t *testing.T) { + _, workDir, materializer, cleanup := setupTestWorkspace(t) + defer cleanup() + + // Create only one file + err := os.WriteFile(filepath.Join(workDir, "exists.txt"), []byte("content"), 0644) + if err != nil { + t.Fatalf("Failed to create file: %v", err) + } + + // Scan with a mix of existing and missing files + index, err := materializer.ScanSpecificFiles([]string{"exists.txt", "missing.txt"}) + if err != nil { + t.Fatalf("ScanSpecificFiles should not fail on missing files: %v", err) + } + + // Should only include the existing file + if index.Count != 1 { + t.Errorf("Expected 1 file in index (skipping missing), got %d", index.Count) + } +} + +func TestScanWorkspaceWithIgnorePatterns(t *testing.T) { + _, workDir, materializer, cleanup := setupTestWorkspace(t) + defer cleanup() + + // Create files and directories + err := os.WriteFile(filepath.Join(workDir, "main.go"), []byte("package main"), 0644) + if err != nil { + t.Fatalf("Failed to create main.go: %v", err) + } + + // Create a "target" directory with files (simulating Rust build output) + err = os.MkdirAll(filepath.Join(workDir, "target", "release"), 0755) + if err != nil { + t.Fatalf("Failed to create target dir: %v", err) + } + err = os.WriteFile(filepath.Join(workDir, "target", "release", "binary"), []byte("binary content"), 0644) + if err != nil { + t.Fatalf("Failed to create binary: %v", err) + } + + // Create a log file + err = os.WriteFile(filepath.Join(workDir, "debug.log"), []byte("log content"), 0644) + if err != nil { + t.Fatalf("Failed to create log file: %v", err) + } + + // Set up ignore patterns + pc 
:= ignore.NewPatternCache([]string{"target/", "*.log"}) + materializer.SetIgnorePatterns(pc) + + // Scan workspace + index, err := materializer.ScanWorkspace() + if err != nil { + t.Fatalf("ScanWorkspace failed: %v", err) + } + + // Should only include main.go (target/ and *.log are ignored) + if index.Count != 1 { + t.Errorf("Expected 1 file (ignoring target/ and *.log), got %d", index.Count) + } + + wsLoader := wsindex.NewLoader(materializer.CAS) + files, err := wsLoader.ListAll(index) + if err != nil { + t.Fatalf("Failed to list files: %v", err) + } + + if len(files) != 1 || files[0].Path != "main.go" { + t.Errorf("Expected only main.go, got %v", files) + } +} + +func TestScanWorkspaceSkipsIvaldiDir(t *testing.T) { + _, workDir, materializer, cleanup := setupTestWorkspace(t) + defer cleanup() + + // Create a file in workspace + err := os.WriteFile(filepath.Join(workDir, "src.go"), []byte("package src"), 0644) + if err != nil { + t.Fatalf("Failed to create file: %v", err) + } + + // .ivaldi directory already exists from setup - make sure it's skipped + index, err := materializer.ScanWorkspace() + if err != nil { + t.Fatalf("ScanWorkspace failed: %v", err) + } + + wsLoader := wsindex.NewLoader(materializer.CAS) + files, err := wsLoader.ListAll(index) + if err != nil { + t.Fatalf("Failed to list files: %v", err) + } + + for _, f := range files { + if f.Path == ".ivaldi" || filepath.Dir(f.Path) == ".ivaldi" { + t.Errorf("ScanWorkspace should skip .ivaldi files, found: %s", f.Path) + } + } +} + func BenchmarkScanWorkspace(b *testing.B) { tempDir := b.TempDir() ivaldiDir := filepath.Join(tempDir, ".ivaldi") From a96e9f1e4fbaaa92df071bfec32a528b311b69a4 Mon Sep 17 00:00:00 2001 From: javanhut Date: Sun, 15 Feb 2026 08:25:44 +0000 Subject: [PATCH 02/12] feat: enforce ignore to reduce expensive io operations --- cli/timeline.go | 2 ++ cli/travel.go | 5 +++++ internal/github/sync_push.go | 1 + internal/workspace/workspace.go | 7 +++++++ 4 files changed, 15 insertions(+) 
diff --git a/cli/timeline.go b/cli/timeline.go index 1a78880..0d35973 100644 --- a/cli/timeline.go +++ b/cli/timeline.go @@ -300,6 +300,8 @@ var switchTimelineCmd = &cobra.Command{ } materializer := workspace.NewMaterializer(casStore, ivaldiDir, workDir) + ignoreCache, _ := ignore.LoadPatternCache(workDir) + materializer.SetIgnorePatterns(ignoreCache) // Materialize the target timeline with auto-shelving enabled // This will automatically stash uncommitted changes and restore any existing shelf diff --git a/cli/travel.go b/cli/travel.go index 7629226..fb8ec82 100644 --- a/cli/travel.go +++ b/cli/travel.go @@ -14,6 +14,7 @@ import ( "github.com/javanhut/Ivaldi-vcs/internal/colors" "github.com/javanhut/Ivaldi-vcs/internal/commit" "github.com/javanhut/Ivaldi-vcs/internal/diffmerge" + "github.com/javanhut/Ivaldi-vcs/internal/ignore" "github.com/javanhut/Ivaldi-vcs/internal/refs" "github.com/javanhut/Ivaldi-vcs/internal/workspace" "github.com/spf13/cobra" @@ -621,6 +622,8 @@ func createDivergentTimeline(casStore cas.CAS, refsManager *refs.RefsManager, iv // Switch to new timeline materializer := workspace.NewMaterializer(casStore, ivaldiDir, workDir) + ignoreCache, _ := ignore.LoadPatternCache(workDir) + materializer.SetIgnorePatterns(ignoreCache) err = materializer.MaterializeTimeline(newTimelineName) if err != nil { return fmt.Errorf("failed to switch to new timeline: %w", err) @@ -648,6 +651,8 @@ func overwriteTimeline(casStore cas.CAS, refsManager *refs.RefsManager, ivaldiDi // Materialize workspace to this seal materializer := workspace.NewMaterializer(casStore, ivaldiDir, workDir) + ignoreCache, _ := ignore.LoadPatternCache(workDir) + materializer.SetIgnorePatterns(ignoreCache) // Get timeline with updated hash timeline, err := refsManager.GetTimeline(currentTimeline, refs.LocalTimeline) diff --git a/internal/github/sync_push.go b/internal/github/sync_push.go index 8c218db..3de700d 100644 --- a/internal/github/sync_push.go +++ b/internal/github/sync_push.go @@ 
-41,6 +41,7 @@ type fileHashResult struct { // computeFileDeltas compares two commits and returns changed files using parallel hash computation func (rs *RepoSyncer) computeFileDeltas(parentHash, currentHash cas.Hash) ([]FileChange, error) { + fmt.Printf("Computing file changes...\n") commitReader := commit.NewCommitReader(rs.casStore) // Determine worker count diff --git a/internal/workspace/workspace.go b/internal/workspace/workspace.go index e7912cf..f969082 100644 --- a/internal/workspace/workspace.go +++ b/internal/workspace/workspace.go @@ -110,6 +110,7 @@ func (m *Materializer) GetCurrentState() (*WorkspaceState, error) { // It respects ignore patterns set via SetIgnorePatterns and handles transient files // (files that disappear between discovery and read) gracefully. func (m *Materializer) ScanWorkspace() (wsindex.IndexRef, error) { + fmt.Fprintf(os.Stderr, "Scanning workspace...\n") var files []wsindex.FileMetadata err := filepath.WalkDir(m.WorkDir, func(path string, d fs.DirEntry, err error) error { @@ -196,6 +197,8 @@ func (m *Materializer) ScanWorkspace() (wsindex.IndexRef, error) { return wsindex.IndexRef{}, fmt.Errorf("failed to scan workspace: %w", err) } + fmt.Fprintf(os.Stderr, "Scanned %d files\n", len(files)) + // Build workspace index wsBuilder := wsindex.NewBuilder(m.CAS) return wsBuilder.Build(files) @@ -293,6 +296,7 @@ func (m *Materializer) MaterializeTimelineWithAutoShelf(timelineName string, ena // Auto-shelf current changes before switching (if enabled and switching between different timelines) if enableAutoShelf && currentTimelineName != "" && currentTimelineName != timelineName { + fmt.Printf("Shelving current workspace...\n") shelfManager := shelf.NewShelfManager(m.CAS, m.IvaldiDir) // Always remove any existing auto-shelf for the current timeline first @@ -368,6 +372,7 @@ func (m *Materializer) MaterializeTimelineWithAutoShelf(timelineName string, ena } // Compute differences between current state and target + fmt.Printf("Computing 
workspace changes...\n") differ := diffmerge.NewDiffer(m.CAS) diff, err := differ.DiffWorkspaces(currentState.Index, targetIndex) if err != nil { @@ -375,6 +380,7 @@ func (m *Materializer) MaterializeTimelineWithAutoShelf(timelineName string, ena } // Apply changes to working directory + fmt.Printf("Applying %d changes...\n", len(diff.FileChanges)) err = m.ApplyChangesToWorkspace(diff) if err != nil { return fmt.Errorf("failed to apply changes to workspace: %w", err) @@ -424,6 +430,7 @@ func (m *Materializer) CreateTargetIndex(timeline refs.Timeline) (wsindex.IndexR } // Create file metadata for each file + fmt.Fprintf(os.Stderr, "Reading %d files from seal...\n", len(filePaths)) var files []wsindex.FileMetadata for _, filePath := range filePaths { // Get file content to determine size and checksum From fce04d5926e0e81206615d633b16171132bba10d Mon Sep 17 00:00:00 2001 From: javanhut Date: Sun, 15 Feb 2026 16:18:23 +0000 Subject: [PATCH 03/12] feat: optimized for bottlenecks --- cli/diff.go | 72 ++++++++------------ cli/status.go | 83 ++++++++++------------- cli/timeline.go | 25 ++++--- internal/cas/file_cas.go | 8 +-- internal/diffmerge/diffmerge.go | 26 ++++---- internal/filechunk/filechunk.go | 79 ++++++++++++++++------ internal/hamtdir/hamtdir.go | 65 ++++++++++-------- internal/history/persistent_mmr.go | 95 ++++++++++----------------- internal/ignore/ignore.go | 58 ++++++++++------ internal/ignore/ignore_test.go | 7 +- internal/refs/refs.go | 58 ++++++++++------ internal/shelf/shelf.go | 13 ++-- internal/workspace/workspace.go | 102 ++++++++++++++++++++++++++--- 13 files changed, 400 insertions(+), 291 deletions(-) diff --git a/cli/diff.go b/cli/diff.go index efec2e4..5a46d49 100644 --- a/cli/diff.go +++ b/cli/diff.go @@ -154,39 +154,11 @@ func diffStagedVsHead(casStore cas.CAS, ivaldiDir, workDir string) error { return nil } - // Scan workspace to get current file data + // Scan only staged files instead of full workspace materializer := 
workspace.NewMaterializer(casStore, ivaldiDir, workDir) - ignoreCache, _ := ignore.LoadPatternCache(workDir) - materializer.SetIgnorePatterns(ignoreCache) - currentIndex, err := materializer.ScanWorkspace() - if err != nil { - return fmt.Errorf("failed to scan workspace: %w", err) - } - - wsLoader := wsindex.NewLoader(casStore) - allFiles, err := wsLoader.ListAll(currentIndex) - if err != nil { - return fmt.Errorf("failed to list files: %w", err) - } - - // Filter to staged files - var stagedMetadata []wsindex.FileMetadata - stagedMap := make(map[string]bool) - for _, f := range stagedFiles { - stagedMap[f] = true - } - - for _, file := range allFiles { - if stagedMap[file.Path] { - stagedMetadata = append(stagedMetadata, file) - } - } - - // Build staged index - wsBuilder := wsindex.NewBuilder(casStore) - stagedIndex, err := wsBuilder.Build(stagedMetadata) + stagedIndex, err := materializer.ScanSpecificFiles(stagedFiles) if err != nil { - return fmt.Errorf("failed to build staged index: %w", err) + return fmt.Errorf("failed to scan staged files: %w", err) } return showDiff(casStore, headIndex, stagedIndex, "HEAD", "staged") @@ -378,13 +350,20 @@ func readFileContent(casStore cas.CAS, file *wsindex.FileMetadata) ([]byte, erro return loader.ReadAll(file.FileRef) } -// getHeadIndex returns the workspace index for the HEAD commit -func getHeadIndex(casStore cas.CAS, ivaldiDir string) (wsindex.IndexRef, error) { - refsManager, err := refs.NewRefsManager(ivaldiDir) - if err != nil { - return wsindex.IndexRef{}, fmt.Errorf("failed to initialize refs: %w", err) +// getHeadIndex returns the workspace index for the HEAD commit. +// If rm is non-nil, it is reused; otherwise a new RefsManager is created. 
+func getHeadIndex(casStore cas.CAS, ivaldiDir string, rm ...*refs.RefsManager) (wsindex.IndexRef, error) { + var refsManager *refs.RefsManager + if len(rm) > 0 && rm[0] != nil { + refsManager = rm[0] + } else { + var err error + refsManager, err = refs.NewRefsManager(ivaldiDir) + if err != nil { + return wsindex.IndexRef{}, fmt.Errorf("failed to initialize refs: %w", err) + } + defer refsManager.Close() } - defer refsManager.Close() currentTimeline, err := refsManager.GetCurrentTimeline() if err != nil { @@ -432,13 +411,20 @@ func getCommitIndex(casStore cas.CAS, commitHash [32]byte) (wsindex.IndexRef, er return wsBuilder.Build(files) } -// getCommitIndexByRef resolves a ref (seal name or hash) to a workspace index -func getCommitIndexByRef(casStore cas.CAS, ivaldiDir, ref string) (wsindex.IndexRef, error) { - refsManager, err := refs.NewRefsManager(ivaldiDir) - if err != nil { - return wsindex.IndexRef{}, fmt.Errorf("failed to initialize refs: %w", err) +// getCommitIndexByRef resolves a ref (seal name or hash) to a workspace index. +// If rm is non-nil, it is reused; otherwise a new RefsManager is created. 
+func getCommitIndexByRef(casStore cas.CAS, ivaldiDir, ref string, rm ...*refs.RefsManager) (wsindex.IndexRef, error) { + var refsManager *refs.RefsManager + if len(rm) > 0 && rm[0] != nil { + refsManager = rm[0] + } else { + var err error + refsManager, err = refs.NewRefsManager(ivaldiDir) + if err != nil { + return wsindex.IndexRef{}, fmt.Errorf("failed to initialize refs: %w", err) + } + defer refsManager.Close() } - defer refsManager.Close() // Try to resolve as seal name first commitHash, _, _, err := refsManager.GetSealByName(ref) diff --git a/cli/status.go b/cli/status.go index 42b037f..d22c1dd 100644 --- a/cli/status.go +++ b/cli/status.go @@ -73,8 +73,14 @@ var statusCmd = &cobra.Command{ log.Printf("Warning: Failed to load ignore patterns: %v", err) } + // Get known files once (reuse for both status and seal info) + knownFiles, err := getKnownFiles(ivaldiDir, refsManager) + if err != nil { + log.Printf("Warning: Failed to get known files: %v", err) + } + // Get file statuses - fileStatuses, err := getFileStatuses(workDir, ivaldiDir, patternCache) + fileStatuses, err := getFileStatuses(workDir, ivaldiDir, patternCache, knownFiles) if err != nil { return fmt.Errorf("failed to get file statuses: %w", err) } @@ -83,7 +89,7 @@ var statusCmd = &cobra.Command{ fmt.Printf("On timeline %s\n", colors.Bold(currentTimeline)) // Show information about the last seal if available - err = displayLastSealInfo(refsManager, currentTimeline, ivaldiDir) + err = displayLastSealInfo(refsManager, currentTimeline, knownFiles) if err != nil { // Don't fail if we can't get seal info } @@ -194,7 +200,7 @@ func init() { } // getFileStatuses analyzes the working directory and returns file status information -func getFileStatuses(workDir, ivaldiDir string, patternCache *ignore.PatternCache) ([]FileStatusInfo, error) { +func getFileStatuses(workDir, ivaldiDir string, patternCache *ignore.PatternCache, knownFiles map[string][32]byte) ([]FileStatusInfo, error) { var fileStatuses 
[]FileStatusInfo // Get staged files @@ -203,10 +209,10 @@ func getFileStatuses(workDir, ivaldiDir string, patternCache *ignore.PatternCach log.Printf("Warning: Failed to get staged files: %v", err) } - // Get known files from last snapshot (if any) - knownFiles, err := getKnownFiles(ivaldiDir) - if err != nil { - log.Printf("Warning: Failed to get known files: %v", err) + // Build staged map for O(1) lookups + stagedMap := make(map[string]bool, len(stagedFiles)) + for _, f := range stagedFiles { + stagedMap[f] = true } // Walk the working directory @@ -249,25 +255,11 @@ func getFileStatuses(workDir, ivaldiDir string, patternCache *ignore.PatternCach return nil } - // Check if file is staged - isStaged := false - for _, stagedFile := range stagedFiles { - if stagedFile == relPath { - isStaged = true - break - } - } + // O(1) staged check + isStaged := stagedMap[relPath] - // Check if file was known in previous snapshot - wasKnown := false - var knownHash [32]byte - for filePath, hash := range knownFiles { - if filePath == relPath { - wasKnown = true - knownHash = hash - break - } - } + // O(1) known file check + knownHash, wasKnown := knownFiles[relPath] if isStaged { // File is staged - determine if it's new or modified @@ -319,14 +311,8 @@ func getFileStatuses(workDir, ivaldiDir string, patternCache *ignore.PatternCach for filePath := range knownFiles { fullPath := filepath.Join(workDir, filePath) if _, err := os.Stat(fullPath); os.IsNotExist(err) { - // File was deleted - isStaged := false - for _, stagedFile := range stagedFiles { - if stagedFile == filePath { - isStaged = true - break - } - } + // O(1) staged check + isStaged := stagedMap[filePath] if isStaged { // Deletion is staged @@ -372,23 +358,28 @@ func getStagedFiles(ivaldiDir string) ([]string, error) { } -// getKnownFiles reads files from the last commit/seal for proper status tracking -func getKnownFiles(ivaldiDir string) (map[string][32]byte, error) { +// getKnownFiles reads files from the last 
commit/seal for proper status tracking. +// If refsManager is non-nil, it is reused; otherwise a new one is created internally. +func getKnownFiles(ivaldiDir string, refsManager *refs.RefsManager) (map[string][32]byte, error) { knownFiles := make(map[string][32]byte) - // Get the current timeline and its last commit - refsManager, err := refs.NewRefsManager(ivaldiDir) - if err != nil { - return knownFiles, nil // No refs system, treat as empty + // Use provided RefsManager or create a new one + rm := refsManager + if rm == nil { + var err error + rm, err = refs.NewRefsManager(ivaldiDir) + if err != nil { + return knownFiles, nil // No refs system, treat as empty + } + defer rm.Close() } - defer refsManager.Close() - currentTimeline, err := refsManager.GetCurrentTimeline() + currentTimeline, err := rm.GetCurrentTimeline() if err != nil { return knownFiles, nil // No current timeline } - timeline, err := refsManager.GetTimeline(currentTimeline, refs.LocalTimeline) + timeline, err := rm.GetTimeline(currentTimeline, refs.LocalTimeline) if err != nil { return knownFiles, nil // Timeline doesn't exist } @@ -453,7 +444,7 @@ func computeFileHash(filePath string) ([32]byte, error) { } // displayLastSealInfo shows information about the last seal and its contents -func displayLastSealInfo(refsManager *refs.RefsManager, currentTimeline, ivaldiDir string) error { +func displayLastSealInfo(refsManager *refs.RefsManager, currentTimeline string, knownFiles map[string][32]byte) error { timeline, err := refsManager.GetTimeline(currentTimeline, refs.LocalTimeline) if err != nil { return err @@ -474,12 +465,6 @@ func displayLastSealInfo(refsManager *refs.RefsManager, currentTimeline, ivaldiD fmt.Printf("Last seal: %s\n", colors.Cyan(shortHash)) } - // Get files from the last commit - knownFiles, err := getKnownFiles(ivaldiDir) - if err != nil { - return err - } - if len(knownFiles) > 0 { fmt.Printf("Files tracked in last seal: %s\n", colors.InfoText(fmt.Sprintf("%d", 
len(knownFiles)))) } diff --git a/cli/timeline.go b/cli/timeline.go index 0d35973..0798d4e 100644 --- a/cli/timeline.go +++ b/cli/timeline.go @@ -103,7 +103,7 @@ var createTimelineCmd = &cobra.Command{ // THEN: Capture the workspace state for the NEW timeline log.Printf("Capturing current workspace state for new timeline") - err = createCommitFromWorkspace(casStore, ivaldiDir, currentTimeline, &baseHashes) + err = createCommitFromWorkspace(casStore, ivaldiDir, currentTimeline, &baseHashes, ¤tWorkspaceIndex) if err != nil { log.Printf("Warning: Could not create workspace snapshot: %v", err) @@ -370,7 +370,8 @@ var removeTimelineCmd = &cobra.Command{ // createCommitFromWorkspace creates a commit object from the current workspace state // and stores the commit hash in the provided baseHashes array. -func createCommitFromWorkspace(casStore cas.CAS, ivaldiDir string, parentTimeline string, baseHashes *[2][32]byte) error { +// If preScannedIndex is non-nil, it is used instead of rescanning the workspace. 
+func createCommitFromWorkspace(casStore cas.CAS, ivaldiDir string, parentTimeline string, baseHashes *[2][32]byte, preScannedIndex *wsindex.IndexRef) error { // Get the parent timeline's commit if it exists refsManager, err := refs.NewRefsManager(ivaldiDir) if err != nil { @@ -389,14 +390,18 @@ func createCommitFromWorkspace(casStore cas.CAS, ivaldiDir string, parentTimelin } } - // Scan current workspace to capture ALL files (both tracked and untracked) - // This becomes the initial state of the new timeline - materializer := workspace.NewMaterializer(casStore, ivaldiDir, ".") - ignoreCache, _ := ignore.LoadPatternCache(".") - materializer.SetIgnorePatterns(ignoreCache) - wsIndex, err := materializer.ScanWorkspace() - if err != nil { - return fmt.Errorf("failed to scan workspace: %w", err) + // Use pre-scanned index if available, otherwise scan workspace + var wsIndex wsindex.IndexRef + if preScannedIndex != nil { + wsIndex = *preScannedIndex + } else { + materializer := workspace.NewMaterializer(casStore, ivaldiDir, ".") + ignoreCache, _ := ignore.LoadPatternCache(".") + materializer.SetIgnorePatterns(ignoreCache) + wsIndex, err = materializer.ScanWorkspace() + if err != nil { + return fmt.Errorf("failed to scan workspace: %w", err) + } } wsLoader := wsindex.NewLoader(casStore) diff --git a/internal/cas/file_cas.go b/internal/cas/file_cas.go index 8150933..5a7e2c1 100644 --- a/internal/cas/file_cas.go +++ b/internal/cas/file_cas.go @@ -102,13 +102,7 @@ func (f *FileCAS) Get(hash Hash) ([]byte, error) { if err != nil { return nil, fmt.Errorf("failed to read file: %w", err) } - - // Verify the hash matches - computed := SumB3(data) - if computed != hash { - return nil, fmt.Errorf("corrupted data: hash mismatch for %s", hash.String()) - } - + return data, nil } diff --git a/internal/diffmerge/diffmerge.go b/internal/diffmerge/diffmerge.go index 9b24fef..751cc8f 100644 --- a/internal/diffmerge/diffmerge.go +++ b/internal/diffmerge/diffmerge.go @@ -717,21 +717,23 @@ 
func (a *Analyzer) DetectRenames(diff *WorkspaceDiff, threshold float64) []Renam } } - // Compare each removed file with each added file - for _, removedFile := range removed { - if removedFile.OldFile == nil { - continue + // Build hash index of removed files for O(n+m) lookup + removedByHash := make(map[cas.Hash][]FileChange) + for _, rm := range removed { + if rm.OldFile != nil { + removedByHash[rm.OldFile.FileRef.Hash] = append(removedByHash[rm.OldFile.FileRef.Hash], rm) } + } - for _, addedFile := range added { - if addedFile.NewFile == nil { - continue - } - - // Check if content is similar (same hash indicates exact match) - if removedFile.OldFile.FileRef.Hash == addedFile.NewFile.FileRef.Hash { + // Match added files against removed files by hash + for _, addedFile := range added { + if addedFile.NewFile == nil { + continue + } + if matches, ok := removedByHash[addedFile.NewFile.FileRef.Hash]; ok { + for _, rm := range matches { renames = append(renames, RenameDetection{ - OldPath: removedFile.Path, + OldPath: rm.Path, NewPath: addedFile.Path, Similarity: 1.0, // Exact match }) diff --git a/internal/filechunk/filechunk.go b/internal/filechunk/filechunk.go index 38670e0..6f2a71b 100644 --- a/internal/filechunk/filechunk.go +++ b/internal/filechunk/filechunk.go @@ -16,6 +16,7 @@ import ( "encoding/binary" "fmt" "io" + "sync" "github.com/javanhut/Ivaldi-vcs/internal/cas" ) @@ -302,6 +303,12 @@ func (l *Loader) readLeaf(data []byte, w io.Writer) error { return err } +// childResult holds the result of a parallel CAS read for a child node. +type childResult struct { + data []byte + err error +} + // readInternal reads content from an internal node. 
func (l *Loader) readInternal(data []byte, w io.Writer) error { if len(data) == 0 || data[0] != 0x01 { @@ -309,7 +316,7 @@ func (l *Loader) readInternal(data []byte, w io.Writer) error { } buf := bytes.NewReader(data[1:]) - + // Read child count childCount, err := binary.ReadUvarint(buf) if err != nil { @@ -331,29 +338,61 @@ func (l *Loader) readInternal(data []byte, w io.Writer) error { return fmt.Errorf("failed to read total size: %w", err) } - // Recursively read children - for _, childHash := range children { - // We need to determine the child's kind and size - // For simplicity, we'll peek at the stored data - childData, err := l.CAS.Get(childHash) - if err != nil { - return fmt.Errorf("failed to get child %s: %w", childHash, err) + // Parallel CAS reads when there are more than 2 children + if childCount > 2 { + results := make([]childResult, childCount) + var wg sync.WaitGroup + for i := uint64(0); i < childCount; i++ { + wg.Add(1) + go func(idx uint64) { + defer wg.Done() + d, e := l.CAS.Get(children[idx]) + results[idx] = childResult{data: d, err: e} + }(i) } + wg.Wait() - var childNode NodeRef - childNode.Hash = childHash - - if len(childData) > 0 && childData[0] == 0x00 { - childNode.Kind = Leaf - } else if len(childData) > 0 && childData[0] == 0x01 { - childNode.Kind = Node - } else { - return fmt.Errorf("invalid child node encoding") + // Write sequentially to maintain output order + for i, res := range results { + if res.err != nil { + return fmt.Errorf("failed to get child %s: %w", children[i], res.err) + } + + var childNode NodeRef + childNode.Hash = children[i] + if len(res.data) > 0 && res.data[0] == 0x00 { + childNode.Kind = Leaf + } else if len(res.data) > 0 && res.data[0] == 0x01 { + childNode.Kind = Node + } else { + return fmt.Errorf("invalid child node encoding") + } + + if err := l.readNode(childNode, w); err != nil { + return err + } } + } else { + // Sequential reads for <= 2 children + for _, childHash := range children { + childData, 
err := l.CAS.Get(childHash) + if err != nil { + return fmt.Errorf("failed to get child %s: %w", childHash, err) + } - err = l.readNode(childNode, w) - if err != nil { - return err + var childNode NodeRef + childNode.Hash = childHash + if len(childData) > 0 && childData[0] == 0x00 { + childNode.Kind = Leaf + } else if len(childData) > 0 && childData[0] == 0x01 { + childNode.Kind = Node + } else { + return fmt.Errorf("invalid child node encoding") + } + + if err := l.readNode(childNode, w); err != nil { + return err + } } } diff --git a/internal/hamtdir/hamtdir.go b/internal/hamtdir/hamtdir.go index d7e5597..a118376 100644 --- a/internal/hamtdir/hamtdir.go +++ b/internal/hamtdir/hamtdir.go @@ -253,14 +253,45 @@ func (b *Builder) encodeInternal(node *Node) []byte { return buf.Bytes() } -// Loader reads directory HAMTs. +// Loader reads directory HAMTs with an optional node cache. type Loader struct { - CAS cas.CAS + CAS cas.CAS + cache map[cas.Hash]*Node } +const maxCacheEntries = 1024 + // NewLoader creates a new Loader with the given CAS. func NewLoader(casStore cas.CAS) *Loader { - return &Loader{CAS: casStore} + return &Loader{ + CAS: casStore, + cache: make(map[cas.Hash]*Node), + } +} + +// getNode retrieves a node, checking the cache first. +func (l *Loader) getNode(hash cas.Hash) (*Node, error) { + if node, ok := l.cache[hash]; ok { + return node, nil + } + + data, err := l.CAS.Get(hash) + if err != nil { + return nil, err + } + + node, err := l.decodeNode(data) + if err != nil { + return nil, err + } + + // Simple eviction: clear cache when it gets too large + if len(l.cache) >= maxCacheEntries { + l.cache = make(map[cas.Hash]*Node) + } + l.cache[hash] = node + + return node, nil } // Lookup finds an entry by name in the directory. @@ -275,16 +306,11 @@ func (l *Loader) ListAll(dir DirRef) ([]Entry, error) { // List returns direct entries in the directory (non-recursive). 
func (l *Loader) List(dir DirRef) ([]Entry, error) { - data, err := l.CAS.Get(dir.Hash) + node, err := l.getNode(dir.Hash) if err != nil { return nil, fmt.Errorf("failed to get directory node: %w", err) } - node, err := l.decodeNode(data) - if err != nil { - return nil, err - } - if node.IsLeaf { return node.Entries, nil } @@ -308,16 +334,11 @@ func (l *Loader) List(dir DirRef) ([]Entry, error) { // lookupNode recursively searches for an entry by name. func (l *Loader) lookupNode(nodeHash cas.Hash, name string, depth int) (*Entry, error) { - data, err := l.CAS.Get(nodeHash) + node, err := l.getNode(nodeHash) if err != nil { return nil, fmt.Errorf("failed to get node: %w", err) } - node, err := l.decodeNode(data) - if err != nil { - return nil, err - } - if node.IsLeaf { // Search in leaf entries for _, entry := range node.Entries { @@ -346,16 +367,11 @@ func (l *Loader) lookupNode(nodeHash cas.Hash, name string, depth int) (*Entry, // listNode recursively lists all entries in a node. func (l *Loader) listNode(nodeHash cas.Hash) ([]Entry, error) { - data, err := l.CAS.Get(nodeHash) + node, err := l.getNode(nodeHash) if err != nil { return nil, fmt.Errorf("failed to get node: %w", err) } - node, err := l.decodeNode(data) - if err != nil { - return nil, err - } - if node.IsLeaf { return node.Entries, nil } @@ -552,16 +568,11 @@ func (l *Loader) WalkEntries(dir DirRef, walkFn func(path string, entry Entry) e // walkNode recursively walks a node with path prefix. 
func (l *Loader) walkNode(nodeHash cas.Hash, pathPrefix string, walkFn func(string, Entry) error) error { - data, err := l.CAS.Get(nodeHash) + node, err := l.getNode(nodeHash) if err != nil { return fmt.Errorf("failed to get node: %w", err) } - node, err := l.decodeNode(data) - if err != nil { - return err - } - if node.IsLeaf { for _, entry := range node.Entries { fullPath := entry.Name diff --git a/internal/history/persistent_mmr.go b/internal/history/persistent_mmr.go index 12f7363..6b0f7ef 100644 --- a/internal/history/persistent_mmr.go +++ b/internal/history/persistent_mmr.go @@ -43,7 +43,7 @@ func NewPersistentMMR(casStore cas.CAS, ivaldiDir string) (*PersistentMMR, error return p, nil } -// AppendLeaf appends a leaf and persists the state. +// AppendLeaf appends a leaf and persists the state in a single transaction. func (p *PersistentMMR) AppendLeaf(l Leaf) (uint64, Hash, error) { // Call parent implementation idx, root, err := p.MMR.AppendLeaf(l) @@ -51,13 +51,9 @@ func (p *PersistentMMR) AppendLeaf(l Leaf) (uint64, Hash, error) { return 0, Hash{}, err } - // Persist the leaf and MMR state - if err := p.persistLeaf(idx, l); err != nil { - return 0, Hash{}, fmt.Errorf("failed to persist leaf: %w", err) - } - - if err := p.persistMMRState(); err != nil { - return 0, Hash{}, fmt.Errorf("failed to persist MMR state: %w", err) + // Persist leaf + MMR state in a single transaction + if err := p.persistAll(idx, l); err != nil { + return 0, Hash{}, fmt.Errorf("failed to persist leaf and state: %w", err) } return idx, root, nil @@ -68,9 +64,9 @@ func (p *PersistentMMR) loadFromStorage() error { // Load MMR metadata (size, peaks, etc.) 
var metaData []byte err := p.db.View(func(tx *bbolt.Tx) error { - bucket, err := tx.CreateBucketIfNotExists([]byte("mmr")) - if err != nil { - return err + bucket := tx.Bucket([]byte("mmr")) + if bucket == nil { + return nil } metaData = bucket.Get([]byte("metadata")) return nil @@ -90,13 +86,14 @@ func (p *PersistentMMR) loadFromStorage() error { return fmt.Errorf("failed to unmarshal metadata: %w", err) } - // Load all leaves + // Load all leaves and nodes in a single transaction err = p.db.View(func(tx *bbolt.Tx) error { bucket := tx.Bucket([]byte("mmr")) if bucket == nil { return nil } + // Load leaves for i := uint64(0); i < metadata.Size; i++ { leafKey := p.leafKey(i) leafData := bucket.Get(leafKey) @@ -109,92 +106,60 @@ func (p *PersistentMMR) loadFromStorage() error { return fmt.Errorf("failed to unmarshal leaf %d: %w", i, err) } - // Add to in-memory MMR p.leaves = append(p.leaves, leaf) } + + // Load all node trees from peaks within the same transaction + for _, peakPos := range metadata.Peaks { + p.loadNodeTreeFromBucket(bucket, peakPos) + } + return nil }) if err != nil { return err } - // Load all nodes - for _, peakPos := range metadata.Peaks { - if err := p.loadNodeTree(peakPos); err != nil { - return fmt.Errorf("failed to load node tree at %d: %w", peakPos, err) - } - } - p.peaks = metadata.Peaks return nil } -// loadNodeTree recursively loads nodes from storage. -func (p *PersistentMMR) loadNodeTree(pos uint64) error { - var nodeData []byte - err := p.db.View(func(tx *bbolt.Tx) error { - bucket := tx.Bucket([]byte("mmr")) - if bucket == nil { - return nil - } - nodeKey := p.nodeKey(pos) - nodeData = bucket.Get(nodeKey) - return nil - }) - if err != nil { - return fmt.Errorf("failed to load node %d: %w", pos, err) - } +// loadNodeTreeFromBucket recursively loads nodes from a bucket within an existing transaction. 
+func (p *PersistentMMR) loadNodeTreeFromBucket(bucket *bbolt.Bucket, pos uint64) { + nodeKey := p.nodeKey(pos) + nodeData := bucket.Get(nodeKey) if nodeData == nil { - return nil // Node doesn't exist (might be a leaf) + return // Node doesn't exist (might be a leaf) } - var hash Hash if len(nodeData) != 32 { - return fmt.Errorf("invalid node data size: %d", len(nodeData)) + return } + + var hash Hash copy(hash[:], nodeData) p.nodes[pos] = hash // Recursively load children if this is an internal node height := p.getHeight(pos) if height > 0 { - // Load left and right children step := uint64(1) << (height - 1) leftPos := pos - step rightPos := pos - 1 - if err := p.loadNodeTree(leftPos); err != nil { - return err - } - if err := p.loadNodeTree(rightPos); err != nil { - return err - } + p.loadNodeTreeFromBucket(bucket, leftPos) + p.loadNodeTreeFromBucket(bucket, rightPos) } - - return nil } -// persistLeaf persists a single leaf to storage. -func (p *PersistentMMR) persistLeaf(idx uint64, leaf Leaf) error { +// persistAll persists a leaf, metadata, and all nodes in a single transaction. +func (p *PersistentMMR) persistAll(idx uint64, leaf Leaf) error { leafData, err := json.Marshal(leaf) if err != nil { return fmt.Errorf("failed to marshal leaf: %w", err) } - return p.db.Update(func(tx *bbolt.Tx) error { - bucket, err := tx.CreateBucketIfNotExists([]byte("mmr")) - if err != nil { - return err - } - leafKey := p.leafKey(idx) - return bucket.Put(leafKey, leafData) - }) -} - -// persistMMRState persists the current MMR state. 
-func (p *PersistentMMR) persistMMRState() error { - // Save metadata metadata := struct { Size uint64 `json:"size"` Peaks []uint64 `json:"peaks"` @@ -214,6 +179,12 @@ func (p *PersistentMMR) persistMMRState() error { return err } + // Save leaf + leafKey := p.leafKey(idx) + if err := bucket.Put(leafKey, leafData); err != nil { + return fmt.Errorf("failed to save leaf: %w", err) + } + // Save metadata if err := bucket.Put([]byte("metadata"), metaData); err != nil { return fmt.Errorf("failed to save metadata: %w", err) diff --git a/internal/ignore/ignore.go b/internal/ignore/ignore.go index 084bbb1..edb975a 100644 --- a/internal/ignore/ignore.go +++ b/internal/ignore/ignore.go @@ -12,12 +12,19 @@ import ( "strings" ) +// doubleStarPattern holds a pre-split ** glob pattern. +type doubleStarPattern struct { + prefix string // Part before ** + suffix string // Part after ** +} + // PatternCache holds pre-compiled ignore patterns for fast matching. type PatternCache struct { - patterns []string - dirPatterns []string // Patterns ending with / - globPatterns []string // Patterns with wildcards - literalMatches map[string]bool // Exact-match patterns + patterns []string + dirPatterns []string // Patterns ending with / + globPatterns []string // Patterns with wildcards (no **) + doubleStarPats []doubleStarPattern // Pre-split ** patterns + literalMatches map[string]bool // Exact-match patterns } // NewPatternCache creates a PatternCache from a list of patterns. 
@@ -30,6 +37,15 @@ func NewPatternCache(patterns []string) *PatternCache { for _, pattern := range patterns { if strings.HasSuffix(pattern, "/") { cache.dirPatterns = append(cache.dirPatterns, strings.TrimSuffix(pattern, "/")) + } else if strings.Contains(pattern, "**") { + // Pre-split ** patterns at cache creation time + parts := strings.SplitN(pattern, "**", 2) + if len(parts) == 2 { + cache.doubleStarPats = append(cache.doubleStarPats, doubleStarPattern{ + prefix: strings.TrimPrefix(parts[0], "/"), + suffix: strings.TrimPrefix(parts[1], "/"), + }) + } } else if strings.ContainsAny(pattern, "*?[") { cache.globPatterns = append(cache.globPatterns, pattern) } else { @@ -61,7 +77,7 @@ func (pc *PatternCache) IsIgnored(path string) bool { } } - // Check glob patterns + // Check simple glob patterns (no **) for _, pattern := range pc.globPatterns { if matched, _ := filepath.Match(pattern, path); matched { return true @@ -69,23 +85,23 @@ func (pc *PatternCache) IsIgnored(path string) bool { if matched, _ := filepath.Match(pattern, baseName); matched { return true } - // Handle ** patterns - if strings.Contains(pattern, "**") { - parts := strings.Split(pattern, "**") - if len(parts) == 2 { - prefix := strings.TrimPrefix(parts[0], "/") - suffix := strings.TrimPrefix(parts[1], "/") - - if prefix != "" && !strings.HasPrefix(path, prefix) { - continue - } - - if suffix != "" { - if matched, _ := filepath.Match(suffix, baseName); matched { - return true - } - } + } + + // Check pre-compiled ** patterns + for _, dsp := range pc.doubleStarPats { + if dsp.prefix != "" && !strings.HasPrefix(path, dsp.prefix) { + continue + } + if dsp.suffix != "" { + if matched, _ := filepath.Match(dsp.suffix, baseName); matched { + return true } + } else if dsp.prefix == "" { + // Pattern is just "**" — matches everything + return true + } else { + // Pattern is "prefix/**" — matches anything under prefix + return true } } diff --git a/internal/ignore/ignore_test.go 
b/internal/ignore/ignore_test.go index 4ba4d48..289e399 100644 --- a/internal/ignore/ignore_test.go +++ b/internal/ignore/ignore_test.go @@ -23,8 +23,11 @@ func TestNewPatternCache(t *testing.T) { if len(cache.dirPatterns) != 2 { t.Errorf("Expected 2 dir patterns, got %d", len(cache.dirPatterns)) } - if len(cache.globPatterns) != 2 { - t.Errorf("Expected 2 glob patterns, got %d", len(cache.globPatterns)) + if len(cache.globPatterns) != 1 { + t.Errorf("Expected 1 glob pattern, got %d", len(cache.globPatterns)) + } + if len(cache.doubleStarPats) != 1 { + t.Errorf("Expected 1 double-star pattern, got %d", len(cache.doubleStarPats)) } if len(cache.literalMatches) != 1 { t.Errorf("Expected 1 literal match, got %d", len(cache.literalMatches)) diff --git a/internal/refs/refs.go b/internal/refs/refs.go index f148f06..0d5fc85 100644 --- a/internal/refs/refs.go +++ b/internal/refs/refs.go @@ -34,9 +34,11 @@ type Timeline struct { // RefsManager handles timeline and reference management type RefsManager struct { - ivaldiDir string - refsDir string - db *store.SharedDB + ivaldiDir string + refsDir string + db *store.SharedDB + sealIndex map[[32]byte]string + sealIndexBuilt bool } // NewRefsManager creates a new refs manager @@ -584,7 +586,16 @@ func (rm *RefsManager) StoreSealName(sealName string, hash [32]byte, message str // Format: hash_hex timestamp message content := fmt.Sprintf("%s %d %s\n", hashHex, timestamp, message) - return os.WriteFile(sealPath, []byte(content), 0644) + if err := os.WriteFile(sealPath, []byte(content), 0644); err != nil { + return err + } + + // Update in-memory index if already built + if rm.sealIndexBuilt { + rm.sealIndex[hash] = sealName + } + + return nil } // GetSealByName retrieves seal information by name @@ -622,47 +633,52 @@ func (rm *RefsManager) GetSealByName(sealName string) (hash [32]byte, timestamp return hash, timestamp, message, nil } -// GetSealNameByHash retrieves seal name by hash (reverse lookup) -func (rm *RefsManager) 
GetSealNameByHash(hash [32]byte) (string, error) { +// buildSealIndex reads all seal files once and builds a hash→name map. +func (rm *RefsManager) buildSealIndex() { + rm.sealIndex = make(map[[32]byte]string) + rm.sealIndexBuilt = true + sealsDir := filepath.Join(rm.refsDir, "seals") if _, err := os.Stat(sealsDir); os.IsNotExist(err) { - return "", fmt.Errorf("no seals directory") + return } - hashHex := hex.EncodeToString(hash[:]) - - // Walk through all seal files to find matching hash - var foundSealName string - err := filepath.Walk(sealsDir, func(path string, info os.FileInfo, err error) error { + filepath.Walk(sealsDir, func(path string, info os.FileInfo, err error) error { if err != nil || info.IsDir() { return err } data, err := os.ReadFile(path) if err != nil { - return nil // Skip files we can't read + return nil } content := strings.TrimSpace(string(data)) parts := strings.SplitN(content, " ", 2) if len(parts) < 1 { - return nil // Skip malformed files + return nil } - if parts[0] == hashHex { - foundSealName = filepath.Base(path) - return fmt.Errorf("found") // Use error to break out of walk + hashBytes, err := hex.DecodeString(parts[0]) + if err != nil || len(hashBytes) != 32 { + return nil } + var h [32]byte + copy(h[:], hashBytes) + rm.sealIndex[h] = filepath.Base(path) return nil }) +} - if foundSealName != "" { - return foundSealName, nil +// GetSealNameByHash retrieves seal name by hash (reverse lookup) using a lazy-init index. 
+func (rm *RefsManager) GetSealNameByHash(hash [32]byte) (string, error) { + if !rm.sealIndexBuilt { + rm.buildSealIndex() } - if err != nil && err.Error() == "found" { - return foundSealName, nil + if name, ok := rm.sealIndex[hash]; ok { + return name, nil } return "", fmt.Errorf("seal name not found for hash") diff --git a/internal/shelf/shelf.go b/internal/shelf/shelf.go index 7359a87..c7b755f 100644 --- a/internal/shelf/shelf.go +++ b/internal/shelf/shelf.go @@ -8,6 +8,7 @@ import ( "fmt" "os" "path/filepath" + "sort" "strings" "time" @@ -155,13 +156,9 @@ func (sm *ShelfManager) listShelves() ([]Shelf, error) { } // Sort by creation time (newest first) - for i := 0; i < len(shelves)-1; i++ { - for j := i + 1; j < len(shelves); j++ { - if shelves[j].CreatedAt.After(shelves[i].CreatedAt) { - shelves[i], shelves[j] = shelves[j], shelves[i] - } - } - } + sort.Slice(shelves, func(i, j int) bool { + return shelves[i].CreatedAt.After(shelves[j].CreatedAt) + }) return shelves, nil } @@ -197,7 +194,7 @@ func (sm *ShelfManager) removeShelf(shelfID string) error { func (sm *ShelfManager) saveShelf(shelf *Shelf) error { shelfPath := filepath.Join(sm.shelfDir, shelf.ID+".json") - data, err := json.MarshalIndent(shelf, "", " ") + data, err := json.Marshal(shelf) if err != nil { return fmt.Errorf("failed to marshal shelf: %w", err) } diff --git a/internal/workspace/workspace.go b/internal/workspace/workspace.go index f969082..fc4f6c9 100644 --- a/internal/workspace/workspace.go +++ b/internal/workspace/workspace.go @@ -14,6 +14,7 @@ package workspace import ( + "encoding/json" "fmt" "io/fs" "log" @@ -43,12 +44,27 @@ type WorkspaceState struct { WorkDir string // Path to working directory } +// StatCacheEntry holds cached stat+hash data for a single file. 
+type StatCacheEntry struct { + ModTime int64 `json:"mod_time"` + Size int64 `json:"size"` + Checksum cas.Hash `json:"checksum"` + FileRef filechunk.NodeRef `json:"file_ref"` + Mode uint32 `json:"mode"` +} + +// StatCache maps relative paths to cached file metadata. +type StatCache struct { + Entries map[string]StatCacheEntry `json:"entries"` +} + // Materializer handles workspace materialization operations. type Materializer struct { CAS cas.CAS IvaldiDir string WorkDir string IgnoreCache *ignore.PatternCache + statCache *StatCache } // SetIgnorePatterns sets the ignore-pattern cache used by ScanWorkspace. @@ -58,23 +74,61 @@ func (m *Materializer) SetIgnorePatterns(pc *ignore.PatternCache) { // NewMaterializer creates a new Materializer. func NewMaterializer(casStore cas.CAS, ivaldiDir, workDir string) *Materializer { - return &Materializer{ + m := &Materializer{ CAS: casStore, IvaldiDir: ivaldiDir, WorkDir: workDir, } + m.loadStatCache() + return m } -// GetCurrentState reads the current workspace state. -func (m *Materializer) GetCurrentState() (*WorkspaceState, error) { - // Get current timeline - refsManager, err := refs.NewRefsManager(m.IvaldiDir) +// loadStatCache loads the stat cache from disk. +func (m *Materializer) loadStatCache() { + cachePath := filepath.Join(m.IvaldiDir, "stat-cache.json") + data, err := os.ReadFile(cachePath) if err != nil { - return nil, fmt.Errorf("failed to create refs manager: %w", err) + m.statCache = &StatCache{Entries: make(map[string]StatCacheEntry)} + return } - defer refsManager.Close() + var cache StatCache + if err := json.Unmarshal(data, &cache); err != nil { + m.statCache = &StatCache{Entries: make(map[string]StatCacheEntry)} + return + } + if cache.Entries == nil { + cache.Entries = make(map[string]StatCacheEntry) + } + m.statCache = &cache +} - timelineName, err := refsManager.GetCurrentTimeline() +// saveStatCache writes the stat cache to disk. 
+func (m *Materializer) saveStatCache() { + cachePath := filepath.Join(m.IvaldiDir, "stat-cache.json") + data, err := json.Marshal(m.statCache) + if err != nil { + return + } + os.WriteFile(cachePath, data, 0644) +} + +// GetCurrentState reads the current workspace state. +// If refsManager is non-nil, it is reused; otherwise a new one is created internally. +func (m *Materializer) GetCurrentState(refsManager ...*refs.RefsManager) (*WorkspaceState, error) { + // Use provided RefsManager or create a new one + var rm *refs.RefsManager + if len(refsManager) > 0 && refsManager[0] != nil { + rm = refsManager[0] + } else { + var err error + rm, err = refs.NewRefsManager(m.IvaldiDir) + if err != nil { + return nil, fmt.Errorf("failed to create refs manager: %w", err) + } + defer rm.Close() + } + + timelineName, err := rm.GetCurrentTimeline() if err != nil { // If there's no current timeline (no HEAD file), scan the current workspace // and create an index based on what's currently in the working directory @@ -162,6 +216,22 @@ func (m *Materializer) ScanWorkspace() (wsindex.IndexRef, error) { return err } + // Check stat cache: if mtime+size match, reuse cached entry + if cached, ok := m.statCache.Entries[relPath]; ok { + if cached.ModTime == info.ModTime().UnixNano() && cached.Size == info.Size() { + fileMetadata := wsindex.FileMetadata{ + Path: relPath, + FileRef: cached.FileRef, + ModTime: info.ModTime(), + Mode: cached.Mode, + Size: cached.Size, + Checksum: cached.Checksum, + } + files = append(files, fileMetadata) + return nil + } + } + // Read file content content, err := os.ReadFile(path) if err != nil { @@ -179,6 +249,8 @@ func (m *Materializer) ScanWorkspace() (wsindex.IndexRef, error) { return fmt.Errorf("failed to create file chunks for %s: %w", relPath, err) } + checksum := cas.SumB3(content) + // Create metadata fileMetadata := wsindex.FileMetadata{ Path: relPath, @@ -186,7 +258,16 @@ func (m *Materializer) ScanWorkspace() (wsindex.IndexRef, error) { ModTime: 
info.ModTime(), Mode: uint32(info.Mode()), Size: info.Size(), - Checksum: cas.SumB3(content), + Checksum: checksum, + } + + // Update stat cache + m.statCache.Entries[relPath] = StatCacheEntry{ + ModTime: info.ModTime().UnixNano(), + Size: info.Size(), + Checksum: checksum, + FileRef: fileRef, + Mode: uint32(info.Mode()), } files = append(files, fileMetadata) @@ -197,6 +278,9 @@ func (m *Materializer) ScanWorkspace() (wsindex.IndexRef, error) { return wsindex.IndexRef{}, fmt.Errorf("failed to scan workspace: %w", err) } + // Save updated stat cache + m.saveStatCache() + fmt.Fprintf(os.Stderr, "Scanned %d files\n", len(files)) // Build workspace index From 13961568b7b0b80b3e4bf17dda59385688554cb6 Mon Sep 17 00:00:00 2001 From: javanhut Date: Sun, 15 Feb 2026 16:20:54 +0000 Subject: [PATCH 04/12] feat: added aliases for gather --- cli/gather.go | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/cli/gather.go b/cli/gather.go index 0fd3f89..9e6b61c 100644 --- a/cli/gather.go +++ b/cli/gather.go @@ -212,9 +212,10 @@ var autoExcludePatterns = []string{ } var gatherCmd = &cobra.Command{ - Use: "gather [files...]", - Short: "Stage files for the next seal/commit", - Long: `Gathers (stages) specified files or all modified files that will be included in the next seal operation`, + Use: "gather [files...]", + Aliases: []string{"add", "select"}, + Short: "Stage files for the next seal/commit", + Long: `Gathers (stages) specified files or all modified files that will be included in the next seal operation`, RunE: func(cmd *cobra.Command, args []string) error { // Check if we're in an Ivaldi repository ivaldiDir := ".ivaldi" From 6b99ddd4e132091339d1826712e374415e83f5bd Mon Sep 17 00:00:00 2001 From: javanhut Date: Thu, 19 Feb 2026 15:14:57 +0000 Subject: [PATCH 05/12] fix: fixed delta upload issue --- internal/github/sync_push.go | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/internal/github/sync_push.go 
b/internal/github/sync_push.go index 3de700d..6603418 100644 --- a/internal/github/sync_push.go +++ b/internal/github/sync_push.go @@ -476,6 +476,19 @@ func (rs *RepoSyncer) PushCommit(ctx context.Context, owner, repo, branch string isNewBranch = false } + // Check if this exact commit was already pushed to this branch + if parentSHA != "" && !isNewBranch && !force { + refsManager, err := refs.NewRefsManager(rs.ivaldiDir) + if err == nil { + timeline, err := refsManager.GetTimeline(branch, refs.LocalTimeline) + refsManager.Close() + if err == nil && timeline.GitSHA1Hash == parentSHA { + fmt.Printf("Already up to date - no new commits to push\n") + return nil + } + } + } + // Get parent tree SHA from GitHub for delta optimization if parentSHA != "" && !isNewBranch { // Fetch the parent commit to get its tree SHA @@ -667,6 +680,12 @@ func (rs *RepoSyncer) PushCommit(ctx context.Context, owner, repo, branch string return fmt.Errorf("failed to create tree: %w", err) } + // Skip if the resulting tree is identical to what's already on GitHub + if parentTreeSHA != "" && treeResp.SHA == parentTreeSHA { + fmt.Printf("Already up to date - no file changes to push\n") + return nil + } + // Create commit on GitHub var parents []string if parentSHA != "" { From 386c39e4a5f1e9f76d0c6a73d5c10fbc481aef3d Mon Sep 17 00:00:00 2001 From: javanhut Date: Thu, 19 Feb 2026 22:33:09 +0000 Subject: [PATCH 06/12] feat: added rename function --- cli/cli.go | 2 +- cli/timeline.go | 49 +++++++++++++++++++++++++++++++++++++++++ internal/refs/refs.go | 51 +++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 101 insertions(+), 1 deletion(-) diff --git a/cli/cli.go b/cli/cli.go index 6edeae0..431c9ce 100644 --- a/cli/cli.go +++ b/cli/cli.go @@ -61,7 +61,7 @@ func init() { // Timeline management commands rootCmd.AddCommand(timelineCmd) - timelineCmd.AddCommand(createTimelineCmd, switchTimelineCmd, listTimelineCmd, removeTimelineCmd) + timelineCmd.AddCommand(createTimelineCmd, 
switchTimelineCmd, listTimelineCmd, removeTimelineCmd, renameTimelineCmd)
 	timelineCmd.AddCommand(butterflyCmd)
 	butterflyCmd.AddCommand(butterflyUpCmd, butterflyDownCmd, butterflyRemoveCmd)
 
diff --git a/cli/timeline.go b/cli/timeline.go
index 0798d4e..48b6ede 100644
--- a/cli/timeline.go
+++ b/cli/timeline.go
@@ -368,6 +368,55 @@ var removeTimelineCmd = &cobra.Command{
 	},
 }
 
+var renameTimelineCmd = &cobra.Command{
+	Use:     "rename <old-name> <new-name>",
+	Aliases: []string{"rn", "mv"},
+	Short:   "Rename a timeline",
+	Long: `Renames a local timeline. If the renamed timeline is the current one, HEAD is updated automatically.
+Any corresponding remote tracking reference is also renamed.
+
+When you upload after renaming, the new name will be used as the remote branch name.
+
+Example:
+  ivaldi timeline rename master main
+  ivaldi upload   # pushes to 'main' on remote`,
+	Args: cobra.ExactArgs(2),
+	RunE: func(cmd *cobra.Command, args []string) error {
+		oldName := args[0]
+		newName := args[1]
+
+		// Check if we're in an Ivaldi repository
+		ivaldiDir := ".ivaldi"
+		if _, err := os.Stat(ivaldiDir); os.IsNotExist(err) {
+			return fmt.Errorf("not in an Ivaldi repository (no .ivaldi directory found)")
+		}
+
+		// Initialize refs manager
+		refsManager, err := refs.NewRefsManager(ivaldiDir)
+		if err != nil {
+			return fmt.Errorf("failed to initialize refs manager: %w", err)
+		}
+		defer refsManager.Close()
+
+		// Perform the rename
+		if err := refsManager.RenameTimeline(oldName, newName, refs.LocalTimeline); err != nil {
+			return fmt.Errorf("failed to rename timeline: %w", err)
+		}
+
+		fmt.Printf("Renamed timeline '%s' -> '%s'\n", oldName, newName)
+
+		// Check if HEAD was updated
+		currentTimeline, err := refsManager.GetCurrentTimeline()
+		if err == nil && currentTimeline == newName {
+			fmt.Printf("HEAD updated to '%s'\n", newName)
+		}
+
+		fmt.Printf("Next upload will push to '%s' on remote\n", newName)
+
+		return nil
+	},
+}
+
 // createCommitFromWorkspace creates a commit object from the current 
workspace state // and stores the commit hash in the provided baseHashes array. // If preScannedIndex is non-nil, it is used instead of rescanning the workspace. diff --git a/internal/refs/refs.go b/internal/refs/refs.go index 0d5fc85..5397e42 100644 --- a/internal/refs/refs.go +++ b/internal/refs/refs.go @@ -351,6 +351,57 @@ func (rm *RefsManager) TimelineExists(name string, timelineType TimelineType) bo return err == nil } +// RenameTimeline renames a timeline from oldName to newName. +// It renames the ref file, updates HEAD if this is the current timeline, +// and renames the corresponding remote tracking ref if one exists. +func (rm *RefsManager) RenameTimeline(oldName, newName string, timelineType TimelineType) error { + // Verify old timeline exists + if !rm.TimelineExists(oldName, timelineType) { + return fmt.Errorf("timeline '%s' does not exist", oldName) + } + + // Verify new name doesn't already exist + if rm.TimelineExists(newName, timelineType) { + return fmt.Errorf("timeline '%s' already exists", newName) + } + + // Rename the ref file + oldPath := rm.getRefPath(oldName, timelineType) + newPath := rm.getRefPath(newName, timelineType) + + // Ensure parent directory exists for new path + if err := os.MkdirAll(filepath.Dir(newPath), 0755); err != nil { + return fmt.Errorf("create ref parent dir: %w", err) + } + + if err := os.Rename(oldPath, newPath); err != nil { + return fmt.Errorf("rename timeline ref: %w", err) + } + + // Update HEAD if this was the current timeline + if timelineType == LocalTimeline { + currentTimeline, err := rm.GetCurrentTimeline() + if err == nil && currentTimeline == oldName { + if err := rm.SetCurrentTimeline(newName); err != nil { + // Try to roll back the rename + os.Rename(newPath, oldPath) + return fmt.Errorf("update HEAD: %w", err) + } + } + + // Rename remote tracking ref if it exists + if rm.TimelineExists(oldName, RemoteTimeline) { + oldRemotePath := rm.getRefPath(oldName, RemoteTimeline) + newRemotePath := 
rm.getRefPath(newName, RemoteTimeline) + if err := os.MkdirAll(filepath.Dir(newRemotePath), 0755); err == nil { + os.Rename(oldRemotePath, newRemotePath) // Best-effort + } + } + } + + return nil +} + // GetTimelineSyncStatus compares local and remote timelines type TimelineSyncStatus struct { Name string From 795683a85955097d4fcb4b9c4d6ce02700c7aedf Mon Sep 17 00:00:00 2001 From: javanhut Date: Fri, 20 Feb 2026 04:14:38 +0000 Subject: [PATCH 07/12] feat: updated sync to use no standard default branch --- internal/github/sync_clone.go | 50 ++++++++++++++++++++++++++++------- 1 file changed, 40 insertions(+), 10 deletions(-) diff --git a/internal/github/sync_clone.go b/internal/github/sync_clone.go index fe03eff..4df13a3 100644 --- a/internal/github/sync_clone.go +++ b/internal/github/sync_clone.go @@ -23,30 +23,60 @@ func (rs *RepoSyncer) CloneRepository(ctx context.Context, owner, repo string, d if skipHistory { fmt.Println("Downloading latest snapshot (no API calls)...") - // Try common default branch names directly with archive download - var lastErr error + // Try common default branch names directly with archive download. + // This avoids API calls for the vast majority of repos. 
for _, branchName := range []string{"main", "master"} { fileCount, err := rs.downloadAndExtractArchive(ctx, owner, repo, branchName) if err == nil { fmt.Printf("Extracted %d files from archive (branch: %s)\n", fileCount, branchName) - - // Create initial commit in Ivaldi err = rs.createIvaldiCommit(fmt.Sprintf("Import from GitHub: %s/%s", owner, repo)) if err != nil { return fmt.Errorf("failed to create Ivaldi commit: %w", err) } - fmt.Printf("Successfully cloned snapshot from %s/%s\n", owner, repo) return nil } - lastErr = err } - // If all branch names failed, show the error and don't fall back to API - // (since API is likely also rate limited or repo doesn't exist) - if lastErr != nil { - return fmt.Errorf("failed to download repository: %w\n\nNote: This could mean:\n - The repository doesn't exist or is private\n - Check the repository name for typos\n - If private, run 'ivaldi auth login' first", lastErr) + // Neither "main" nor "master" worked. + // Fall back to a single API call to discover the actual default branch. 
+ fmt.Println("Standard branch names not found, querying repository info...") + rs.client.WaitForRateLimit() + + repoInfo, err := rs.client.GetRepository(ctx, owner, repo) + if err != nil { + return fmt.Errorf("failed to download repository (tried branches 'main' and 'master', then API lookup failed): %w\n\n"+ + "Note: This could mean:\n"+ + " - The repository doesn't exist or is private\n"+ + " - Check the repository name for typos\n"+ + " - If private, run 'ivaldi auth login' first", err) + } + + defaultBranch := repoInfo.DefaultBranch + if defaultBranch == "" { + return fmt.Errorf("repository '%s/%s' exists but has no default branch configured.\n"+ + "This usually means the repository is empty or misconfigured", owner, repo) } + + fmt.Printf("Repository uses non-standard default branch: '%s'\n", defaultBranch) + + fileCount, err := rs.downloadAndExtractArchive(ctx, owner, repo, defaultBranch) + if err != nil { + if repoInfo.Size == 0 { + return fmt.Errorf("repository '%s/%s' exists but appears to be empty (no commits).\n"+ + "Initialize the repository on GitHub first, or push content to it before downloading", owner, repo) + } + return fmt.Errorf("failed to download repository archive for branch '%s': %w", defaultBranch, err) + } + + fmt.Printf("Extracted %d files from archive (branch: %s)\n", fileCount, defaultBranch) + err = rs.createIvaldiCommit(fmt.Sprintf("Import from GitHub: %s/%s", owner, repo)) + if err != nil { + return fmt.Errorf("failed to create Ivaldi commit: %w", err) + } + + fmt.Printf("Successfully cloned snapshot from %s/%s\n", owner, repo) + return nil } // Check rate limits before API calls From 015ea17bff4f3bbb3d4d9243435510f8a58072ec Mon Sep 17 00:00:00 2001 From: javanhut Date: Fri, 20 Feb 2026 05:13:27 +0000 Subject: [PATCH 08/12] feat: updated rename logic --- cli/cli.go | 18 ++++++++++++------ cli/download.go | 30 +++++++++++++++++++++++++++--- cli/timeline.go | 9 ++++++++- cli/whereami.go | 3 +++ internal/gitclone/cloner.go | 33 
+++++++++++++++++++++++---------- internal/github/sync_clone.go | 35 ++++++++++++++++++----------------- internal/gitlab/sync.go | 19 ++++++++++--------- internal/ignore/ignore.go | 20 +++++++++++++++++--- internal/refs/refs.go | 24 +++++++++++++++++++++--- 9 files changed, 139 insertions(+), 52 deletions(-) diff --git a/cli/cli.go b/cli/cli.go index 431c9ce..d1fdd5f 100644 --- a/cli/cli.go +++ b/cli/cli.go @@ -244,8 +244,8 @@ func forgeCommand(cmd *cobra.Command, args []string) { if err != nil { logging.Warn("Failed to create initial commit", "error", err) } else if commitHash != nil { - // Update main timeline to point to the initial commit - logging.Info("Updating main timeline with initial commit...") + // Update current timeline to point to the initial commit + logging.Info("Updating current timeline with initial commit...") // Re-open refs manager to update the timeline refsManager2, err := refs.NewRefsManager(ivaldiDir) @@ -254,18 +254,24 @@ func forgeCommand(cmd *cobra.Command, args []string) { } else { defer refsManager2.Close() - // Update main timeline with the commit hash + // Read the current timeline from HEAD (set by InitializeFromGit) + currentTimeline, err := refsManager2.GetCurrentTimeline() + if err != nil { + currentTimeline = "main" // fallback for non-git repos + } + + // Update timeline with the commit hash err = refsManager2.UpdateTimeline( - "main", + currentTimeline, refs.LocalTimeline, *commitHash, // Use the actual commit hash [32]byte{}, // No SHA256 for now "", // No Git SHA1 ) if err != nil { - logging.Warn("Failed to update main timeline with initial commit", "error", err) + logging.Warn("Failed to update timeline with initial commit", "timeline", currentTimeline, "error", err) } else { - logging.Info("Successfully updated main timeline with initial commit") + logging.Info("Successfully updated timeline with initial commit", "timeline", currentTimeline) } } } diff --git a/cli/download.go b/cli/download.go index 8b27d4c..7813c58 
100644 --- a/cli/download.go +++ b/cli/download.go @@ -226,11 +226,19 @@ func handleGitHubDownload(rawURL string, args []string, depth int, skipHistory b defer cancel() fmt.Printf("Downloading from GitHub: %s/%s...\n", owner, repo) - if err := syncer.CloneRepository(ctx, owner, repo, depth, skipHistory, includeTags); err != nil { + defaultBranch, err := syncer.CloneRepository(ctx, owner, repo, depth, skipHistory, includeTags) + if err != nil { cleanup() return fmt.Errorf("failed to clone repository: %w", err) } + // Rename timeline if actual default branch differs from "main" + if defaultBranch != "" && defaultBranch != "main" { + if err := refsManager.RenameTimeline("main", defaultBranch, refs.LocalTimeline, false); err != nil { + log.Printf("Warning: Failed to rename timeline to '%s': %v", defaultBranch, err) + } + } + // Automatically detect and convert Git submodules (enabled by default) if recurseSubmodules { gitmodulesPath := filepath.Join(workDir, ".gitmodules") @@ -425,11 +433,19 @@ func handleGitLabDownload(rawURL string, args []string, baseURL string, depth in } else { fmt.Printf("Downloading from GitLab: %s/%s...\n", owner, repo) } - if err := syncer.CloneRepository(ctx, owner, repo, depth, skipHistory, includeTags); err != nil { + defaultBranch, err := syncer.CloneRepository(ctx, owner, repo, depth, skipHistory, includeTags) + if err != nil { cleanup() return fmt.Errorf("failed to clone repository: %w", err) } + // Rename timeline if actual default branch differs from "main" + if defaultBranch != "" && defaultBranch != "main" { + if err := refsManager.RenameTimeline("main", defaultBranch, refs.LocalTimeline, false); err != nil { + log.Printf("Warning: Failed to rename timeline to '%s': %v", defaultBranch, err) + } + } + fmt.Printf("Successfully downloaded repository from GitLab\n") return nil } @@ -543,11 +559,19 @@ func handleGenericGitDownload(rawURL string, args []string, depth int, skipHisto SSHKey: sshKey, } - if err := cloner.Clone(ctx, 
cloneOpts); err != nil { + defaultBranch, err := cloner.Clone(ctx, cloneOpts) + if err != nil { cleanup() return fmt.Errorf("failed to clone repository: %w", err) } + // Rename timeline if actual default branch differs from "main" + if defaultBranch != "" && defaultBranch != "main" { + if err := refsManager.RenameTimeline("main", defaultBranch, refs.LocalTimeline, false); err != nil { + log.Printf("Warning: Failed to rename timeline to '%s': %v", defaultBranch, err) + } + } + fmt.Printf("Successfully downloaded repository from Git server\n") return nil } diff --git a/cli/timeline.go b/cli/timeline.go index 48b6ede..991d3d6 100644 --- a/cli/timeline.go +++ b/cli/timeline.go @@ -398,8 +398,11 @@ Example: } defer refsManager.Close() + // Check --force flag + force, _ := cmd.Flags().GetBool("force") + // Perform the rename - if err := refsManager.RenameTimeline(oldName, newName, refs.LocalTimeline); err != nil { + if err := refsManager.RenameTimeline(oldName, newName, refs.LocalTimeline, force); err != nil { return fmt.Errorf("failed to rename timeline: %w", err) } @@ -507,3 +510,7 @@ func createCommitFromWorkspace(casStore cas.CAS, ivaldiDir string, parentTimelin return nil } + +func init() { + renameTimelineCmd.Flags().BoolP("force", "f", false, "Overwrite destination timeline if it already exists") +} diff --git a/cli/whereami.go b/cli/whereami.go index bc1acac..ae90fd8 100644 --- a/cli/whereami.go +++ b/cli/whereami.go @@ -14,6 +14,7 @@ import ( "github.com/javanhut/Ivaldi-vcs/internal/commit" "github.com/javanhut/Ivaldi-vcs/internal/diffmerge" "github.com/javanhut/Ivaldi-vcs/internal/history" + "github.com/javanhut/Ivaldi-vcs/internal/ignore" "github.com/javanhut/Ivaldi-vcs/internal/refs" "github.com/javanhut/Ivaldi-vcs/internal/workspace" "github.com/spf13/cobra" @@ -239,6 +240,8 @@ func displayWorkspaceStatus(ivaldiDir, workDir string) error { // Create materializer to get workspace status materializer := workspace.NewMaterializer(casStore, ivaldiDir, workDir) + 
ignoreCache, _ := ignore.LoadPatternCache(workDir) + materializer.SetIgnorePatterns(ignoreCache) status, err := materializer.GetWorkspaceStatus() if err != nil { return fmt.Errorf("failed to get workspace status: %w", err) diff --git a/internal/gitclone/cloner.go b/internal/gitclone/cloner.go index b0ca8da..7fa1d81 100644 --- a/internal/gitclone/cloner.go +++ b/internal/gitclone/cloner.go @@ -58,12 +58,13 @@ func NewCloner(ivaldiDir, workDir string) (*Cloner, error) { }, nil } -// Clone clones a Git repository and converts it to Ivaldi format -func (c *Cloner) Clone(ctx context.Context, opts *CloneOptions) error { +// Clone clones a Git repository and converts it to Ivaldi format. +// Returns the default branch name from the cloned repository. +func (c *Cloner) Clone(ctx context.Context, opts *CloneOptions) (string, error) { // Detect authentication auth, err := DetectAuth(opts.URL, opts.Username, opts.Password, opts.Token, opts.SSHKey) if err != nil { - return fmt.Errorf("authentication setup failed: %w", err) + return "", fmt.Errorf("authentication setup failed: %w", err) } opts.Auth = auth @@ -89,37 +90,43 @@ func (c *Cloner) Clone(ctx context.Context, opts *CloneOptions) error { // Clone to temporary directory tempDir, err := os.MkdirTemp("", "ivaldi-git-clone-*") if err != nil { - return fmt.Errorf("failed to create temp dir: %w", err) + return "", fmt.Errorf("failed to create temp dir: %w", err) } defer os.RemoveAll(tempDir) fmt.Println("Cloning repository using Git protocol...") repo, err := git.PlainCloneContext(ctx, tempDir, false, cloneOpts) if err != nil { - return c.handleCloneError(err, opts.URL) + return "", c.handleCloneError(err, opts.URL) } // Check if cloned repo has existing Ivaldi data ivaldiSrcDir := filepath.Join(tempDir, ".ivaldi") if info, err := os.Stat(ivaldiSrcDir); err == nil && info.IsDir() { fmt.Println("Found existing Ivaldi data in repository") - return c.importExistingIvaldiData(ivaldiSrcDir, tempDir) + return "", 
c.importExistingIvaldiData(ivaldiSrcDir, tempDir) } // Get HEAD reference ref, err := repo.Head() if err != nil { - return fmt.Errorf("failed to get HEAD: %w", err) + return "", fmt.Errorf("failed to get HEAD: %w", err) + } + + // Extract default branch name from HEAD reference (e.g., "refs/heads/master" -> "master") + defaultBranch := "main" + if ref.Name().IsBranch() { + defaultBranch = ref.Name().Short() } // Convert based on mode if opts.SkipHistory { fmt.Println("Extracting files without history...") - return c.checkoutFiles(repo, ref) + return defaultBranch, c.checkoutFiles(repo, ref) } fmt.Println("Importing commit history...") - return c.importHistory(repo, ref, opts.IncludeTags) + return defaultBranch, c.importHistory(repo, ref, opts.IncludeTags) } // handleCloneError provides user-friendly error messages @@ -192,6 +199,12 @@ func (c *Cloner) importHistory(repo *git.Repository, head *plumbing.Reference, i } defer refsManager.Close() + // Read the current timeline from HEAD + currentTimeline, err := refsManager.GetCurrentTimeline() + if err != nil { + currentTimeline = "main" + } + // Initialize MMR and commit builder mmr, err := history.NewPersistentMMR(c.casStore, c.ivaldiDir) if err != nil { @@ -261,7 +274,7 @@ func (c *Cloner) importHistory(repo *git.Repository, head *plumbing.Reference, i copy(hashArray[:], commitHash[:]) err = refsManager.UpdateTimeline( - "main", + currentTimeline, refs.LocalTimeline, hashArray, [32]byte{}, diff --git a/internal/github/sync_clone.go b/internal/github/sync_clone.go index 4df13a3..13ddddc 100644 --- a/internal/github/sync_clone.go +++ b/internal/github/sync_clone.go @@ -14,8 +14,9 @@ import ( "github.com/javanhut/Ivaldi-vcs/internal/logging" ) -// CloneRepository clones a GitHub repository to the local workspace -func (rs *RepoSyncer) CloneRepository(ctx context.Context, owner, repo string, depth int, skipHistory bool, includeTags bool) error { +// CloneRepository clones a GitHub repository to the local workspace. 
+// Returns the actual default branch name from the repository. +func (rs *RepoSyncer) CloneRepository(ctx context.Context, owner, repo string, depth int, skipHistory bool, includeTags bool) (string, error) { fmt.Printf("Cloning %s/%s from GitHub...\n", owner, repo) // If skip-history is set, try to download archive directly without API calls @@ -31,10 +32,10 @@ func (rs *RepoSyncer) CloneRepository(ctx context.Context, owner, repo string, d fmt.Printf("Extracted %d files from archive (branch: %s)\n", fileCount, branchName) err = rs.createIvaldiCommit(fmt.Sprintf("Import from GitHub: %s/%s", owner, repo)) if err != nil { - return fmt.Errorf("failed to create Ivaldi commit: %w", err) + return "", fmt.Errorf("failed to create Ivaldi commit: %w", err) } fmt.Printf("Successfully cloned snapshot from %s/%s\n", owner, repo) - return nil + return branchName, nil } } @@ -45,7 +46,7 @@ func (rs *RepoSyncer) CloneRepository(ctx context.Context, owner, repo string, d repoInfo, err := rs.client.GetRepository(ctx, owner, repo) if err != nil { - return fmt.Errorf("failed to download repository (tried branches 'main' and 'master', then API lookup failed): %w\n\n"+ + return "", fmt.Errorf("failed to download repository (tried branches 'main' and 'master', then API lookup failed): %w\n\n"+ "Note: This could mean:\n"+ " - The repository doesn't exist or is private\n"+ " - Check the repository name for typos\n"+ @@ -54,7 +55,7 @@ func (rs *RepoSyncer) CloneRepository(ctx context.Context, owner, repo string, d defaultBranch := repoInfo.DefaultBranch if defaultBranch == "" { - return fmt.Errorf("repository '%s/%s' exists but has no default branch configured.\n"+ + return "", fmt.Errorf("repository '%s/%s' exists but has no default branch configured.\n"+ "This usually means the repository is empty or misconfigured", owner, repo) } @@ -63,20 +64,20 @@ func (rs *RepoSyncer) CloneRepository(ctx context.Context, owner, repo string, d fileCount, err := rs.downloadAndExtractArchive(ctx, 
owner, repo, defaultBranch) if err != nil { if repoInfo.Size == 0 { - return fmt.Errorf("repository '%s/%s' exists but appears to be empty (no commits).\n"+ + return "", fmt.Errorf("repository '%s/%s' exists but appears to be empty (no commits).\n"+ "Initialize the repository on GitHub first, or push content to it before downloading", owner, repo) } - return fmt.Errorf("failed to download repository archive for branch '%s': %w", defaultBranch, err) + return "", fmt.Errorf("failed to download repository archive for branch '%s': %w", defaultBranch, err) } fmt.Printf("Extracted %d files from archive (branch: %s)\n", fileCount, defaultBranch) err = rs.createIvaldiCommit(fmt.Sprintf("Import from GitHub: %s/%s", owner, repo)) if err != nil { - return fmt.Errorf("failed to create Ivaldi commit: %w", err) + return "", fmt.Errorf("failed to create Ivaldi commit: %w", err) } fmt.Printf("Successfully cloned snapshot from %s/%s\n", owner, repo) - return nil + return defaultBranch, nil } // Check rate limits before API calls @@ -85,7 +86,7 @@ func (rs *RepoSyncer) CloneRepository(ctx context.Context, owner, repo string, d // Get repository info repoInfo, err := rs.client.GetRepository(ctx, owner, repo) if err != nil { - return fmt.Errorf("failed to get repository info: %w", err) + return "", fmt.Errorf("failed to get repository info: %w", err) } fmt.Printf("Repository: %s\n", repoInfo.FullName) @@ -97,13 +98,13 @@ func (rs *RepoSyncer) CloneRepository(ctx context.Context, owner, repo string, d // Get the default branch branch, err := rs.client.GetBranch(ctx, owner, repo, repoInfo.DefaultBranch) if err != nil { - return fmt.Errorf("failed to get branch info: %w", err) + return "", fmt.Errorf("failed to get branch info: %w", err) } // Check if we should skip history migration (backward compatibility) if skipHistory { fmt.Println("Skipping history migration, downloading latest snapshot only...") - return rs.cloneSnapshot(ctx, owner, repo, branch.Commit.SHA, repoInfo.DefaultBranch) 
+ return repoInfo.DefaultBranch, rs.cloneSnapshot(ctx, owner, repo, branch.Commit.SHA, repoInfo.DefaultBranch) } // Fetch commit history @@ -117,11 +118,11 @@ func (rs *RepoSyncer) CloneRepository(ctx context.Context, owner, repo string, d commits, err := rs.client.ListCommits(ctx, owner, repo, repoInfo.DefaultBranch, depth) if err != nil { - return fmt.Errorf("failed to fetch commit history: %w", err) + return "", fmt.Errorf("failed to fetch commit history: %w", err) } if len(commits) == 0 { - return fmt.Errorf("no commits found in repository") + return "", fmt.Errorf("no commits found in repository") } if depth == 0 { @@ -133,7 +134,7 @@ func (rs *RepoSyncer) CloneRepository(ctx context.Context, owner, repo string, d // Import commits in chronological order (reverse the list) err = rs.importCommitHistory(ctx, owner, repo, commits) if err != nil { - return fmt.Errorf("failed to import commit history: %w", err) + return "", fmt.Errorf("failed to import commit history: %w", err) } // Import tags if requested @@ -146,7 +147,7 @@ func (rs *RepoSyncer) CloneRepository(ctx context.Context, owner, repo string, d } fmt.Printf("Successfully cloned %s/%s with %d commits\n", owner, repo, len(commits)) - return nil + return repoInfo.DefaultBranch, nil } // cloneSnapshot downloads only the latest snapshot without history (backward compatibility) diff --git a/internal/gitlab/sync.go b/internal/gitlab/sync.go index 8b182fd..170f183 100644 --- a/internal/gitlab/sync.go +++ b/internal/gitlab/sync.go @@ -58,14 +58,15 @@ func NewRepoSyncerWithURL(ivaldiDir, workDir string, owner, repo, baseURL string }, nil } -// CloneRepository clones a GitLab repository without using Git -func (rs *RepoSyncer) CloneRepository(ctx context.Context, owner, repo string, depth int, skipHistory bool, includeTags bool) error { +// CloneRepository clones a GitLab repository without using Git. +// Returns the actual default branch name from the repository. 
+func (rs *RepoSyncer) CloneRepository(ctx context.Context, owner, repo string, depth int, skipHistory bool, includeTags bool) (string, error) { fmt.Printf("Cloning %s/%s from GitLab...\n", owner, repo) // Get project info project, err := rs.client.GetProject(ctx, owner, repo) if err != nil { - return fmt.Errorf("failed to get project info: %w", err) + return "", fmt.Errorf("failed to get project info: %w", err) } fmt.Printf("Project: %s\n", project.PathWithNamespace) @@ -77,13 +78,13 @@ func (rs *RepoSyncer) CloneRepository(ctx context.Context, owner, repo string, d // Get the default branch branch, err := rs.client.GetBranch(ctx, owner, repo, project.DefaultBranch) if err != nil { - return fmt.Errorf("failed to get branch info: %w", err) + return "", fmt.Errorf("failed to get branch info: %w", err) } // Check if we should skip history migration (backward compatibility) if skipHistory { fmt.Println("Skipping history migration, downloading latest snapshot only...") - return rs.cloneSnapshot(ctx, owner, repo, branch.Commit.ID, project.DefaultBranch) + return project.DefaultBranch, rs.cloneSnapshot(ctx, owner, repo, branch.Commit.ID, project.DefaultBranch) } // Fetch commit history @@ -97,11 +98,11 @@ func (rs *RepoSyncer) CloneRepository(ctx context.Context, owner, repo string, d commits, err := rs.client.ListCommits(ctx, owner, repo, project.DefaultBranch, depth) if err != nil { - return fmt.Errorf("failed to fetch commit history: %w", err) + return "", fmt.Errorf("failed to fetch commit history: %w", err) } if len(commits) == 0 { - return fmt.Errorf("no commits found in repository") + return "", fmt.Errorf("no commits found in repository") } fmt.Printf("Found %d commits to import\n", len(commits)) @@ -109,7 +110,7 @@ func (rs *RepoSyncer) CloneRepository(ctx context.Context, owner, repo string, d // Import commits in chronological order (reverse the list) err = rs.importCommitHistory(ctx, owner, repo, commits) if err != nil { - return fmt.Errorf("failed to import 
commit history: %w", err) + return "", fmt.Errorf("failed to import commit history: %w", err) } // Import tags if requested @@ -122,7 +123,7 @@ func (rs *RepoSyncer) CloneRepository(ctx context.Context, owner, repo string, d } fmt.Printf("Successfully cloned %s/%s with %d commits\n", owner, repo, len(commits)) - return nil + return project.DefaultBranch, nil } // cloneSnapshot downloads only the latest snapshot without history (backward compatibility) diff --git a/internal/ignore/ignore.go b/internal/ignore/ignore.go index edb975a..28db159 100644 --- a/internal/ignore/ignore.go +++ b/internal/ignore/ignore.go @@ -12,6 +12,16 @@ import ( "strings" ) +// DefaultPatterns are built-in ignore patterns for common VCS and tool directories. +// These are always applied, even without an .ivaldiignore file. +var DefaultPatterns = []string{ + ".git/", + ".svn/", + ".hg/", + ".fossil/", + ".claude/", +} + // doubleStarPattern holds a pre-split ** glob pattern. type doubleStarPattern struct { prefix string // Part before ** @@ -158,11 +168,15 @@ func LoadPatterns(workDir string) ([]string, error) { } // LoadPatternCache is a convenience function that loads patterns and creates a cache. -// Returns a non-nil cache even on error (with empty patterns). +// It prepends DefaultPatterns before any user-defined patterns from .ivaldiignore. +// Returns a non-nil cache even on error (with default patterns only). func LoadPatternCache(workDir string) (*PatternCache, error) { patterns, err := LoadPatterns(workDir) + combined := make([]string, 0, len(DefaultPatterns)+len(patterns)) + combined = append(combined, DefaultPatterns...) + combined = append(combined, patterns...) 
if err != nil { - return NewPatternCache(nil), err + return NewPatternCache(DefaultPatterns), err } - return NewPatternCache(patterns), nil + return NewPatternCache(combined), nil } diff --git a/internal/refs/refs.go b/internal/refs/refs.go index 5397e42..b57f9d9 100644 --- a/internal/refs/refs.go +++ b/internal/refs/refs.go @@ -354,15 +354,33 @@ func (rm *RefsManager) TimelineExists(name string, timelineType TimelineType) bo // RenameTimeline renames a timeline from oldName to newName. // It renames the ref file, updates HEAD if this is the current timeline, // and renames the corresponding remote tracking ref if one exists. -func (rm *RefsManager) RenameTimeline(oldName, newName string, timelineType TimelineType) error { +// If force is true, an existing destination timeline will be removed first, +// unless it is the current HEAD timeline. +func (rm *RefsManager) RenameTimeline(oldName, newName string, timelineType TimelineType, force bool) error { // Verify old timeline exists if !rm.TimelineExists(oldName, timelineType) { return fmt.Errorf("timeline '%s' does not exist", oldName) } - // Verify new name doesn't already exist + // Check if new name already exists if rm.TimelineExists(newName, timelineType) { - return fmt.Errorf("timeline '%s' already exists", newName) + if !force { + return fmt.Errorf("timeline '%s' already exists", newName) + } + + // Safety: refuse to overwrite the current HEAD timeline + if timelineType == LocalTimeline { + currentTimeline, err := rm.GetCurrentTimeline() + if err == nil && currentTimeline == newName { + return fmt.Errorf("cannot force-overwrite '%s': it is the current HEAD timeline", newName) + } + } + + // Remove the existing destination ref file + destPath := rm.getRefPath(newName, timelineType) + if err := os.Remove(destPath); err != nil { + return fmt.Errorf("failed to remove existing timeline '%s': %w", newName, err) + } } // Rename the ref file From 2fa407073383c65c10651f664ff9db859a61f961 Mon Sep 17 00:00:00 2001 From: 
javanhut Date: Wed, 25 Feb 2026 09:01:04 +0000 Subject: [PATCH 09/12] feat: updated ivaldi to be able to pull github repos without logining in --- cli/download.go | 2 +- cli/harvest.go | 9 +++++++-- cli/scout.go | 9 +++++++-- internal/github/client.go | 12 +++++++++--- internal/github/sync.go | 11 ++++++++--- 5 files changed, 32 insertions(+), 11 deletions(-) diff --git a/cli/download.go b/cli/download.go index 7813c58..53f4ff6 100644 --- a/cli/download.go +++ b/cli/download.go @@ -216,7 +216,7 @@ func handleGitHubDownload(rawURL string, args []string, depth int, skipHistory b } // Create syncer for cloning (uses optional auth - works for public repos without login) - syncer, err := github.NewRepoSyncerForClone(ivaldiDir, workDir) + syncer, err := github.NewRepoSyncerOptionalAuth(ivaldiDir, workDir) if err != nil { cleanup() return fmt.Errorf("failed to create syncer: %w", err) diff --git a/cli/harvest.go b/cli/harvest.go index c214f49..3c1c9e4 100644 --- a/cli/harvest.go +++ b/cli/harvest.go @@ -50,8 +50,8 @@ Examples: return fmt.Errorf("no GitHub repository configured. 
Use 'ivaldi portal add owner/repo' or download from GitHub first") } - // Create syncer - syncer, err := github.NewRepoSyncer(ivaldiDir, workDir) + // Create syncer (optional auth - works for public repos without login) + syncer, err := github.NewRepoSyncerOptionalAuth(ivaldiDir, workDir) if err != nil { return fmt.Errorf("failed to create GitHub syncer: %w", err) } @@ -59,6 +59,11 @@ Examples: ctx, cancel := context.WithTimeout(context.Background(), 10*time.Minute) defer cancel() + if !syncer.IsAuthenticated() { + fmt.Println("Note: Running without authentication (60 API requests/hour limit).") + fmt.Printf(" Run 'ivaldi auth login' for higher limits.\n\n") + } + fmt.Printf("Harvesting from GitHub repository: %s/%s\n", owner, repo) // Get remote timelines to refresh our knowledge diff --git a/cli/scout.go b/cli/scout.go index d48cf64..6537738 100644 --- a/cli/scout.go +++ b/cli/scout.go @@ -47,8 +47,8 @@ Examples: return fmt.Errorf("no GitHub repository configured. Use 'ivaldi portal add owner/repo' or download from GitHub first") } - // Create syncer - syncer, err := github.NewRepoSyncer(ivaldiDir, workDir) + // Create syncer (optional auth - works for public repos without login) + syncer, err := github.NewRepoSyncerOptionalAuth(ivaldiDir, workDir) if err != nil { return fmt.Errorf("failed to create GitHub syncer: %w", err) } @@ -56,6 +56,11 @@ Examples: ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute) defer cancel() + if !syncer.IsAuthenticated() { + fmt.Println("Note: Running without authentication (60 API requests/hour limit).") + fmt.Printf(" Run 'ivaldi auth login' for higher limits.\n\n") + } + fmt.Printf("Scouting GitHub repository: %s/%s\n\n", owner, repo) // Get remote timelines (this also updates the refs) diff --git a/internal/github/client.go b/internal/github/client.go index ac3ecd6..4c1f2cf 100644 --- a/internal/github/client.go +++ b/internal/github/client.go @@ -414,7 +414,9 @@ func (c *Client) doRequest(ctx context.Context, 
method, path string, body interf // Set headers req.Header.Set("Accept", AcceptHeader) - req.Header.Set("Authorization", fmt.Sprintf("%s %s", c.authHeaderType(), c.token)) + if c.token != "" { + req.Header.Set("Authorization", fmt.Sprintf("%s %s", c.authHeaderType(), c.token)) + } if body != nil { req.Header.Set("Content-Type", "application/json") } @@ -600,7 +602,9 @@ func (c *Client) DownloadFile(ctx context.Context, owner, repo, path, ref string req, err := http.NewRequestWithContext(ctx, "GET", rawURL, nil) if err == nil { - req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", c.token)) + if c.token != "" { + req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", c.token)) + } resp, err := c.httpClient.Do(req) if err == nil && resp.StatusCode == 200 { @@ -633,7 +637,9 @@ func (c *Client) DownloadFile(ctx context.Context, owner, repo, path, ref string if content.DownloadURL != "" { req, err := http.NewRequestWithContext(ctx, "GET", content.DownloadURL, nil) if err == nil { - req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", c.token)) + if c.token != "" { + req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", c.token)) + } resp, err := c.httpClient.Do(req) if err == nil { diff --git a/internal/github/sync.go b/internal/github/sync.go index 39ee976..f47c3a2 100644 --- a/internal/github/sync.go +++ b/internal/github/sync.go @@ -47,9 +47,9 @@ func NewRepoSyncer(ivaldiDir, workDir string) (*RepoSyncer, error) { }, nil } -// NewRepoSyncerForClone creates a repository syncer for cloning/downloading -// Authentication is optional - works for public repos without login -func NewRepoSyncerForClone(ivaldiDir, workDir string) (*RepoSyncer, error) { +// NewRepoSyncerOptionalAuth creates a repository syncer that works with or without auth. +// Suitable for read-only operations on public repos (scout, harvest, download). 
+func NewRepoSyncerOptionalAuth(ivaldiDir, workDir string) (*RepoSyncer, error) { // Use optional auth - allows downloading public repos without login client := NewClientOptionalAuth() @@ -68,6 +68,11 @@ func NewRepoSyncerForClone(ivaldiDir, workDir string) (*RepoSyncer, error) { }, nil } +// IsAuthenticated returns whether the syncer has auth configured +func (rs *RepoSyncer) IsAuthenticated() bool { + return rs.client.IsAuthenticated() +} + // PullChanges pulls latest changes from GitHub func (rs *RepoSyncer) PullChanges(ctx context.Context, owner, repo, branch string) error { fmt.Printf("Pulling changes from %s/%s...\n", owner, repo) From 5d90124194eb99865fd3f57c5d3077ec7ed95b65 Mon Sep 17 00:00:00 2001 From: javanhut Date: Wed, 25 Feb 2026 09:49:48 +0000 Subject: [PATCH 10/12] feat: implemented tui --- cli/cli.go | 3 + cli/tui.go | 28 ++ go.mod | 28 +- go.sum | 81 +++- internal/engine/diff.go | 507 +++++++++++++++++++++++++ internal/engine/fuse.go | 331 ++++++++++++++++ internal/engine/log.go | 204 ++++++++++ internal/engine/remote.go | 326 ++++++++++++++++ internal/engine/seal.go | 147 +++++++ internal/engine/staging.go | 117 ++++++ internal/engine/status.go | 341 +++++++++++++++++ internal/engine/timeline.go | 226 +++++++++++ internal/tui/app.go | 248 ++++++++++++ internal/tui/components/dialog.go | 120 ++++++ internal/tui/components/diffview.go | 240 ++++++++++++ internal/tui/components/filelist.go | 243 ++++++++++++ internal/tui/components/statusbar.go | 96 +++++ internal/tui/components/tabs.go | 47 +++ internal/tui/keys.go | 63 +++ internal/tui/style/common.go | 60 +++ internal/tui/style/theme.go | 133 +++++++ internal/tui/views/diff.go | 267 +++++++++++++ internal/tui/views/fuse.go | 548 +++++++++++++++++++++++++++ internal/tui/views/help.go | 142 +++++++ internal/tui/views/log.go | 348 +++++++++++++++++ internal/tui/views/remote.go | 503 ++++++++++++++++++++++++ internal/tui/views/status.go | 346 +++++++++++++++++ internal/tui/views/timeline.go | 497 
++++++++++++++++++++++++ 28 files changed, 6233 insertions(+), 7 deletions(-) create mode 100644 cli/tui.go create mode 100644 internal/engine/diff.go create mode 100644 internal/engine/fuse.go create mode 100644 internal/engine/log.go create mode 100644 internal/engine/remote.go create mode 100644 internal/engine/seal.go create mode 100644 internal/engine/staging.go create mode 100644 internal/engine/status.go create mode 100644 internal/engine/timeline.go create mode 100644 internal/tui/app.go create mode 100644 internal/tui/components/dialog.go create mode 100644 internal/tui/components/diffview.go create mode 100644 internal/tui/components/filelist.go create mode 100644 internal/tui/components/statusbar.go create mode 100644 internal/tui/components/tabs.go create mode 100644 internal/tui/keys.go create mode 100644 internal/tui/style/common.go create mode 100644 internal/tui/style/theme.go create mode 100644 internal/tui/views/diff.go create mode 100644 internal/tui/views/fuse.go create mode 100644 internal/tui/views/help.go create mode 100644 internal/tui/views/log.go create mode 100644 internal/tui/views/remote.go create mode 100644 internal/tui/views/status.go create mode 100644 internal/tui/views/timeline.go diff --git a/cli/cli.go b/cli/cli.go index d1fdd5f..e7ed54e 100644 --- a/cli/cli.go +++ b/cli/cli.go @@ -104,6 +104,9 @@ func init() { // Sync command rootCmd.AddCommand(syncCmd) + + // TUI command + rootCmd.AddCommand(tuiCmd) } func forgeCommand(cmd *cobra.Command, args []string) { diff --git a/cli/tui.go b/cli/tui.go new file mode 100644 index 0000000..920b32f --- /dev/null +++ b/cli/tui.go @@ -0,0 +1,28 @@ +package cli + +import ( + "fmt" + "os" + + "github.com/javanhut/Ivaldi-vcs/internal/tui" + "github.com/spf13/cobra" +) + +var tuiCmd = &cobra.Command{ + Use: "tui", + Short: "Launch the interactive TUI dashboard", + Long: `Opens an interactive terminal UI for managing your Ivaldi repository — staging, status, logs, and more.`, + RunE: func(cmd 
*cobra.Command, args []string) error { + ivaldiDir := ".ivaldi" + if _, err := os.Stat(ivaldiDir); os.IsNotExist(err) { + return fmt.Errorf("not in an Ivaldi repository (no .ivaldi directory found)") + } + + workDir, err := os.Getwd() + if err != nil { + return fmt.Errorf("failed to get working directory: %w", err) + } + + return tui.Run(workDir, ivaldiDir) + }, +} diff --git a/go.mod b/go.mod index d33617f..69e1f9f 100644 --- a/go.mod +++ b/go.mod @@ -3,7 +3,12 @@ module github.com/javanhut/Ivaldi-vcs go 1.24.5 require ( + github.com/charmbracelet/bubbles v1.0.0 + github.com/charmbracelet/bubbletea v1.3.10 + github.com/charmbracelet/lipgloss v1.1.0 + github.com/go-git/go-git/v5 v5.16.3 github.com/klauspost/compress v1.18.0 + github.com/schollz/progressbar/v3 v3.18.0 github.com/spf13/cobra v1.10.1 go.etcd.io/bbolt v1.4.3 golang.org/x/term v0.36.0 @@ -14,27 +19,44 @@ require ( dario.cat/mergo v1.0.0 // indirect github.com/Microsoft/go-winio v0.6.2 // indirect github.com/ProtonMail/go-crypto v1.1.6 // indirect + github.com/atotto/clipboard v0.1.4 // indirect + github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect + github.com/charmbracelet/colorprofile v0.4.1 // indirect + github.com/charmbracelet/x/ansi v0.11.6 // indirect + github.com/charmbracelet/x/cellbuf v0.0.15 // indirect + github.com/charmbracelet/x/term v0.2.2 // indirect + github.com/clipperhouse/displaywidth v0.9.0 // indirect + github.com/clipperhouse/stringish v0.1.1 // indirect + github.com/clipperhouse/uax29/v2 v2.5.0 // indirect github.com/cloudflare/circl v1.6.1 // indirect github.com/cyphar/filepath-securejoin v0.4.1 // indirect github.com/emirpasic/gods v1.18.1 // indirect + github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f // indirect github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect github.com/go-git/go-billy/v5 v5.6.2 // indirect - github.com/go-git/go-git/v5 v5.16.3 // indirect github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect 
github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect github.com/kevinburke/ssh_config v1.2.0 // indirect github.com/klauspost/cpuid/v2 v2.3.0 // indirect + github.com/lucasb-eyer/go-colorful v1.3.0 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect + github.com/mattn/go-localereader v0.0.1 // indirect + github.com/mattn/go-runewidth v0.0.19 // indirect github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db // indirect + github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 // indirect + github.com/muesli/cancelreader v0.2.2 // indirect + github.com/muesli/termenv v0.16.0 // indirect github.com/pjbgf/sha1cd v0.3.2 // indirect github.com/rivo/uniseg v0.4.7 // indirect - github.com/schollz/progressbar/v3 v3.18.0 // indirect github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 // indirect github.com/skeema/knownhosts v1.3.1 // indirect github.com/spf13/pflag v1.0.10 // indirect github.com/xanzy/ssh-agent v0.3.3 // indirect + github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect golang.org/x/crypto v0.37.0 // indirect golang.org/x/net v0.39.0 // indirect - golang.org/x/sys v0.37.0 // indirect + golang.org/x/sys v0.38.0 // indirect + golang.org/x/text v0.24.0 // indirect gopkg.in/warnings.v0 v0.1.2 // indirect ) diff --git a/go.sum b/go.sum index 89a853d..e883a9a 100644 --- a/go.sum +++ b/go.sum @@ -5,6 +5,36 @@ github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERo github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= github.com/ProtonMail/go-crypto v1.1.6 h1:ZcV+Ropw6Qn0AX9brlQLAUXfqLBc7Bl+f/DmNxpLfdw= github.com/ProtonMail/go-crypto v1.1.6/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE= +github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFIImctFaOjnTIavg87rW78vTPkQqLI8= +github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be/go.mod 
h1:ySMOLuWl6zY27l47sB3qLNK6tF2fkHG55UZxx8oIVo4= +github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio= +github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs= +github.com/atotto/clipboard v0.1.4 h1:EH0zSVneZPSuFR11BlR9YppQTVDbh5+16AmcJi4g1z4= +github.com/atotto/clipboard v0.1.4/go.mod h1:ZY9tmq7sm5xIbd9bOK4onWV4S6X0u6GY7Vn0Yu86PYI= +github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k= +github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8= +github.com/charmbracelet/bubbles v1.0.0 h1:12J8/ak/uCZEMQ6KU7pcfwceyjLlWsDLAxB5fXonfvc= +github.com/charmbracelet/bubbles v1.0.0/go.mod h1:9d/Zd5GdnauMI5ivUIVisuEm3ave1XwXtD1ckyV6r3E= +github.com/charmbracelet/bubbletea v1.3.10 h1:otUDHWMMzQSB0Pkc87rm691KZ3SWa4KUlvF9nRvCICw= +github.com/charmbracelet/bubbletea v1.3.10/go.mod h1:ORQfo0fk8U+po9VaNvnV95UPWA1BitP1E0N6xJPlHr4= +github.com/charmbracelet/colorprofile v0.4.1 h1:a1lO03qTrSIRaK8c3JRxJDZOvhvIeSco3ej+ngLk1kk= +github.com/charmbracelet/colorprofile v0.4.1/go.mod h1:U1d9Dljmdf9DLegaJ0nGZNJvoXAhayhmidOdcBwAvKk= +github.com/charmbracelet/lipgloss v1.1.0 h1:vYXsiLHVkK7fp74RkV7b2kq9+zDLoEU4MZoFqR/noCY= +github.com/charmbracelet/lipgloss v1.1.0/go.mod h1:/6Q8FR2o+kj8rz4Dq0zQc3vYf7X+B0binUUBwA0aL30= +github.com/charmbracelet/x/ansi v0.11.6 h1:GhV21SiDz/45W9AnV2R61xZMRri5NlLnl6CVF7ihZW8= +github.com/charmbracelet/x/ansi v0.11.6/go.mod h1:2JNYLgQUsyqaiLovhU2Rv/pb8r6ydXKS3NIttu3VGZQ= +github.com/charmbracelet/x/cellbuf v0.0.15 h1:ur3pZy0o6z/R7EylET877CBxaiE1Sp1GMxoFPAIztPI= +github.com/charmbracelet/x/cellbuf v0.0.15/go.mod h1:J1YVbR7MUuEGIFPCaaZ96KDl5NoS0DAWkskup+mOY+Q= +github.com/charmbracelet/x/term v0.2.2 h1:xVRT/S2ZcKdhhOuSP4t5cLi5o+JxklsoEObBSgfgZRk= +github.com/charmbracelet/x/term v0.2.2/go.mod h1:kF8CY5RddLWrsgVwpw4kAa6TESp6EB5y3uxGLeCqzAI= +github.com/chengxilo/virtualterm 
v1.0.4 h1:Z6IpERbRVlfB8WkOmtbHiDbBANU7cimRIof7mk9/PwM= +github.com/chengxilo/virtualterm v1.0.4/go.mod h1:DyxxBZz/x1iqJjFxTFcr6/x+jSpqN0iwWCOK1q10rlY= +github.com/clipperhouse/displaywidth v0.9.0 h1:Qb4KOhYwRiN3viMv1v/3cTBlz3AcAZX3+y9OLhMtAtA= +github.com/clipperhouse/displaywidth v0.9.0/go.mod h1:aCAAqTlh4GIVkhQnJpbL0T/WfcrJXHcj8C0yjYcjOZA= +github.com/clipperhouse/stringish v0.1.1 h1:+NSqMOr3GR6k1FdRhhnXrLfztGzuG+VuFDfatpWHKCs= +github.com/clipperhouse/stringish v0.1.1/go.mod h1:v/WhFtE1q0ovMta2+m+UbpZ+2/HEXNWYXQgCt4hdOzA= +github.com/clipperhouse/uax29/v2 v2.5.0 h1:x7T0T4eTHDONxFJsL94uKNKPHrclyFI0lm7+w94cO8U= +github.com/clipperhouse/uax29/v2 v2.5.0/go.mod h1:Wn1g7MK6OoeDT0vL+Q0SQLDz/KpfsVRgg6W7ihQeh4g= github.com/cloudflare/circl v1.6.1 h1:zqIqSPIndyBh1bjLVVDHMPpVKqp8Su/V+6MeDzzQBQ0= github.com/cloudflare/circl v1.6.1/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZh3pJrofs= github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g= @@ -13,16 +43,26 @@ github.com/cyphar/filepath-securejoin v0.4.1/go.mod h1:Sdj7gXlvMcPZsbhwhQ33GguGL github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/elazarl/goproxy v1.7.2 h1:Y2o6urb7Eule09PjlhQRGNsqRfPmYI3KKQLFpCAV3+o= +github.com/elazarl/goproxy v1.7.2/go.mod h1:82vkLNir0ALaW14Rc399OTTjyNREgmdL2cVoIbS6XaE= github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc= github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ= +github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f h1:Y/CXytFA4m6baUTXGLOoWe4PQhGxaX0KpnayAqC48p4= +github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f/go.mod h1:vw97MGsxSvLiUE2X8qFplwetxpGLQrlU1Q9AUEIzCaM= +github.com/gliderlabs/ssh v0.3.8 
h1:a4YXD1V7xMF9g5nTkdfnja3Sxy1PVDCj1Zg4Wb8vY6c= +github.com/gliderlabs/ssh v0.3.8/go.mod h1:xYoytBv1sV0aL3CavoDuJIQNURXkkfPA/wxQ1pL1fAU= github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 h1:+zs/tPmkDkHx3U66DAb0lQFJrpS6731Oaa12ikc+DiI= github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376/go.mod h1:an3vInlBmSxCcxctByoQdvwPiA7DTK7jaaFDBTtu0ic= github.com/go-git/go-billy/v5 v5.6.2 h1:6Q86EsPXMa7c3YZ3aLAQsMA0VlWmy43r6FHqa/UNbRM= github.com/go-git/go-billy/v5 v5.6.2/go.mod h1:rcFC2rAsp/erv7CMz9GczHcuD0D32fWzH+MJAU+jaUU= +github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399 h1:eMje31YglSBqCdIqdhKBW8lokaMrL3uTkpGYlE2OOT4= +github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399/go.mod h1:1OCfN199q1Jm3HZlxleg+Dw/mwps2Wbk9frAWm+4FII= github.com/go-git/go-git/v5 v5.16.3 h1:Z8BtvxZ09bYm/yYNgPKCzgWtaRqDTgIKRgIRHBfU6Z8= github.com/go-git/go-git/v5 v5.16.3/go.mod h1:4Ge4alE/5gPs30F2H1esi2gPd69R0C39lolkucHBOp8= github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 h1:f+oWsMOmNPc8JmEHVZIycC7hBoQxHH9pNKQORJNozsQ= github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8/go.mod h1:wcDNUvekVysuuOpQKo3191zZyTpiI6se1N1ULghS0sw= +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A= @@ -34,17 +74,40 @@ github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYW github.com/klauspost/cpuid/v2 v2.3.0 h1:S4CRMLnYUhGeDFDqkGriYKdfoFlDnMtqTiI/sFzhA9Y= github.com/klauspost/cpuid/v2 v2.3.0/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0= github.com/kr/pretty v0.1.0/go.mod 
h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/lucasb-eyer/go-colorful v1.3.0 h1:2/yBRLdWBZKrf7gB40FoiKfAWYQ0lqNcbuQwVHXptag= +github.com/lucasb-eyer/go-colorful v1.3.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-localereader v0.0.1 h1:ygSAOl7ZXTx4RdPYinUpg6W99U8jWvWi9Ye2JC/oIi4= +github.com/mattn/go-localereader v0.0.1/go.mod h1:8fBrzywKY7BI3czFoHkuzRoWE9C+EiG4R1k4Cjx5p88= +github.com/mattn/go-runewidth v0.0.19 h1:v++JhqYnZuu5jSKrk9RbgF5v4CGUjqRfBm05byFGLdw= +github.com/mattn/go-runewidth v0.0.19/go.mod h1:XBkDxAl56ILZc9knddidhrOlY5R/pDhgLpndooCuJAs= github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db h1:62I3jR2EmQ4l5rM/4FEfDWcRD+abF5XlKShorW5LRoQ= github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db/go.mod h1:l0dey0ia/Uv7NcFFVbCLtqEBQbrT4OCwCSKTEv6enCw= +github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 h1:ZK8zHtRHOkbHy6Mmr5D264iyp3TiX5OmNcI5cIARiQI= +github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6/go.mod h1:CJlz5H+gyd6CUWT45Oy4q24RdLyn7Md9Vj2/ldJBSIo= +github.com/muesli/cancelreader v0.2.2 h1:3I4Kt4BQjOR54NavqnDogx/MIoWBFa0StPA8ELUXHmA= +github.com/muesli/cancelreader v0.2.2/go.mod h1:3XuTXfFS2VjM+HTLZY9Ak0l6eUKfijIfMUZ4EgX0QYo= +github.com/muesli/termenv v0.16.0 h1:S5AlUN9dENB57rsbnkPyfdGuWIlkmzJjbFf0Tf5FWUc= 
+github.com/muesli/termenv v0.16.0/go.mod h1:ZRfOIKPFDYQoDFF4Olj7/QJbW60Ol/kL1pU3VfY/Cnk= +github.com/onsi/gomega v1.34.1 h1:EUMJIKUjM8sKjYbtxQI9A4z2o+rruxnzNvpknOXie6k= +github.com/onsi/gomega v1.34.1/go.mod h1:kU1QgUvBDLXBJq618Xvm2LUX6rSAfRaFRTcdOeDLwwY= github.com/pjbgf/sha1cd v0.3.2 h1:a9wb0bp1oC2TGwStyn0Umc/IGKQnEgF0vVaZ8QF8eo4= github.com/pjbgf/sha1cd v0.3.2/go.mod h1:zQWigSxVmsHEZow5qaLtPYxpcKMMQpa09ixqBxuCS6A= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ= github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= +github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= +github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/schollz/progressbar/v3 v3.18.0 h1:uXdoHABRFmNIjUfte/Ex7WtuyVslrw2wVPQmCN62HpA= github.com/schollz/progressbar/v3 v3.18.0/go.mod h1:IsO3lpbaGuzh8zIMzgY3+J8l4C8GjO0Y9S69eFvNsec= @@ -65,31 +128,41 @@ github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOf github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM= github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI1Bc68Uw= +github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e h1:JVG44RsyaB9T2KIHavMF/ppJZNG9ZpyihvCd0w101no= +github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e/go.mod h1:RbqR21r5mrJuqunuUZ/Dhy/avygyECGrLceyNeo4LiM= go.etcd.io/bbolt 
v1.4.3 h1:dEadXpI6G79deX5prL3QRNP6JB8UxVkqo4UPnHaNXJo= go.etcd.io/bbolt v1.4.3/go.mod h1:tKQlpPaYCVFctUIgFKFnAlvbmB3tpy1vkTnDWohtc0E= golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.37.0 h1:kJNSjF/Xp7kU0iB2Z+9viTPMW4EqqsrywMXLJOOsXSE= golang.org/x/crypto v0.37.0/go.mod h1:vg+k43peMZ0pUMhYmVAWysMK35e6ioLh3wB8ZCAfbVc= +golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56 h1:2dVuKD2vS7b0QIHQbpyTISPd0LeHDbnYEryqj5Q1ug8= +golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56/go.mod h1:M4RDyNAINzryxdtnbRXRL/OHtkFuWGRjvuhBJpk2IlY= golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.39.0 h1:ZCu7HMWDxpXpaiKdhzIfaltL9Lp31x/3fCP11bc6/fY= golang.org/x/net v0.39.0/go.mod h1:X7NRbYVEA+ewNkCNyJ513WmMdQ3BineSwVtN2zD/d+E= -golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ= -golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.13.0 h1:AauUjRAJ9OSnvULf/ARrrVywoJDy0YS2AwQ98I37610= +golang.org/x/sync v0.13.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.37.0 
h1:fdNQudmxPjkdUTPnLn5mdQv7Zwvbvpaxqs831goi9kQ= -golang.org/x/sys v0.37.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc= +golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.36.0 h1:zMPR+aF8gfksFprF/Nc/rd1wRS1EI6nDBGyWAvDzx2Q= golang.org/x/term v0.36.0/go.mod h1:Qu394IJq6V6dCBRgwqshf3mPF85AqzYEzofzRdZkWss= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.24.0 h1:dd5Bzh4yt5KYA8f9CJHCP4FB4D51c2c6JvN37xJJkJ0= +golang.org/x/text v0.24.0/go.mod h1:L8rBsPeo2pSS+xqN0d5u2ikmjtmoJbDBT1b7nHvFCdU= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME= gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= diff --git a/internal/engine/diff.go b/internal/engine/diff.go new file mode 100644 index 0000000..8c52b61 --- /dev/null +++ b/internal/engine/diff.go @@ -0,0 +1,507 @@ +package engine + +import ( + "fmt" + "path/filepath" + "strings" + + "github.com/javanhut/Ivaldi-vcs/internal/cas" + "github.com/javanhut/Ivaldi-vcs/internal/commit" + 
"github.com/javanhut/Ivaldi-vcs/internal/diffmerge" + "github.com/javanhut/Ivaldi-vcs/internal/filechunk" + "github.com/javanhut/Ivaldi-vcs/internal/ignore" + "github.com/javanhut/Ivaldi-vcs/internal/refs" + "github.com/javanhut/Ivaldi-vcs/internal/workspace" + "github.com/javanhut/Ivaldi-vcs/internal/wsindex" +) + +// DiffChangeType mirrors diffmerge.ChangeType for the engine API +type DiffChangeType uint8 + +const ( + DiffAdded DiffChangeType = 1 + DiffModified DiffChangeType = 2 + DiffRemoved DiffChangeType = 3 +) + +// FileDiff holds diff information for a single file +type FileDiff struct { + Path string + Type DiffChangeType + OldSize int64 + NewSize int64 + Hunks []DiffHunk // Line-level hunks (populated for text files) + AddedLines int + RemovedLines int + IsBinary bool +} + +// DiffHunk represents a chunk of line changes +type DiffHunk struct { + OldStart int + OldCount int + NewStart int + NewCount int + Lines []DiffLine +} + +// DiffLine represents a single line in a diff hunk +type DiffLine struct { + Type DiffLineType + Content string +} + +// DiffLineType indicates whether a line was added, removed, or context +type DiffLineType uint8 + +const ( + DiffLineContext DiffLineType = iota + DiffLineAdd + DiffLineRemove +) + +// DiffOptions controls what to diff +type DiffOptions struct { + Staged bool // If true, diff staged vs HEAD; otherwise working dir vs HEAD +} + +// DiffResult holds the complete diff output +type DiffResult struct { + OldName string + NewName string + Files []FileDiff + Stats DiffStats +} + +// DiffStats holds summary statistics +type DiffStats struct { + Added int + Modified int + Removed int +} + +// ComputeDiff computes the diff for the working directory +func ComputeDiff(ivaldiDir, workDir string, opts DiffOptions) (*DiffResult, error) { + objectsDir := filepath.Join(ivaldiDir, "objects") + casStore, err := cas.NewFileCAS(objectsDir) + if err != nil { + return nil, fmt.Errorf("failed to initialize storage: %w", err) + } + + 
refsManager, err := refs.NewRefsManager(ivaldiDir) + if err != nil { + return nil, fmt.Errorf("failed to initialize refs: %w", err) + } + defer refsManager.Close() + + // Get HEAD index + headIndex, err := getHeadIndex(casStore, ivaldiDir, refsManager) + if err != nil { + return nil, err + } + + var workingIndex wsindex.IndexRef + var oldName, newName string + + if opts.Staged { + // Staged vs HEAD + stagedFiles, err := GetStagedFiles(ivaldiDir) + if err != nil { + return nil, fmt.Errorf("failed to get staged files: %w", err) + } + if len(stagedFiles) == 0 { + return &DiffResult{ + OldName: "HEAD", + NewName: "staged", + }, nil + } + + materializer := workspace.NewMaterializer(casStore, ivaldiDir, workDir) + workingIndex, err = materializer.ScanSpecificFiles(stagedFiles) + if err != nil { + return nil, fmt.Errorf("failed to scan staged files: %w", err) + } + oldName = "HEAD" + newName = "staged" + } else { + // Working directory vs HEAD + materializer := workspace.NewMaterializer(casStore, ivaldiDir, workDir) + ignoreCache, _ := ignore.LoadPatternCache(workDir) + materializer.SetIgnorePatterns(ignoreCache) + workingIndex, err = materializer.ScanWorkspace() + if err != nil { + return nil, fmt.Errorf("failed to scan workspace: %w", err) + } + oldName = "HEAD" + newName = "working directory" + } + + differ := diffmerge.NewDiffer(casStore) + wsDiff, err := differ.DiffWorkspaces(headIndex, workingIndex) + if err != nil { + return nil, fmt.Errorf("failed to compute diff: %w", err) + } + + result := &DiffResult{ + OldName: oldName, + NewName: newName, + } + + for _, change := range wsDiff.FileChanges { + fileDiff := buildFileDiff(casStore, change) + result.Files = append(result.Files, fileDiff) + + switch change.Type { + case diffmerge.Added: + result.Stats.Added++ + case diffmerge.Modified: + result.Stats.Modified++ + case diffmerge.Removed: + result.Stats.Removed++ + } + } + + return result, nil +} + +// buildFileDiff creates a FileDiff from a FileChange, including 
line-level diffs +func buildFileDiff(casStore cas.CAS, change diffmerge.FileChange) FileDiff { + fd := FileDiff{ + Path: change.Path, + } + + switch change.Type { + case diffmerge.Added: + fd.Type = DiffAdded + if change.NewFile != nil { + fd.NewSize = change.NewFile.FileRef.Size + content, err := readContent(casStore, change.NewFile) + if err == nil && !isBinaryContent(content) { + lines := strings.Split(string(content), "\n") + fd.AddedLines = len(lines) + fd.Hunks = []DiffHunk{buildAddHunk(lines)} + } else { + fd.IsBinary = true + } + } + + case diffmerge.Removed: + fd.Type = DiffRemoved + if change.OldFile != nil { + fd.OldSize = change.OldFile.FileRef.Size + content, err := readContent(casStore, change.OldFile) + if err == nil && !isBinaryContent(content) { + lines := strings.Split(string(content), "\n") + fd.RemovedLines = len(lines) + fd.Hunks = []DiffHunk{buildRemoveHunk(lines)} + } else { + fd.IsBinary = true + } + } + + case diffmerge.Modified: + fd.Type = DiffModified + if change.OldFile != nil { + fd.OldSize = change.OldFile.FileRef.Size + } + if change.NewFile != nil { + fd.NewSize = change.NewFile.FileRef.Size + } + + if change.OldFile != nil && change.NewFile != nil { + oldContent, errOld := readContent(casStore, change.OldFile) + newContent, errNew := readContent(casStore, change.NewFile) + + if errOld == nil && errNew == nil && !isBinaryContent(oldContent) && !isBinaryContent(newContent) { + oldLines := strings.Split(string(oldContent), "\n") + newLines := strings.Split(string(newContent), "\n") + hunks := computeUnifiedDiff(oldLines, newLines, 3) + fd.Hunks = hunks + + for _, h := range hunks { + for _, l := range h.Lines { + switch l.Type { + case DiffLineAdd: + fd.AddedLines++ + case DiffLineRemove: + fd.RemovedLines++ + } + } + } + } else { + fd.IsBinary = true + } + } + } + + return fd +} + +// readContent reads file content from CAS +func readContent(casStore cas.CAS, file *wsindex.FileMetadata) ([]byte, error) { + loader := 
filechunk.NewLoader(casStore) + return loader.ReadAll(file.FileRef) +} + +// isBinaryContent checks if content appears to be binary +func isBinaryContent(data []byte) bool { + // Check first 8KB for null bytes + checkLen := len(data) + if checkLen > 8192 { + checkLen = 8192 + } + for i := 0; i < checkLen; i++ { + if data[i] == 0 { + return true + } + } + return false +} + +// buildAddHunk creates a hunk for an entirely new file +func buildAddHunk(lines []string) DiffHunk { + h := DiffHunk{ + OldStart: 0, + OldCount: 0, + NewStart: 1, + NewCount: len(lines), + } + for _, line := range lines { + h.Lines = append(h.Lines, DiffLine{Type: DiffLineAdd, Content: line}) + } + return h +} + +// buildRemoveHunk creates a hunk for an entirely removed file +func buildRemoveHunk(lines []string) DiffHunk { + h := DiffHunk{ + OldStart: 1, + OldCount: len(lines), + NewStart: 0, + NewCount: 0, + } + for _, line := range lines { + h.Lines = append(h.Lines, DiffLine{Type: DiffLineRemove, Content: line}) + } + return h +} + +// computeUnifiedDiff computes unified diff hunks with context lines +func computeUnifiedDiff(oldLines, newLines []string, contextLines int) []DiffHunk { + // Compute LCS-based edit script + edits := myersDiff(oldLines, newLines) + + if len(edits) == 0 { + return nil + } + + // Group edits into hunks with context + return groupIntoHunks(edits, oldLines, newLines, contextLines) +} + +// editOp represents a single edit operation +type editOp struct { + oldIdx int + newIdx int + kind DiffLineType // DiffLineContext, DiffLineAdd, DiffLineRemove +} + +// myersDiff computes the edit script between two slices of strings +// using a simplified O(ND) approach +func myersDiff(oldLines, newLines []string) []editOp { + m := len(oldLines) + n := len(newLines) + + // Build a simple LCS-based diff + // Use dynamic programming for the LCS + lcs := make([][]int, m+1) + for i := range lcs { + lcs[i] = make([]int, n+1) + } + + for i := m - 1; i >= 0; i-- { + for j := n - 1; j >= 0; 
j-- { + if oldLines[i] == newLines[j] { + lcs[i][j] = lcs[i+1][j+1] + 1 + } else { + lcs[i][j] = lcs[i+1][j] + if lcs[i][j+1] > lcs[i][j] { + lcs[i][j] = lcs[i][j+1] + } + } + } + } + + // Trace back to get edit operations + var edits []editOp + i, j := 0, 0 + for i < m || j < n { + if i < m && j < n && oldLines[i] == newLines[j] { + edits = append(edits, editOp{oldIdx: i, newIdx: j, kind: DiffLineContext}) + i++ + j++ + } else if j < n && (i >= m || lcs[i][j+1] >= lcs[i+1][j]) { + edits = append(edits, editOp{oldIdx: -1, newIdx: j, kind: DiffLineAdd}) + j++ + } else if i < m { + edits = append(edits, editOp{oldIdx: i, newIdx: -1, kind: DiffLineRemove}) + i++ + } + } + + return edits +} + +// groupIntoHunks groups edit operations into hunks with context +func groupIntoHunks(edits []editOp, oldLines, newLines []string, contextLines int) []DiffHunk { + // Find ranges of non-context edits + type changeRange struct { + start, end int // indices into edits + } + var ranges []changeRange + + inChange := false + var currentRange changeRange + for i, edit := range edits { + if edit.kind != DiffLineContext { + if !inChange { + currentRange = changeRange{start: i, end: i} + inChange = true + } else { + currentRange.end = i + } + } else if inChange { + // Check if context gap is large enough to split + gapStart := currentRange.end + 1 + gapEnd := i + if gapEnd-gapStart >= contextLines*2 { + ranges = append(ranges, currentRange) + inChange = false + } else { + currentRange.end = i // extend through context + } + } + } + if inChange { + ranges = append(ranges, currentRange) + } + + // Build hunks from ranges + var hunks []DiffHunk + for _, r := range ranges { + // Expand range to include context + start := r.start - contextLines + if start < 0 { + start = 0 + } + end := r.end + contextLines + 1 + if end > len(edits) { + end = len(edits) + } + + var hunk DiffHunk + hunk.OldStart = -1 + hunk.NewStart = -1 + + for i := start; i < end; i++ { + edit := edits[i] + + switch edit.kind 
{ + case DiffLineContext: + if hunk.OldStart < 0 { + hunk.OldStart = edit.oldIdx + 1 + } + if hunk.NewStart < 0 { + hunk.NewStart = edit.newIdx + 1 + } + hunk.OldCount++ + hunk.NewCount++ + hunk.Lines = append(hunk.Lines, DiffLine{ + Type: DiffLineContext, + Content: oldLines[edit.oldIdx], + }) + + case DiffLineAdd: + if hunk.NewStart < 0 { + hunk.NewStart = edit.newIdx + 1 + } + if hunk.OldStart < 0 && edit.newIdx > 0 { + hunk.OldStart = 1 + } else if hunk.OldStart < 0 { + hunk.OldStart = 0 + } + hunk.NewCount++ + hunk.Lines = append(hunk.Lines, DiffLine{ + Type: DiffLineAdd, + Content: newLines[edit.newIdx], + }) + + case DiffLineRemove: + if hunk.OldStart < 0 { + hunk.OldStart = edit.oldIdx + 1 + } + if hunk.NewStart < 0 && edit.oldIdx > 0 { + hunk.NewStart = 1 + } else if hunk.NewStart < 0 { + hunk.NewStart = 0 + } + hunk.OldCount++ + hunk.Lines = append(hunk.Lines, DiffLine{ + Type: DiffLineRemove, + Content: oldLines[edit.oldIdx], + }) + } + } + + if len(hunk.Lines) > 0 { + hunks = append(hunks, hunk) + } + } + + return hunks +} + +// getHeadIndex returns the workspace index for the HEAD commit +func getHeadIndex(casStore cas.CAS, ivaldiDir string, refsManager *refs.RefsManager) (wsindex.IndexRef, error) { + currentTimeline, err := refsManager.GetCurrentTimeline() + if err != nil { + return wsindex.IndexRef{}, fmt.Errorf("failed to get current timeline: %w", err) + } + + timeline, err := refsManager.GetTimeline(currentTimeline, refs.LocalTimeline) + if err != nil { + return wsindex.IndexRef{}, fmt.Errorf("failed to get timeline: %w", err) + } + + if timeline.Blake3Hash == [32]byte{} { + wsBuilder := wsindex.NewBuilder(casStore) + return wsBuilder.Build(nil) + } + + return getCommitIndex(casStore, timeline.Blake3Hash) +} + +// getCommitIndex returns the workspace index for a commit +func getCommitIndex(casStore cas.CAS, commitHash [32]byte) (wsindex.IndexRef, error) { + var hash cas.Hash + copy(hash[:], commitHash[:]) + + commitReader := 
commit.NewCommitReader(casStore) + commitObj, err := commitReader.ReadCommit(hash) + if err != nil { + return wsindex.IndexRef{}, fmt.Errorf("failed to read commit: %w", err) + } + + tree, err := commitReader.ReadTree(commitObj) + if err != nil { + return wsindex.IndexRef{}, fmt.Errorf("failed to read tree: %w", err) + } + + files, err := commitReader.TreeToFileMetadata(tree) + if err != nil { + return wsindex.IndexRef{}, fmt.Errorf("failed to convert tree to metadata: %w", err) + } + + wsBuilder := wsindex.NewBuilder(casStore) + return wsBuilder.Build(files) +} diff --git a/internal/engine/fuse.go b/internal/engine/fuse.go new file mode 100644 index 0000000..7efd8cc --- /dev/null +++ b/internal/engine/fuse.go @@ -0,0 +1,331 @@ +package engine + +import ( + "fmt" + "os" + "path/filepath" + "strings" + + "github.com/javanhut/Ivaldi-vcs/internal/cas" + "github.com/javanhut/Ivaldi-vcs/internal/commit" + "github.com/javanhut/Ivaldi-vcs/internal/config" + "github.com/javanhut/Ivaldi-vcs/internal/diffmerge" + "github.com/javanhut/Ivaldi-vcs/internal/history" + "github.com/javanhut/Ivaldi-vcs/internal/refs" + "github.com/javanhut/Ivaldi-vcs/internal/seals" + "github.com/javanhut/Ivaldi-vcs/internal/wsindex" +) + +// FuseResultType indicates the outcome of a fuse operation +type FuseResultType int + +const ( + FuseFastForward FuseResultType = iota // Target was ancestor of source + FuseMergeSuccess // Three-way merge succeeded + FuseMergeConflicts // Three-way merge has conflicts +) + +// FuseResult holds the result of a fuse operation +type FuseResult struct { + Type FuseResultType + Source string + Target string + SealName string + Conflicts []string + Added int + Modified int + Removed int +} + +// FuseStatus holds information about a merge in progress +type FuseStatus struct { + InProgress bool + SourceTimeline string + TargetTimeline string + Conflicts []string +} + +// GetFuseStatus checks if a merge is in progress and returns its state +func GetFuseStatus(ivaldiDir 
string) (*FuseStatus, error) { + mergeHeadPath := filepath.Join(ivaldiDir, "MERGE_HEAD") + if _, err := os.Stat(mergeHeadPath); os.IsNotExist(err) { + return &FuseStatus{InProgress: false}, nil + } + + status := &FuseStatus{InProgress: true} + + // Read merge info + mergeInfoPath := filepath.Join(ivaldiDir, "MERGE_INFO") + data, err := os.ReadFile(mergeInfoPath) + if err == nil { + lines := strings.Split(strings.TrimSpace(string(data)), "\n") + if len(lines) >= 2 { + status.SourceTimeline = lines[0] + status.TargetTimeline = lines[1] + } + } + + // Read conflicts + conflictPath := filepath.Join(ivaldiDir, "MERGE_CONFLICTS") + if conflictData, err := os.ReadFile(conflictPath); err == nil { + for _, line := range strings.Split(strings.TrimSpace(string(conflictData)), "\n") { + if line != "" { + status.Conflicts = append(status.Conflicts, line) + } + } + } + + return status, nil +} + +// AbortFuse aborts the current merge in progress +func AbortFuse(ivaldiDir string) error { + status, err := GetFuseStatus(ivaldiDir) + if err != nil { + return err + } + if !status.InProgress { + return fmt.Errorf("no merge in progress") + } + + os.Remove(filepath.Join(ivaldiDir, "MERGE_HEAD")) + os.Remove(filepath.Join(ivaldiDir, "MERGE_INFO")) + os.Remove(filepath.Join(ivaldiDir, "MERGE_CONFLICTS")) + + resStorage := diffmerge.NewResolutionStorage(ivaldiDir) + resStorage.Delete() + + return nil +} + +// FuseTimelines merges the source timeline into the target timeline +func FuseTimelines(ivaldiDir, workDir, source, target, strategy string) (*FuseResult, error) { + // Check for merge already in progress + fuseStatus, _ := GetFuseStatus(ivaldiDir) + if fuseStatus != nil && fuseStatus.InProgress { + return nil, fmt.Errorf("merge already in progress. 
Abort first") + } + + if source == target { + return nil, fmt.Errorf("cannot fuse timeline '%s' into itself", source) + } + + // Initialize storage + objectsDir := filepath.Join(ivaldiDir, "objects") + casStore, err := cas.NewFileCAS(objectsDir) + if err != nil { + return nil, fmt.Errorf("failed to initialize storage: %w", err) + } + + refsManager, err := refs.NewRefsManager(ivaldiDir) + if err != nil { + return nil, fmt.Errorf("failed to initialize refs: %w", err) + } + defer refsManager.Close() + + // Get timeline refs + sourceRef, err := refsManager.GetTimeline(source, refs.LocalTimeline) + if err != nil { + return nil, fmt.Errorf("source timeline '%s' not found: %w", source, err) + } + + targetRef, err := refsManager.GetTimeline(target, refs.LocalTimeline) + if err != nil { + return nil, fmt.Errorf("target timeline '%s' not found: %w", target, err) + } + + var sourceHash, targetHash cas.Hash + copy(sourceHash[:], sourceRef.Blake3Hash[:]) + copy(targetHash[:], targetRef.Blake3Hash[:]) + + commitReader := commit.NewCommitReader(casStore) + + // Check fast-forward + isAncestor, err := commitReader.IsAncestor(targetHash, sourceHash) + if err == nil && isAncestor { + var hashArray [32]byte + copy(hashArray[:], sourceHash[:]) + err = refsManager.UpdateTimeline(target, refs.LocalTimeline, hashArray, [32]byte{}, "") + if err != nil { + return nil, fmt.Errorf("failed to fast-forward: %w", err) + } + return &FuseResult{ + Type: FuseFastForward, + Source: source, + Target: target, + }, nil + } + + // Three-way merge + sourceCommit, err := commitReader.ReadCommit(sourceHash) + if err != nil { + return nil, fmt.Errorf("failed to read source commit: %w", err) + } + + targetCommit, err := commitReader.ReadCommit(targetHash) + if err != nil { + return nil, fmt.Errorf("failed to read target commit: %w", err) + } + + // Get workspace indexes + sourceIndex, err := fuseGetWorkspaceIndex(casStore, sourceCommit) + if err != nil { + return nil, fmt.Errorf("failed to get source 
workspace: %w", err) + } + + targetIndex, err := fuseGetWorkspaceIndex(casStore, targetCommit) + if err != nil { + return nil, fmt.Errorf("failed to get target workspace: %w", err) + } + + // Find base (common ancestor) + var baseIndex wsindex.IndexRef + if len(targetCommit.Parents) > 0 { + baseCommit, bErr := commitReader.ReadCommit(targetCommit.Parents[0]) + if bErr == nil { + baseIndex, _ = fuseGetWorkspaceIndex(casStore, baseCommit) + } + } + if baseIndex.Count == 0 { + wsBuilder := wsindex.NewBuilder(casStore) + baseIndex, _ = wsBuilder.Build(nil) + } + + // Merge with strategy + strategyType := diffmerge.StrategyType(strategy) + merger := diffmerge.NewMerger(casStore) + mergeResult, err := merger.MergeWorkspacesWithStrategy(baseIndex, targetIndex, sourceIndex, strategyType) + if err != nil { + return nil, fmt.Errorf("merge failed: %w", err) + } + + if !mergeResult.Success { + // Save merge state + saveFuseState(ivaldiDir, source, target, sourceHash, targetHash, mergeResult.Conflicts) + + // Save resolution metadata + resStorage := diffmerge.NewResolutionStorage(ivaldiDir) + resolution := diffmerge.CreateResolution(source, target, sourceHash, targetHash, strategyType) + resStorage.Save(resolution) + + var conflictPaths []string + for _, c := range mergeResult.Conflicts { + conflictPaths = append(conflictPaths, c.Path) + } + return &FuseResult{ + Type: FuseMergeConflicts, + Source: source, + Target: target, + Conflicts: conflictPaths, + }, nil + } + + // Compute diff for stats + var added, modified, removed int + differ := diffmerge.NewDiffer(casStore) + diff, dErr := differ.DiffWorkspaces(targetIndex, *mergeResult.MergedIndex) + if dErr == nil && diff != nil { + for _, change := range diff.FileChanges { + switch change.Type { + case diffmerge.Added: + added++ + case diffmerge.Modified: + modified++ + case diffmerge.Removed: + removed++ + } + } + } + + // Create merge commit + author, err := config.GetAuthor() + if err != nil { + return nil, fmt.Errorf("failed 
// fuseGetWorkspaceIndex extracts a workspace index from a commit object.
//
// NOTE(review): the tree read from the commit is currently discarded (see
// the TODO below), so this always returns an EMPTY index built from nil.
// Any merge using this as base/target therefore sees no files from the
// commit — confirm whether tree→index conversion happens elsewhere before
// relying on this path.
func fuseGetWorkspaceIndex(casStore cas.CAS, commitObj *commit.CommitObject) (wsindex.IndexRef, error) {
	commitReader := commit.NewCommitReader(casStore)
	tree, err := commitReader.ReadTree(commitObj)
	if err != nil {
		return wsindex.IndexRef{}, err
	}
	// TODO: Properly convert tree to workspace index
	_ = tree
	wsBuilder := wsindex.NewBuilder(casStore)
	return wsBuilder.Build(nil)
}

// saveFuseState persists merge state to disk so it can be aborted or continued.
//
// Best-effort by design: write errors are deliberately ignored, since a
// failed state save should not abort the merge itself.
//
// Files written under ivaldiDir:
//   - MERGE_HEAD:      the source commit hash
//   - MERGE_INFO:      source name, target name, source hash, target hash (one per line)
//   - MERGE_CONFLICTS: conflicting paths, newline-separated (written only when conflicts exist)
func saveFuseState(ivaldiDir, source, target string, sourceHash, targetHash cas.Hash, conflicts []diffmerge.Conflict) {
	mergeHeadPath := filepath.Join(ivaldiDir, "MERGE_HEAD")
	os.WriteFile(mergeHeadPath, []byte(sourceHash.String()), 0644)

	mergeInfoPath := filepath.Join(ivaldiDir, "MERGE_INFO")
	info := fmt.Sprintf("%s\n%s\n%s\n%s\n", source, target, sourceHash.String(), targetHash.String())
	os.WriteFile(mergeInfoPath, []byte(info), 0644)

	if len(conflicts) > 0 {
		conflictListPath := filepath.Join(ivaldiDir, "MERGE_CONFLICTS")
		var paths []string
		for _, c := range conflicts {
			paths = append(paths, c.Path)
		}
		os.WriteFile(conflictListPath, []byte(strings.Join(paths, "\n")), 0644)
	}
}
// GetCommitHistory retrieves commit history for the repository at ivaldiDir,
// newest-first along first-parent links.
//
// Returns the entries, the current timeline name, and an error. When
// opts.AllTimelines is set, every local timeline is walked (timelines that
// fail to load are silently skipped), and the combined list is sorted by
// commit time. Otherwise only the current timeline is walked. opts.Limit,
// when > 0, truncates the final list.
//
// NOTE(review): in AllTimelines mode a commit reachable from several
// timelines appears once per timeline — confirm whether de-duplication
// across timelines is wanted.
func GetCommitHistory(ivaldiDir string, opts LogOptions) ([]CommitEntry, string, error) {
	refsManager, err := refs.NewRefsManager(ivaldiDir)
	if err != nil {
		return nil, "", fmt.Errorf("failed to initialize refs manager: %w", err)
	}
	defer refsManager.Close()

	objectsDir := filepath.Join(ivaldiDir, "objects")
	casStore, err := cas.NewFileCAS(objectsDir)
	if err != nil {
		return nil, "", fmt.Errorf("failed to initialize storage: %w", err)
	}

	currentTimeline, err := refsManager.GetCurrentTimeline()
	if err != nil {
		return nil, "", fmt.Errorf("failed to get current timeline: %w", err)
	}

	var commits []CommitEntry

	if opts.AllTimelines {
		timelines, err := refsManager.ListTimelines(refs.LocalTimeline)
		if err != nil {
			return nil, currentTimeline, fmt.Errorf("failed to list timelines: %w", err)
		}

		for _, timeline := range timelines {
			// Best-effort: a timeline whose head cannot be walked is skipped
			// rather than failing the whole listing.
			entries, err := walkTimeline(casStore, refsManager, timeline.Name, timeline.Blake3Hash)
			if err != nil {
				continue
			}
			commits = append(commits, entries...)
		}

		sortEntriesByTime(commits)
	} else {
		timeline, err := refsManager.GetTimeline(currentTimeline, refs.LocalTimeline)
		if err != nil {
			return nil, currentTimeline, fmt.Errorf("failed to get timeline info: %w", err)
		}

		commits, err = walkTimeline(casStore, refsManager, currentTimeline, timeline.Blake3Hash)
		if err != nil {
			return nil, currentTimeline, fmt.Errorf("failed to get commits: %w", err)
		}
	}

	if opts.Limit > 0 && len(commits) > opts.Limit {
		commits = commits[:opts.Limit]
	}

	return commits, currentTimeline, nil
}

// walkTimeline retrieves the commits of a timeline starting from headHash,
// following only the FIRST parent of each commit (merge side-branches are
// not expanded). A zero headHash yields no entries. The visited set guards
// against cycles; an unreadable commit silently terminates the walk, so a
// partially corrupt history still returns its readable prefix.
func walkTimeline(casStore cas.CAS, refsManager *refs.RefsManager, timelineName string, headHash [32]byte) ([]CommitEntry, error) {
	if headHash == [32]byte{} {
		return nil, nil
	}

	var entries []CommitEntry
	visited := make(map[cas.Hash]bool)
	commitReader := commit.NewCommitReader(casStore)

	var currentHash cas.Hash
	copy(currentHash[:], headHash[:])

	for {
		if visited[currentHash] {
			break
		}
		visited[currentHash] = true

		commitObj, err := commitReader.ReadCommit(currentHash)
		if err != nil {
			break
		}

		var hashArray [32]byte
		copy(hashArray[:], currentHash[:])
		// Seal-name lookup is best-effort; a missing name leaves SealName empty.
		sealName, _ := refsManager.GetSealNameByHash(hashArray)

		// Short hashes are the first 4 bytes (8 hex chars) of the full hash.
		shortHash := hex.EncodeToString(currentHash[:4])

		var parentHashes []string
		for _, p := range commitObj.Parents {
			parentHashes = append(parentHashes, hex.EncodeToString(p[:4]))
		}

		entries = append(entries, CommitEntry{
			Hash:     shortHash,
			FullHash: hashArray,
			SealName: sealName,
			Author:   commitObj.Author,
			Message:  commitObj.Message,
			Time:     commitObj.CommitTime,
			Timeline: timelineName,
			Parents:  parentHashes,
			IsMerge:  len(commitObj.Parents) > 1,
		})

		if len(commitObj.Parents) == 0 {
			break
		}

		currentHash = commitObj.Parents[0]
	}

	return entries, nil
}

// sortEntriesByTime sorts commit entries in place by time, newest first,
// using a simple O(n²) selection-style swap sort (not stable).
// NOTE(review): fine for typical history sizes; consider sort.Slice if
// combined all-timeline histories grow large.
func sortEntriesByTime(entries []CommitEntry) {
	for i := 0; i < len(entries); i++ {
		for j := i + 1; j < len(entries); j++ {
			if entries[i].Time.Before(entries[j].Time) {
				entries[i], entries[j] = entries[j], entries[i]
			}
		}
	}
}
[]CommitEntry) { + for i := 0; i < len(entries); i++ { + for j := i + 1; j < len(entries); j++ { + if entries[i].Time.Before(entries[j].Time) { + entries[i], entries[j] = entries[j], entries[i] + } + } + } +} + +// RelativeTime returns a human-readable relative time string +func RelativeTime(t time.Time) string { + now := time.Now() + diff := now.Sub(t) + + if diff < time.Minute { + return "just now" + } + if diff < time.Hour { + mins := int(diff.Minutes()) + if mins == 1 { + return "1 minute ago" + } + return fmt.Sprintf("%d minutes ago", mins) + } + if diff < 24*time.Hour { + hours := int(diff.Hours()) + if hours == 1 { + return "1 hour ago" + } + return fmt.Sprintf("%d hours ago", hours) + } + if diff < 7*24*time.Hour { + days := int(diff.Hours() / 24) + if days == 1 { + return "1 day ago" + } + return fmt.Sprintf("%d days ago", days) + } + if diff < 30*24*time.Hour { + weeks := int(diff.Hours() / 24 / 7) + if weeks == 1 { + return "1 week ago" + } + return fmt.Sprintf("%d weeks ago", weeks) + } + if diff < 365*24*time.Hour { + months := int(diff.Hours() / 24 / 30) + if months == 1 { + return "1 month ago" + } + return fmt.Sprintf("%d months ago", months) + } + years := int(diff.Hours() / 24 / 365) + if years == 1 { + return "1 year ago" + } + return fmt.Sprintf("%d years ago", years) +} diff --git a/internal/engine/remote.go b/internal/engine/remote.go new file mode 100644 index 0000000..31ac753 --- /dev/null +++ b/internal/engine/remote.go @@ -0,0 +1,326 @@ +package engine + +import ( + "context" + "fmt" + "sort" + "time" + + "github.com/javanhut/Ivaldi-vcs/internal/cas" + "github.com/javanhut/Ivaldi-vcs/internal/github" + "github.com/javanhut/Ivaldi-vcs/internal/refs" +) + +// PortalInfo holds the configured remote repository connection +type PortalInfo struct { + Owner string + Repo string + Timeline string + HasAuth bool +} + +// ScoutResult holds the results of scouting remote timelines +type ScoutResult struct { + RemoteOnly []string + LocalOnly []string 
// GetPortalInfo returns the configured remote repository connection.
// Fails if no GitHub repository is configured. HasAuth reflects whether an
// authenticated syncer could be constructed; construction failure is
// treated as "no auth", not an error.
func GetPortalInfo(ivaldiDir string) (*PortalInfo, error) {
	refsManager, err := refs.NewRefsManager(ivaldiDir)
	if err != nil {
		return nil, fmt.Errorf("failed to initialize refs manager: %w", err)
	}
	defer refsManager.Close()

	owner, repo, err := refsManager.GetGitHubRepository()
	if err != nil {
		return nil, fmt.Errorf("no repository configured")
	}

	// Timeline lookup is best-effort; an empty name is acceptable here.
	currentTimeline, _ := refsManager.GetCurrentTimeline()

	// Check auth availability
	syncer, sErr := github.NewRepoSyncerOptionalAuth(ivaldiDir, ".")
	hasAuth := sErr == nil && syncer != nil && syncer.IsAuthenticated()

	return &PortalInfo{
		Owner:    owner,
		Repo:     repo,
		Timeline: currentTimeline,
		HasAuth:  hasAuth,
	}, nil
}

// SetPortal configures the remote repository connection from an
// "owner/repo" string. The first '/' splits owner from repo; both halves
// must be non-empty.
func SetPortal(ivaldiDir, ownerRepo string) error {
	refsManager, err := refs.NewRefsManager(ivaldiDir)
	if err != nil {
		return fmt.Errorf("failed to initialize refs manager: %w", err)
	}
	defer refsManager.Close()

	// Parse owner/repo at the first '/' (manual scan; strings is not imported here).
	for i, c := range ownerRepo {
		if c == '/' {
			owner := ownerRepo[:i]
			repo := ownerRepo[i+1:]
			if owner == "" || repo == "" {
				return fmt.Errorf("invalid format. Use: owner/repo")
			}
			return refsManager.SetGitHubRepository(owner, repo)
		}
	}
	return fmt.Errorf("invalid format. Use: owner/repo")
}

// RemovePortal removes the remote repository connection.
func RemovePortal(ivaldiDir string) error {
	refsManager, err := refs.NewRefsManager(ivaldiDir)
	if err != nil {
		return fmt.Errorf("failed to initialize refs manager: %w", err)
	}
	defer refsManager.Close()

	return refsManager.RemoveGitHubRepository()
}

// Scout discovers remote timelines and classifies each known timeline as
// remote-only, local-only, or present on both sides, sorted by name.
func Scout(ivaldiDir, workDir string) (*ScoutResult, error) {
	refsManager, err := refs.NewRefsManager(ivaldiDir)
	if err != nil {
		return nil, fmt.Errorf("failed to initialize refs manager: %w", err)
	}
	defer refsManager.Close()

	owner, repo, err := refsManager.GetGitHubRepository()
	if err != nil {
		return nil, fmt.Errorf("no repository configured. Use portal to add one")
	}

	syncer, err := github.NewRepoSyncerOptionalAuth(ivaldiDir, workDir)
	if err != nil {
		return nil, fmt.Errorf("failed to create syncer: %w", err)
	}

	ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute)
	defer cancel()

	// Result is discarded: GetRemoteTimelines is presumably called for its
	// side effect of refreshing cached remote refs before the status query
	// below — NOTE(review): confirm against the github package.
	_, err = syncer.GetRemoteTimelines(ctx, owner, repo)
	if err != nil {
		return nil, fmt.Errorf("failed to get remote timelines: %w", err)
	}

	syncStatuses, err := refsManager.GetTimelineSyncStatuses()
	if err != nil {
		return nil, fmt.Errorf("failed to get sync statuses: %w", err)
	}

	sort.Slice(syncStatuses, func(i, j int) bool {
		return syncStatuses[i].Name < syncStatuses[j].Name
	})

	result := &ScoutResult{}
	for _, status := range syncStatuses {
		switch {
		case status.Status == "remote-only":
			result.RemoteOnly = append(result.RemoteOnly, status.Name)
		case status.LocalExists && !status.RemoteExists:
			result.LocalOnly = append(result.LocalOnly, status.Name)
		case status.LocalExists && status.RemoteExists:
			result.Both = append(result.Both, status.Name)
		}
	}
	result.Total = len(syncStatuses)

	return result, nil
}

// Upload pushes the current timeline's head commit to the remote
// repository. Requires an authenticated syncer and a non-empty timeline
// head; force is forwarded to the push.
func Upload(ivaldiDir, workDir string, force bool) (*UploadResult, error) {
	refsManager, err := refs.NewRefsManager(ivaldiDir)
	if err != nil {
		return nil, fmt.Errorf("failed to initialize refs manager: %w", err)
	}
	defer refsManager.Close()

	currentTimeline, err := refsManager.GetCurrentTimeline()
	if err != nil {
		return nil, fmt.Errorf("failed to get current timeline: %w", err)
	}

	owner, repo, err := refsManager.GetGitHubRepository()
	if err != nil {
		return nil, fmt.Errorf("no repository configured")
	}

	timeline, err := refsManager.GetTimeline(currentTimeline, refs.LocalTimeline)
	if err != nil {
		return nil, fmt.Errorf("failed to get timeline info: %w", err)
	}

	// A zero hash means the timeline has no commits yet.
	if timeline.Blake3Hash == [32]byte{} {
		return nil, fmt.Errorf("no commits to push")
	}

	var commitHash cas.Hash
	copy(commitHash[:], timeline.Blake3Hash[:])

	syncer, err := github.NewRepoSyncer(ivaldiDir, workDir)
	if err != nil {
		return nil, fmt.Errorf("failed to create syncer (authentication required): %w", err)
	}

	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Minute)
	defer cancel()

	if err := syncer.PushCommit(ctx, owner, repo, currentTimeline, commitHash, force); err != nil {
		return nil, fmt.Errorf("failed to push: %w", err)
	}

	return &UploadResult{
		Timeline: currentTimeline,
		Branch:   currentTimeline,
		Owner:    owner,
		Repo:     repo,
	}, nil
}

// SyncTimeline syncs the named timeline with the remote; an empty
// timelineName means the current timeline. Returns the per-file delta
// reported by the syncer.
func SyncTimeline(ivaldiDir, workDir, timelineName string) (*SyncResult, error) {
	refsManager, err := refs.NewRefsManager(ivaldiDir)
	if err != nil {
		return nil, fmt.Errorf("failed to initialize refs manager: %w", err)
	}
	defer refsManager.Close()

	if timelineName == "" {
		timelineName, err = refsManager.GetCurrentTimeline()
		if err != nil {
			return nil, fmt.Errorf("failed to get current timeline: %w", err)
		}
	}

	owner, repo, err := refsManager.GetGitHubRepository()
	if err != nil {
		return nil, fmt.Errorf("no repository configured")
	}

	timeline, err := refsManager.GetTimeline(timelineName, refs.LocalTimeline)
	if err != nil {
		return nil, fmt.Errorf("failed to get timeline '%s': %w", timelineName, err)
	}

	syncer, err := github.NewRepoSyncer(ivaldiDir, workDir)
	if err != nil {
		return nil, fmt.Errorf("failed to create syncer: %w", err)
	}

	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Minute)
	defer cancel()

	delta, err := syncer.SyncTimeline(ctx, owner, repo, timelineName, timeline.Blake3Hash)
	if err != nil {
		return nil, fmt.Errorf("failed to sync: %w", err)
	}

	return &SyncResult{
		Timeline:  timelineName,
		Added:     delta.AddedFiles,
		Modified:  delta.ModifiedFiles,
		Deleted:   delta.DeletedFiles,
		NoChanges: delta.NoChanges,
	}, nil
}

// HarvestTimelines downloads remote timelines into the local repository.
// With an empty names slice, all remote-only timelines are harvested (in
// sorted order). Per-timeline failures are collected rather than aborting
// the whole run.
func HarvestTimelines(ivaldiDir, workDir string, names []string) (*HarvestResult, error) {
	refsManager, err := refs.NewRefsManager(ivaldiDir)
	if err != nil {
		return nil, fmt.Errorf("failed to initialize refs manager: %w", err)
	}
	defer refsManager.Close()

	owner, repo, err := refsManager.GetGitHubRepository()
	if err != nil {
		return nil, fmt.Errorf("no repository configured")
	}

	syncer, err := github.NewRepoSyncerOptionalAuth(ivaldiDir, workDir)
	if err != nil {
		return nil, fmt.Errorf("failed to create syncer: %w", err)
	}

	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Minute)
	defer cancel()

	// Refresh remote timelines (result discarded; called for its refresh
	// side effect — NOTE(review): confirm, same as in Scout).
	_, err = syncer.GetRemoteTimelines(ctx, owner, repo)
	if err != nil {
		return nil, fmt.Errorf("failed to discover remote timelines: %w", err)
	}

	// If no names specified, harvest all remote-only
	if len(names) == 0 {
		syncStatuses, err := refsManager.GetTimelineSyncStatuses()
		if err != nil {
			return nil, fmt.Errorf("failed to get sync statuses: %w", err)
		}
		for _, status := range syncStatuses {
			if status.Status == "remote-only" {
				names = append(names, status.Name)
			}
		}
		sort.Strings(names)
	}

	if len(names) == 0 {
		return &HarvestResult{}, nil
	}

	result := &HarvestResult{}
	for _, name := range names {
		err := syncer.FetchTimeline(ctx, owner, repo, name)
		if err != nil {
			result.Failed = append(result.Failed, HarvestFailure{
				Name: name,
				Err:  err.Error(),
			})
		} else {
			result.Successful = append(result.Successful, name)
		}
	}

	return result, nil
}
// SealResult holds the result of a seal (commit) operation.
type SealResult struct {
	SealName string // Human-readable generated seal name
	Hash     string // short hex hash
	Timeline string // Timeline the seal was created on
	Message  string // Commit message
	Files    int    // Number of staged files included
}

// CreateSeal creates a sealed commit from the files currently staged under
// <ivaldiDir>/stage/files, updates the current timeline to point at it,
// and clears the staging area.
//
// NOTE(review): the stage file is parsed with strings.Fields, so a staged
// path containing whitespace is split into multiple bogus paths — the
// staging writer emits one path per line; confirm and consider splitting
// on newlines instead.
func CreateSeal(ivaldiDir, workDir, message string) (*SealResult, error) {
	// Check for staged files
	stageFile := filepath.Join(ivaldiDir, "stage", "files")
	if _, err := os.Stat(stageFile); os.IsNotExist(err) {
		return nil, fmt.Errorf("no files staged for commit. Use gather to stage files first")
	}

	stageData, err := os.ReadFile(stageFile)
	if err != nil {
		return nil, fmt.Errorf("failed to read staged files: %w", err)
	}

	stagedFiles := strings.Fields(string(stageData))
	if len(stagedFiles) == 0 {
		return nil, fmt.Errorf("no files staged for commit")
	}

	// Initialize refs manager
	refsManager, err := refs.NewRefsManager(ivaldiDir)
	if err != nil {
		return nil, fmt.Errorf("failed to initialize refs manager: %w", err)
	}
	defer refsManager.Close()

	// Get current timeline
	currentTimeline, err := refsManager.GetCurrentTimeline()
	if err != nil {
		return nil, fmt.Errorf("failed to get current timeline: %w", err)
	}

	// Initialize CAS
	objectsDir := filepath.Join(ivaldiDir, "objects")
	casStore, err := cas.NewFileCAS(objectsDir)
	if err != nil {
		return nil, fmt.Errorf("failed to initialize storage: %w", err)
	}

	// NOTE(review): a fresh in-memory MMR is used here, whereas the fuse
	// path uses history.NewPersistentMMR — confirm the history accumulator
	// is meant to be ephemeral for plain seals.
	mmr := history.NewMMR()
	commitBuilder := commit.NewCommitBuilder(casStore, mmr)

	// Scan only the staged files, not the whole workspace.
	materializer := workspace.NewMaterializer(casStore, ivaldiDir, workDir)
	wsIndex, err := materializer.ScanSpecificFiles(stagedFiles)
	if err != nil {
		return nil, fmt.Errorf("failed to scan staged files: %w", err)
	}

	wsLoader := wsindex.NewLoader(casStore)
	workspaceFiles, err := wsLoader.ListAll(wsIndex)
	if err != nil {
		return nil, fmt.Errorf("failed to list workspace files: %w", err)
	}

	// Get author
	author, err := config.GetAuthor()
	if err != nil {
		return nil, fmt.Errorf("author not configured: %w", err)
	}

	// Get parent commit: the current timeline head, when one exists.
	var parents []cas.Hash
	timeline, err := refsManager.GetTimeline(currentTimeline, refs.LocalTimeline)
	if err == nil && timeline.Blake3Hash != [32]byte{} {
		var parentHash cas.Hash
		copy(parentHash[:], timeline.Blake3Hash[:])
		parents = append(parents, parentHash)
	}

	// Create commit (author doubles as committer here).
	commitObj, err := commitBuilder.CreateCommit(
		workspaceFiles,
		parents,
		author,
		author,
		message,
	)
	if err != nil {
		return nil, fmt.Errorf("failed to create commit: %w", err)
	}

	commitHash := commitBuilder.GetCommitHash(commitObj)

	var commitHashArray [32]byte
	copy(commitHashArray[:], commitHash[:])

	// Generate and store seal name (best-effort: a failure is only logged).
	sealName := seals.GenerateSealName(commitHashArray)
	err = refsManager.StoreSealName(sealName, commitHashArray, message)
	if err != nil {
		log.Printf("Warning: Failed to store seal name: %v", err)
	}

	// Update timeline reference.
	// NOTE(review): CreateTimeline (not an update call) is used and its
	// error is downgraded to a log line — presumably it upserts; verify
	// that an existing timeline is advanced rather than the error hiding
	// a lost head update.
	err = refsManager.CreateTimeline(
		currentTimeline,
		refs.LocalTimeline,
		commitHashArray,
		[32]byte{},
		"",
		fmt.Sprintf("Commit: %s", message),
	)
	if err != nil {
		log.Printf("Note: Timeline update: %v", err)
	}

	// Clean up staging area
	if err := os.Remove(stageFile); err != nil {
		log.Printf("Warning: Failed to clean up staging area: %v", err)
	}

	return &SealResult{
		SealName: sealName,
		Hash:     shortHash(commitHashArray),
		Timeline: currentTimeline,
		Message:  message,
		Files:    len(stagedFiles),
	}, nil
}
!= nil { + return nil, fmt.Errorf("failed to create commit: %w", err) + } + + commitHash := commitBuilder.GetCommitHash(commitObj) + + var commitHashArray [32]byte + copy(commitHashArray[:], commitHash[:]) + + // Generate and store seal name + sealName := seals.GenerateSealName(commitHashArray) + err = refsManager.StoreSealName(sealName, commitHashArray, message) + if err != nil { + log.Printf("Warning: Failed to store seal name: %v", err) + } + + // Update timeline reference + err = refsManager.CreateTimeline( + currentTimeline, + refs.LocalTimeline, + commitHashArray, + [32]byte{}, + "", + fmt.Sprintf("Commit: %s", message), + ) + if err != nil { + log.Printf("Note: Timeline update: %v", err) + } + + // Clean up staging area + if err := os.Remove(stageFile); err != nil { + log.Printf("Warning: Failed to clean up staging area: %v", err) + } + + return &SealResult{ + SealName: sealName, + Hash: shortHash(commitHashArray), + Timeline: currentTimeline, + Message: message, + Files: len(stagedFiles), + }, nil +} diff --git a/internal/engine/staging.go b/internal/engine/staging.go new file mode 100644 index 0000000..72e8c0e --- /dev/null +++ b/internal/engine/staging.go @@ -0,0 +1,117 @@ +package engine + +import ( + "fmt" + "os" + "path/filepath" + "strings" +) + +// GatherFiles stages the specified files for the next seal +func GatherFiles(workDir, ivaldiDir string, files []string) error { + stageDir := filepath.Join(ivaldiDir, "stage") + if err := os.MkdirAll(stageDir, 0755); err != nil { + return fmt.Errorf("failed to create staging directory: %w", err) + } + + stageFile := filepath.Join(stageDir, "files") + + // Read existing staged files + existingStaged := make(map[string]bool) + if data, err := os.ReadFile(stageFile); err == nil { + lines := strings.Split(string(data), "\n") + for _, line := range lines { + line = strings.TrimSpace(line) + if line != "" { + existingStaged[line] = true + } + } + } + + // Add new files + for _, file := range files { + 
existingStaged[file] = true + } + + // Write all staged files + f, err := os.Create(stageFile) + if err != nil { + return fmt.Errorf("failed to create stage file: %w", err) + } + defer f.Close() + + for file := range existingStaged { + if _, err := f.WriteString(file + "\n"); err != nil { + return fmt.Errorf("failed to write to stage file: %w", err) + } + } + + return nil +} + +// UngatherFiles removes the specified files from the staging area +func UngatherFiles(ivaldiDir string, files []string) error { + stageFile := filepath.Join(ivaldiDir, "stage", "files") + if _, err := os.Stat(stageFile); os.IsNotExist(err) { + return nil // Nothing staged + } + + data, err := os.ReadFile(stageFile) + if err != nil { + return fmt.Errorf("failed to read stage file: %w", err) + } + + // Build set of files to remove + removeSet := make(map[string]bool, len(files)) + for _, f := range files { + removeSet[f] = true + } + + // Filter out the files to ungather + lines := strings.Split(string(data), "\n") + var remaining []string + for _, line := range lines { + line = strings.TrimSpace(line) + if line != "" && !removeSet[line] { + remaining = append(remaining, line) + } + } + + // Write back + f, err := os.Create(stageFile) + if err != nil { + return fmt.Errorf("failed to create stage file: %w", err) + } + defer f.Close() + + for _, file := range remaining { + if _, err := f.WriteString(file + "\n"); err != nil { + return fmt.Errorf("failed to write to stage file: %w", err) + } + } + + return nil +} + +// GatherAllUnstaged stages all files that are modified or untracked +func GatherAllUnstaged(workDir, ivaldiDir string, files []FileStatusInfo) error { + var toGather []string + for _, f := range files { + if f.Status == StatusModified || f.Status == StatusUntracked || f.Status == StatusDeleted { + toGather = append(toGather, f.Path) + } + } + if len(toGather) == 0 { + return nil + } + return GatherFiles(workDir, ivaldiDir, toGather) +} + +// UngatherAll removes all files from the 
staging area +func UngatherAll(ivaldiDir string) error { + stageFile := filepath.Join(ivaldiDir, "stage", "files") + if _, err := os.Stat(stageFile); os.IsNotExist(err) { + return nil + } + return os.Remove(stageFile) +} diff --git a/internal/engine/status.go b/internal/engine/status.go new file mode 100644 index 0000000..a6f63ed --- /dev/null +++ b/internal/engine/status.go @@ -0,0 +1,341 @@ +package engine + +import ( + "log" + "os" + "path/filepath" + "strings" + + "github.com/javanhut/Ivaldi-vcs/internal/cas" + "github.com/javanhut/Ivaldi-vcs/internal/commit" + "github.com/javanhut/Ivaldi-vcs/internal/ignore" + "github.com/javanhut/Ivaldi-vcs/internal/objects" + "github.com/javanhut/Ivaldi-vcs/internal/refs" +) + +// FileStatus represents the status of a file +type FileStatus int + +const ( + StatusUnknown FileStatus = iota + StatusUntracked // File exists but not in any previous commit + StatusAdded // File is staged for commit (new file) + StatusModified // File is modified from last commit + StatusDeleted // File was deleted from working directory + StatusStaged // File is staged for commit (modified) + StatusIgnored // File is ignored by .ivaldiignore +) + +// FileStatusInfo holds information about a file's status +type FileStatusInfo struct { + Path string + Status FileStatus + StagedStatus FileStatus // Status in staging area vs HEAD + WorkStatus FileStatus // Status in working directory vs staging area +} + +// StatusResult holds the complete status information for the repository +type StatusResult struct { + Timeline string + SealName string + FileCount int // files tracked in last seal + Files []FileStatusInfo + Staged []FileStatusInfo + Modified []FileStatusInfo + Deleted []FileStatusInfo + Untracked []FileStatusInfo + Ignored []FileStatusInfo +} + +// GetFileStatuses computes file statuses for the working directory +func GetFileStatuses(workDir, ivaldiDir string) (*StatusResult, error) { + // Initialize refs manager + refsManager, err := 
refs.NewRefsManager(ivaldiDir) + if err != nil { + return nil, err + } + defer refsManager.Close() + + // Get current timeline + currentTimeline, err := refsManager.GetCurrentTimeline() + if err != nil { + return nil, err + } + + // Load ignore patterns + patternCache, err := ignore.LoadPatternCache(workDir) + if err != nil { + log.Printf("Warning: Failed to load ignore patterns: %v", err) + } + + // Get known files + knownFiles, err := GetKnownFiles(ivaldiDir, refsManager) + if err != nil { + log.Printf("Warning: Failed to get known files: %v", err) + knownFiles = make(map[string][32]byte) + } + + // Get file statuses + fileStatuses, err := computeFileStatuses(workDir, ivaldiDir, patternCache, knownFiles) + if err != nil { + return nil, err + } + + // Get seal name + sealName := getSealName(refsManager, currentTimeline) + + result := &StatusResult{ + Timeline: currentTimeline, + SealName: sealName, + FileCount: len(knownFiles), + Files: fileStatuses, + } + + // Group files by status + for _, f := range fileStatuses { + switch f.Status { + case StatusStaged, StatusAdded: + result.Staged = append(result.Staged, f) + case StatusModified: + result.Modified = append(result.Modified, f) + case StatusDeleted: + result.Deleted = append(result.Deleted, f) + case StatusUntracked: + result.Untracked = append(result.Untracked, f) + case StatusIgnored: + result.Ignored = append(result.Ignored, f) + } + } + + return result, nil +} + +// getSealName returns the seal name for the current timeline head +func getSealName(refsManager *refs.RefsManager, currentTimeline string) string { + timeline, err := refsManager.GetTimeline(currentTimeline, refs.LocalTimeline) + if err != nil { + return "" + } + if timeline.Blake3Hash == [32]byte{} { + return "" + } + sealName, err := refsManager.GetSealNameByHash(timeline.Blake3Hash) + if err != nil || sealName == "" { + return "" + } + return sealName +} + +// computeFileStatuses analyzes the working directory and returns file status 
information +func computeFileStatuses(workDir, ivaldiDir string, patternCache *ignore.PatternCache, knownFiles map[string][32]byte) ([]FileStatusInfo, error) { + var fileStatuses []FileStatusInfo + + // Get staged files + stagedFiles, err := GetStagedFiles(ivaldiDir) + if err != nil { + log.Printf("Warning: Failed to get staged files: %v", err) + } + + // Build staged map for O(1) lookups + stagedMap := make(map[string]bool, len(stagedFiles)) + for _, f := range stagedFiles { + stagedMap[f] = true + } + + // Walk the working directory + err = filepath.Walk(workDir, func(path string, info os.FileInfo, err error) error { + if err != nil { + if os.IsNotExist(err) { + return nil + } + return err + } + + relPath, err := filepath.Rel(workDir, path) + if err != nil { + return err + } + + if info.IsDir() { + if relPath == ".ivaldi" || strings.HasPrefix(relPath, ".ivaldi"+string(filepath.Separator)) { + return filepath.SkipDir + } + if patternCache != nil && patternCache.IsDirIgnored(relPath) { + return filepath.SkipDir + } + return nil + } + + if strings.HasPrefix(relPath, ".ivaldi") { + return nil + } + + if patternCache != nil && patternCache.IsIgnored(relPath) { + fileStatuses = append(fileStatuses, FileStatusInfo{ + Path: relPath, + Status: StatusIgnored, + }) + return nil + } + + isStaged := stagedMap[relPath] + knownHash, wasKnown := knownFiles[relPath] + + if isStaged { + if wasKnown { + fileStatuses = append(fileStatuses, FileStatusInfo{ + Path: relPath, + Status: StatusStaged, + }) + } else { + fileStatuses = append(fileStatuses, FileStatusInfo{ + Path: relPath, + Status: StatusAdded, + }) + } + } else { + if wasKnown { + currentHash, err := computeFileHash(path) + if err != nil { + log.Printf("Warning: Failed to compute hash for %s: %v", relPath, err) + return nil + } + if currentHash != knownHash { + fileStatuses = append(fileStatuses, FileStatusInfo{ + Path: relPath, + Status: StatusModified, + }) + } + } else { + fileStatuses = append(fileStatuses, 
FileStatusInfo{ + Path: relPath, + Status: StatusUntracked, + }) + } + } + + return nil + }) + + if err != nil { + return nil, err + } + + // Check for deleted files + for filePath := range knownFiles { + fullPath := filepath.Join(workDir, filePath) + if _, err := os.Stat(fullPath); os.IsNotExist(err) { + isStaged := stagedMap[filePath] + if isStaged { + fileStatuses = append(fileStatuses, FileStatusInfo{ + Path: filePath, + Status: StatusStaged, + }) + } else { + fileStatuses = append(fileStatuses, FileStatusInfo{ + Path: filePath, + Status: StatusDeleted, + }) + } + } + } + + return fileStatuses, nil +} + +// GetStagedFiles returns a list of files that are currently staged +func GetStagedFiles(ivaldiDir string) ([]string, error) { + stageFile := filepath.Join(ivaldiDir, "stage", "files") + if _, err := os.Stat(stageFile); os.IsNotExist(err) { + return []string{}, nil + } + + data, err := os.ReadFile(stageFile) + if err != nil { + return nil, err + } + + lines := strings.Split(string(data), "\n") + var files []string + for _, line := range lines { + line = strings.TrimSpace(line) + if line != "" { + files = append(files, line) + } + } + return files, nil +} + +// GetKnownFiles reads files from the last commit/seal for proper status tracking. 
+func GetKnownFiles(ivaldiDir string, refsManager *refs.RefsManager) (map[string][32]byte, error) { + knownFiles := make(map[string][32]byte) + + rm := refsManager + if rm == nil { + var err error + rm, err = refs.NewRefsManager(ivaldiDir) + if err != nil { + return knownFiles, nil + } + defer rm.Close() + } + + currentTimeline, err := rm.GetCurrentTimeline() + if err != nil { + return knownFiles, nil + } + + timeline, err := rm.GetTimeline(currentTimeline, refs.LocalTimeline) + if err != nil { + return knownFiles, nil + } + + if timeline.Blake3Hash == [32]byte{} { + return knownFiles, nil + } + + objectsDir := filepath.Join(ivaldiDir, "objects") + casStore, err := cas.NewFileCAS(objectsDir) + if err != nil { + return knownFiles, nil + } + + var commitHash cas.Hash + copy(commitHash[:], timeline.Blake3Hash[:]) + + commitReader := commit.NewCommitReader(casStore) + commitObj, err := commitReader.ReadCommit(commitHash) + if err != nil { + return knownFiles, nil + } + + tree, err := commitReader.ReadTree(commitObj) + if err != nil { + return knownFiles, nil + } + + filePaths, err := commitReader.ListFiles(tree) + if err != nil { + return knownFiles, nil + } + + for _, filePath := range filePaths { + content, err := commitReader.GetFileContent(tree, filePath) + if err != nil { + continue + } + hash := objects.HashBlobBLAKE3(content) + knownFiles[filePath] = hash + } + + return knownFiles, nil +} + +// computeFileHash computes the BLAKE3 hash of a file +func computeFileHash(filePath string) ([32]byte, error) { + content, err := os.ReadFile(filePath) + if err != nil { + return [32]byte{}, err + } + return objects.HashBlobBLAKE3(content), nil +} diff --git a/internal/engine/timeline.go b/internal/engine/timeline.go new file mode 100644 index 0000000..11f9855 --- /dev/null +++ b/internal/engine/timeline.go @@ -0,0 +1,226 @@ +package engine + +import ( + "encoding/hex" + "fmt" + "os" + "path/filepath" + + "github.com/javanhut/Ivaldi-vcs/internal/butterfly" + 
"github.com/javanhut/Ivaldi-vcs/internal/cas" + "github.com/javanhut/Ivaldi-vcs/internal/history" + "github.com/javanhut/Ivaldi-vcs/internal/ignore" + "github.com/javanhut/Ivaldi-vcs/internal/refs" + "github.com/javanhut/Ivaldi-vcs/internal/workspace" +) + +// TimelineInfo holds display information about a timeline +type TimelineInfo struct { + Name string + IsCurrent bool + IsButterfly bool + Hash string // Short hex hash + Description string + Type refs.TimelineType +} + +// TimelineListResult holds all timelines grouped by type +type TimelineListResult struct { + Current string + Local []TimelineInfo + Remote []TimelineInfo + Tags []TimelineInfo +} + +// ListTimelines returns all timelines grouped by type +func ListTimelines(ivaldiDir string) (*TimelineListResult, error) { + refsManager, err := refs.NewRefsManager(ivaldiDir) + if err != nil { + return nil, fmt.Errorf("failed to initialize refs manager: %w", err) + } + defer refsManager.Close() + + currentTimeline, _ := refsManager.GetCurrentTimeline() + + // Initialize butterfly manager + objectsDir := filepath.Join(ivaldiDir, "objects") + casStore, _ := cas.NewFileCAS(objectsDir) + var bfManager *butterfly.Manager + if casStore != nil { + mmr, err := history.NewPersistentMMR(casStore, ivaldiDir) + if err == nil { + defer mmr.Close() + bfManager, err = butterfly.NewManager(ivaldiDir, casStore, refsManager, mmr) + if err == nil && bfManager != nil { + defer bfManager.Close() + } + } + } + + result := &TimelineListResult{ + Current: currentTimeline, + } + + // Local timelines + localTimelines, err := refsManager.ListTimelines(refs.LocalTimeline) + if err != nil { + return nil, fmt.Errorf("failed to list local timelines: %w", err) + } + for _, tl := range localTimelines { + info := TimelineInfo{ + Name: tl.Name, + IsCurrent: tl.Name == currentTimeline, + Description: tl.Description, + Type: refs.LocalTimeline, + Hash: shortHash(tl.Blake3Hash), + } + if bfManager != nil { + info.IsButterfly = 
bfManager.IsButterfly(tl.Name) + } + result.Local = append(result.Local, info) + } + + // Remote timelines + remoteTimelines, _ := refsManager.ListTimelines(refs.RemoteTimeline) + for _, tl := range remoteTimelines { + result.Remote = append(result.Remote, TimelineInfo{ + Name: tl.Name, + Description: tl.Description, + Type: refs.RemoteTimeline, + Hash: shortHash(tl.Blake3Hash), + }) + } + + // Tags + tags, _ := refsManager.ListTimelines(refs.TagTimeline) + for _, tl := range tags { + result.Tags = append(result.Tags, TimelineInfo{ + Name: tl.Name, + Description: tl.Description, + Type: refs.TagTimeline, + Hash: shortHash(tl.Blake3Hash), + }) + } + + return result, nil +} + +// SwitchTimeline switches to a different timeline with auto-shelving +func SwitchTimeline(ivaldiDir, workDir, name string) error { + refsManager, err := refs.NewRefsManager(ivaldiDir) + if err != nil { + return fmt.Errorf("failed to initialize refs manager: %w", err) + } + defer refsManager.Close() + + // Check timeline exists + _, err = refsManager.GetTimeline(name, refs.LocalTimeline) + if err != nil { + return fmt.Errorf("timeline '%s' does not exist: %w", name, err) + } + + // Check if already on this timeline + currentTimeline, err := refsManager.GetCurrentTimeline() + if err == nil && currentTimeline == name { + return fmt.Errorf("already on timeline '%s'", name) + } + + objectsDir := filepath.Join(ivaldiDir, "objects") + casStore, err := cas.NewFileCAS(objectsDir) + if err != nil { + return fmt.Errorf("failed to initialize storage: %w", err) + } + + materializer := workspace.NewMaterializer(casStore, ivaldiDir, workDir) + ignoreCache, _ := ignore.LoadPatternCache(workDir) + materializer.SetIgnorePatterns(ignoreCache) + + err = materializer.MaterializeTimelineWithAutoShelf(name, true) + if err != nil { + return fmt.Errorf("failed to switch to timeline '%s': %w", name, err) + } + + return nil +} + +// RemoveTimeline removes a timeline +func RemoveTimeline(ivaldiDir, name string) error { + 
refsManager, err := refs.NewRefsManager(ivaldiDir) + if err != nil { + return fmt.Errorf("failed to initialize refs manager: %w", err) + } + defer refsManager.Close() + + // Check it exists + _, err = refsManager.GetTimeline(name, refs.LocalTimeline) + if err != nil { + return fmt.Errorf("timeline '%s' does not exist: %w", name, err) + } + + // Prevent removing current timeline + currentTimeline, err := refsManager.GetCurrentTimeline() + if err == nil && currentTimeline == name { + return fmt.Errorf("cannot remove current timeline '%s'. Switch to another timeline first", name) + } + + refPath := fmt.Sprintf("%s/refs/heads/%s", ivaldiDir, name) + if err := os.Remove(refPath); err != nil { + return fmt.Errorf("failed to remove timeline file: %w", err) + } + + return nil +} + +// RenameTimeline renames a timeline +func RenameTimeline(ivaldiDir, oldName, newName string, force bool) error { + refsManager, err := refs.NewRefsManager(ivaldiDir) + if err != nil { + return fmt.Errorf("failed to initialize refs manager: %w", err) + } + defer refsManager.Close() + + return refsManager.RenameTimeline(oldName, newName, refs.LocalTimeline, force) +} + +// CreateTimeline creates a new local timeline branched from the current one +func CreateTimeline(ivaldiDir, name string) error { + refsManager, err := refs.NewRefsManager(ivaldiDir) + if err != nil { + return fmt.Errorf("failed to initialize refs manager: %w", err) + } + defer refsManager.Close() + + // Get current timeline to branch from + currentTimeline, _ := refsManager.GetCurrentTimeline() + var blake3Hash, sha256Hash [32]byte + + if currentTimeline != "" { + timeline, err := refsManager.GetTimeline(currentTimeline, refs.LocalTimeline) + if err == nil && timeline.Blake3Hash != [32]byte{} { + blake3Hash = timeline.Blake3Hash + sha256Hash = timeline.SHA256Hash + } + } + + err = refsManager.CreateTimeline( + name, + refs.LocalTimeline, + blake3Hash, + sha256Hash, + "", + fmt.Sprintf("Created timeline '%s'", name), + ) + if err 
!= nil { + return fmt.Errorf("failed to create timeline: %w", err) + } + + return nil +} + +// shortHash returns the first 8 hex characters of a hash, or empty if zero +func shortHash(hash [32]byte) string { + if hash == [32]byte{} { + return "" + } + return hex.EncodeToString(hash[:])[:8] +} diff --git a/internal/tui/app.go b/internal/tui/app.go new file mode 100644 index 0000000..3aa90c9 --- /dev/null +++ b/internal/tui/app.go @@ -0,0 +1,248 @@ +package tui + +import ( + "fmt" + "strings" + + "github.com/charmbracelet/bubbles/key" + tea "github.com/charmbracelet/bubbletea" + "github.com/javanhut/Ivaldi-vcs/internal/tui/components" + "github.com/javanhut/Ivaldi-vcs/internal/tui/style" + "github.com/javanhut/Ivaldi-vcs/internal/tui/views" +) + +// Model is the root TUI model +type Model struct { + workDir string + ivaldiDir string + + tabs components.TabBar + statusBar components.StatusBar + keys KeyMap + theme style.Theme + + activeTab style.TabID + views map[style.TabID]style.View + showHelp bool + helpView views.HelpModel + + width int + height int + + err error +} + +// New creates a new root TUI model +func New(workDir, ivaldiDir string) Model { + theme := style.DefaultTheme() + keys := DefaultKeyMap() + + tabs := style.AllTabs() + tabLabels := make([]string, len(tabs)) + for i, t := range tabs { + tabLabels[i] = t.Label + } + + m := Model{ + workDir: workDir, + ivaldiDir: ivaldiDir, + tabs: components.NewTabBar(tabLabels), + statusBar: components.NewStatusBar(), + keys: keys, + theme: theme, + activeTab: style.TabStatus, + views: make(map[style.TabID]style.View), + helpView: views.NewHelpModel(), + } + + // Initialize views + m.views[style.TabStatus] = views.NewStatusModel(workDir, ivaldiDir) + m.views[style.TabLog] = views.NewLogModel(ivaldiDir) + m.views[style.TabDiff] = views.NewDiffModel(workDir, ivaldiDir) + m.views[style.TabTimelines] = views.NewTimelineModel(workDir, ivaldiDir) + m.views[style.TabRemote] = views.NewRemoteModel(workDir, ivaldiDir) + 
m.views[style.TabFuse] = views.NewFuseModel(workDir, ivaldiDir) + + return m +} + +// Init initializes the TUI +func (m Model) Init() tea.Cmd { + if v, ok := m.views[m.activeTab]; ok { + return v.Init() + } + return nil +} + +// Update handles messages +func (m Model) Update(msg tea.Msg) (tea.Model, tea.Cmd) { + var cmds []tea.Cmd + + switch msg := msg.(type) { + case tea.WindowSizeMsg: + m.width = msg.Width + m.height = msg.Height + m.statusBar.Width = msg.Width + + contentHeight := msg.Height - 4 + + for id, v := range m.views { + sizeMsg := tea.WindowSizeMsg{ + Width: msg.Width, + Height: contentHeight, + } + updated, cmd := v.Update(sizeMsg) + m.views[id] = updated.(style.View) + if cmd != nil { + cmds = append(cmds, cmd) + } + } + + return m, tea.Batch(cmds...) + + case tea.KeyMsg: + if key.Matches(msg, m.keys.Help) { + m.showHelp = !m.showHelp + return m, nil + } + + if m.showHelp { + m.showHelp = false + return m, nil + } + + allTabs := style.AllTabs() + switch { + case key.Matches(msg, m.keys.Quit): + return m, tea.Quit + + case key.Matches(msg, m.keys.Tab): + m.activeTab = style.TabID((int(m.activeTab) + 1) % len(allTabs)) + m.tabs.SetActive(int(m.activeTab)) + return m, m.initActiveView() + + case key.Matches(msg, m.keys.ShiftTab): + n := len(allTabs) + m.activeTab = style.TabID((int(m.activeTab) - 1 + n) % n) + m.tabs.SetActive(int(m.activeTab)) + return m, m.initActiveView() + + case key.Matches(msg, m.keys.Tab1): + return m, m.switchTab(style.TabStatus) + case key.Matches(msg, m.keys.Tab2): + return m, m.switchTab(style.TabLog) + case key.Matches(msg, m.keys.Tab3): + return m, m.switchTab(style.TabDiff) + case key.Matches(msg, m.keys.Tab4): + return m, m.switchTab(style.TabTimelines) + case key.Matches(msg, m.keys.Tab5): + return m, m.switchTab(style.TabRemote) + case key.Matches(msg, m.keys.Tab6): + return m, m.switchTab(style.TabFuse) + } + + case style.StatusUpdateMsg: + m.statusBar.Timeline = msg.Timeline + m.statusBar.SealName = msg.SealName + 
m.statusBar.Staged = msg.Staged + m.statusBar.Modified = msg.Modified + m.statusBar.Untracked = msg.Untracked + return m, nil + + case style.ErrMsg: + m.err = msg.Err + return m, nil + } + + // Forward to active view + if v, ok := m.views[m.activeTab]; ok { + updated, cmd := v.Update(msg) + m.views[m.activeTab] = updated.(style.View) + if cmd != nil { + cmds = append(cmds, cmd) + } + } + + return m, tea.Batch(cmds...) +} + +// View renders the TUI +func (m Model) View() string { + if m.width == 0 { + return "Loading..." + } + + var b strings.Builder + + // Tab bar + b.WriteString(m.tabs.View(m.theme)) + b.WriteString("\n") + + // Content area + contentHeight := m.height - 4 + if contentHeight < 1 { + contentHeight = 1 + } + + allTabs := style.AllTabs() + var content string + if m.showHelp { + content = m.helpView.View(m.width, contentHeight, m.theme) + } else if v, ok := m.views[m.activeTab]; ok { + content = v.View() + } else { + content = m.theme.Dim.Render(fmt.Sprintf(" %s view — coming soon", allTabs[m.activeTab].Label)) + } + + // Pad content to fill the available height + contentLines := strings.Count(content, "\n") + 1 + if contentLines < contentHeight { + content += strings.Repeat("\n", contentHeight-contentLines) + } + + b.WriteString(content) + + // Error display + if m.err != nil { + b.WriteString("\n") + b.WriteString(m.theme.Error.Render("Error: " + m.err.Error())) + } + + // Status bar + b.WriteString("\n") + b.WriteString(m.statusBar.View(m.theme)) + + return b.String() +} + +// switchTab switches to the given tab +func (m *Model) switchTab(tab style.TabID) tea.Cmd { + m.activeTab = tab + m.tabs.SetActive(int(tab)) + return m.initActiveView() +} + +// initActiveView initializes the active view if it exists +func (m *Model) initActiveView() tea.Cmd { + if v, ok := m.views[m.activeTab]; ok { + if m.width > 0 && m.height > 0 { + contentHeight := m.height - 4 + updated, cmd := v.Update(tea.WindowSizeMsg{ + Width: m.width, + Height: contentHeight, + }) + 
m.views[m.activeTab] = updated.(style.View) + return tea.Batch(cmd, v.Init()) + } + return v.Init() + } + return nil +} + +// Run starts the TUI application +func Run(workDir, ivaldiDir string) error { + m := New(workDir, ivaldiDir) + p := tea.NewProgram(m, tea.WithAltScreen()) + _, err := p.Run() + return err +} diff --git a/internal/tui/components/dialog.go b/internal/tui/components/dialog.go new file mode 100644 index 0000000..03b2303 --- /dev/null +++ b/internal/tui/components/dialog.go @@ -0,0 +1,120 @@ +package components + +import ( + "strings" + + "github.com/charmbracelet/bubbles/textinput" + tea "github.com/charmbracelet/bubbletea" + "github.com/javanhut/Ivaldi-vcs/internal/tui/style" +) + +// DialogType indicates which dialog is open +type DialogType int + +const ( + DialogNone DialogType = iota + DialogSeal + DialogPortal +) + +// DialogSubmitMsg is sent when the user submits the dialog +type DialogSubmitMsg struct { + Type DialogType + Value string +} + +// DialogCancelMsg is sent when the user cancels the dialog +type DialogCancelMsg struct { + Type DialogType +} + +// Dialog is a text input overlay for single-line prompts +type Dialog struct { + Type DialogType + Title string + textInput textinput.Model + active bool +} + +// NewDialog creates a new dialog component +func NewDialog() Dialog { + ti := textinput.New() + ti.CharLimit = 256 + ti.Width = 50 + return Dialog{ + textInput: ti, + } +} + +// Open opens the dialog with a title and placeholder +func (d *Dialog) Open(dialogType DialogType, title, placeholder string) tea.Cmd { + d.Type = dialogType + d.Title = title + d.active = true + d.textInput.Placeholder = placeholder + d.textInput.SetValue("") + d.textInput.Focus() + return textinput.Blink +} + +// Close closes the dialog +func (d *Dialog) Close() { + d.active = false + d.textInput.Blur() +} + +// IsActive returns whether the dialog is open +func (d *Dialog) IsActive() bool { + return d.active +} + +// Update handles key events for the 
dialog. Returns a command and whether the +// event was consumed by the dialog. +func (d *Dialog) Update(msg tea.KeyMsg) (tea.Cmd, bool) { + if !d.active { + return nil, false + } + + switch msg.String() { + case "esc": + dialogType := d.Type + d.Close() + return func() tea.Msg { + return DialogCancelMsg{Type: dialogType} + }, true + + case "enter": + value := strings.TrimSpace(d.textInput.Value()) + dialogType := d.Type + d.Close() + if value == "" { + return func() tea.Msg { + return DialogCancelMsg{Type: dialogType} + }, true + } + return func() tea.Msg { + return DialogSubmitMsg{Type: dialogType, Value: value} + }, true + } + + var cmd tea.Cmd + d.textInput, cmd = d.textInput.Update(msg) + return cmd, true +} + +// View renders the dialog +func (d Dialog) View(theme style.Theme) string { + if !d.active { + return "" + } + + var b strings.Builder + b.WriteString("\n") + b.WriteString(" ") + b.WriteString(theme.Title.Render(d.Title)) + b.WriteString("\n ") + b.WriteString(d.textInput.View()) + b.WriteString("\n") + b.WriteString(theme.Dim.Render(" enter: confirm esc: cancel")) + return b.String() +} diff --git a/internal/tui/components/diffview.go b/internal/tui/components/diffview.go new file mode 100644 index 0000000..bf8e724 --- /dev/null +++ b/internal/tui/components/diffview.go @@ -0,0 +1,240 @@ +package components + +import ( + "fmt" + "strings" + + "github.com/javanhut/Ivaldi-vcs/internal/engine" + "github.com/javanhut/Ivaldi-vcs/internal/tui/style" +) + +// DiffView renders a scrollable diff with syntax highlighting +type DiffView struct { + lines []diffLine + offset int + width int + height int +} + +// diffLine is a pre-rendered diff line with a type for coloring +type diffLine struct { + lineType engine.DiffLineType + text string + isHeader bool + isSep bool +} + +// NewDiffView creates a new diff view +func NewDiffView() DiffView { + return DiffView{ + height: 20, + } +} + +// SetSize updates the display size +func (d *DiffView) SetSize(width, height 
int) { + d.width = width + d.height = height + d.fixScroll() +} + +// SetContent builds the rendered lines from file diffs +func (d *DiffView) SetContent(files []engine.FileDiff, theme style.Theme) { + d.lines = nil + d.offset = 0 + + for i, file := range files { + // File header + var header string + switch file.Type { + case engine.DiffAdded: + header = fmt.Sprintf("+++ %s (new file)", file.Path) + case engine.DiffRemoved: + header = fmt.Sprintf("--- %s (deleted)", file.Path) + case engine.DiffModified: + header = fmt.Sprintf("--- a/%s\n+++ b/%s", file.Path, file.Path) + } + for _, h := range strings.Split(header, "\n") { + d.lines = append(d.lines, diffLine{isHeader: true, text: h}) + } + + if file.IsBinary { + d.lines = append(d.lines, diffLine{text: " Binary file differs"}) + } else { + // Hunks + for _, hunk := range file.Hunks { + hunkHeader := fmt.Sprintf("@@ -%d,%d +%d,%d @@", + hunk.OldStart, hunk.OldCount, + hunk.NewStart, hunk.NewCount) + d.lines = append(d.lines, diffLine{isHeader: true, text: hunkHeader}) + + for _, line := range hunk.Lines { + var prefix string + switch line.Type { + case engine.DiffLineContext: + prefix = " " + case engine.DiffLineAdd: + prefix = "+" + case engine.DiffLineRemove: + prefix = "-" + } + d.lines = append(d.lines, diffLine{ + lineType: line.Type, + text: prefix + line.Content, + }) + } + } + } + + // Stats line + stats := fmt.Sprintf(" +%d -%d", file.AddedLines, file.RemovedLines) + d.lines = append(d.lines, diffLine{text: stats}) + + // Separator between files + if i < len(files)-1 { + d.lines = append(d.lines, diffLine{isSep: true, text: ""}) + } + } +} + +// ScrollUp scrolls up by one line +func (d *DiffView) ScrollUp() { + if d.offset > 0 { + d.offset-- + } +} + +// ScrollDown scrolls down by one line +func (d *DiffView) ScrollDown() { + max := len(d.lines) - d.height + if max < 0 { + max = 0 + } + if d.offset < max { + d.offset++ + } +} + +// PageUp scrolls up by a page +func (d *DiffView) PageUp() { + d.offset -= 
d.height + if d.offset < 0 { + d.offset = 0 + } +} + +// PageDown scrolls down by a page +func (d *DiffView) PageDown() { + max := len(d.lines) - d.height + if max < 0 { + max = 0 + } + d.offset += d.height + if d.offset > max { + d.offset = max + } +} + +// ScrollToTop scrolls to the beginning +func (d *DiffView) ScrollToTop() { + d.offset = 0 +} + +// ScrollToBottom scrolls to the end +func (d *DiffView) ScrollToBottom() { + max := len(d.lines) - d.height + if max < 0 { + max = 0 + } + d.offset = max +} + +// NextFile jumps to the next file header +func (d *DiffView) NextFile() { + for i := d.offset + 1; i < len(d.lines); i++ { + if d.lines[i].isHeader && (i == 0 || d.lines[i-1].isSep || i == d.offset+1) { + // Found the start of a file section + if strings.HasPrefix(d.lines[i].text, "---") || strings.HasPrefix(d.lines[i].text, "+++") { + d.offset = i + d.fixScroll() + return + } + } + } +} + +// PrevFile jumps to the previous file header +func (d *DiffView) PrevFile() { + for i := d.offset - 1; i >= 0; i-- { + if d.lines[i].isHeader { + if strings.HasPrefix(d.lines[i].text, "---") || strings.HasPrefix(d.lines[i].text, "+++") { + d.offset = i + d.fixScroll() + return + } + } + } +} + +// TotalLines returns the total number of lines +func (d *DiffView) TotalLines() int { + return len(d.lines) +} + +// Offset returns the current scroll offset +func (d *DiffView) Offset() int { + return d.offset +} + +// fixScroll ensures the offset is within bounds +func (d *DiffView) fixScroll() { + max := len(d.lines) - d.height + if max < 0 { + max = 0 + } + if d.offset > max { + d.offset = max + } + if d.offset < 0 { + d.offset = 0 + } +} + +// View renders the visible portion of the diff +func (d DiffView) View(theme style.Theme) string { + if len(d.lines) == 0 { + return theme.Dim.Render(" No differences") + } + + var b strings.Builder + end := d.offset + d.height + if end > len(d.lines) { + end = len(d.lines) + } + + for i := d.offset; i < end; i++ { + line := d.lines[i] + + 
var rendered string + switch { + case line.isSep: + rendered = "" + case line.isHeader: + rendered = theme.Bold.Render(line.text) + case line.lineType == engine.DiffLineAdd: + rendered = theme.Added.Render(line.text) + case line.lineType == engine.DiffLineRemove: + rendered = theme.Deleted.Render(line.text) + default: + rendered = line.text + } + + b.WriteString(" ") + b.WriteString(rendered) + if i < end-1 { + b.WriteString("\n") + } + } + + return b.String() +} diff --git a/internal/tui/components/filelist.go b/internal/tui/components/filelist.go new file mode 100644 index 0000000..c44211c --- /dev/null +++ b/internal/tui/components/filelist.go @@ -0,0 +1,243 @@ +package components + +import ( + "fmt" + "strings" + + "github.com/charmbracelet/bubbles/key" + tea "github.com/charmbracelet/bubbletea" + "github.com/javanhut/Ivaldi-vcs/internal/engine" + "github.com/javanhut/Ivaldi-vcs/internal/tui/style" +) + +// FileItem represents a file in the list +type FileItem struct { + Info engine.FileStatusInfo + Selected bool +} + +// FileList is a selectable, scrollable file list with toggle +type FileList struct { + Items []FileItem + cursor int + offset int // scroll offset + height int + width int + focused bool + + Keys FileListKeyMap +} + +// FileListKeyMap defines keybindings for the file list +type FileListKeyMap struct { + Up key.Binding + Down key.Binding + Toggle key.Binding + Top key.Binding + Bottom key.Binding +} + +// DefaultFileListKeyMap returns default keybindings +func DefaultFileListKeyMap() FileListKeyMap { + return FileListKeyMap{ + Up: key.NewBinding( + key.WithKeys("up", "k"), + key.WithHelp("k/up", "move up"), + ), + Down: key.NewBinding( + key.WithKeys("down", "j"), + key.WithHelp("j/down", "move down"), + ), + Toggle: key.NewBinding( + key.WithKeys(" "), + key.WithHelp("space", "toggle staging"), + ), + Top: key.NewBinding( + key.WithKeys("g"), + ), + Bottom: key.NewBinding( + key.WithKeys("G"), + ), + } +} + +// NewFileList creates a new file 
list +func NewFileList() FileList { + return FileList{ + Keys: DefaultFileListKeyMap(), + height: 20, + } +} + +// SetItems updates the list items +func (f *FileList) SetItems(items []FileItem) { + f.Items = items + if f.cursor >= len(items) { + f.cursor = len(items) - 1 + } + if f.cursor < 0 { + f.cursor = 0 + } + f.fixScroll() +} + +// SetSize updates the visible dimensions +func (f *FileList) SetSize(width, height int) { + f.width = width + f.height = height + f.fixScroll() +} + +// Focus sets whether the list is focused +func (f *FileList) Focus(focused bool) { + f.focused = focused +} + +// Cursor returns the current cursor position +func (f *FileList) Cursor() int { + return f.cursor +} + +// SelectedItem returns the item under the cursor, or nil +func (f *FileList) SelectedItem() *FileItem { + if f.cursor >= 0 && f.cursor < len(f.Items) { + return &f.Items[f.cursor] + } + return nil +} + +// ToggleCurrent toggles the selected state of the item under the cursor +func (f *FileList) ToggleCurrent() { + if f.cursor >= 0 && f.cursor < len(f.Items) { + f.Items[f.cursor].Selected = !f.Items[f.cursor].Selected + } +} + +// Update handles key messages +func (f *FileList) Update(msg tea.Msg) tea.Cmd { + if !f.focused || len(f.Items) == 0 { + return nil + } + + switch msg := msg.(type) { + case tea.KeyMsg: + switch { + case key.Matches(msg, f.Keys.Up): + if f.cursor > 0 { + f.cursor-- + f.fixScroll() + } + case key.Matches(msg, f.Keys.Down): + if f.cursor < len(f.Items)-1 { + f.cursor++ + f.fixScroll() + } + case key.Matches(msg, f.Keys.Top): + f.cursor = 0 + f.fixScroll() + case key.Matches(msg, f.Keys.Bottom): + f.cursor = len(f.Items) - 1 + f.fixScroll() + case key.Matches(msg, f.Keys.Toggle): + f.ToggleCurrent() + } + } + + return nil +} + +// fixScroll ensures the cursor is visible +func (f *FileList) fixScroll() { + if f.height <= 0 { + return + } + if f.cursor < f.offset { + f.offset = f.cursor + } + if f.cursor >= f.offset+f.height { + f.offset = f.cursor - 
f.height + 1 + } +} + +// View renders the file list +func (f FileList) View(theme style.Theme) string { + if len(f.Items) == 0 { + return theme.Dim.Render(" No files") + } + + var b strings.Builder + end := f.offset + f.height + if end > len(f.Items) { + end = len(f.Items) + } + + for i := f.offset; i < end; i++ { + item := f.Items[i] + isCursor := i == f.cursor && f.focused + + // Cursor indicator + prefix := " " + if isCursor { + prefix = theme.Cursor.Render("> ") + } + + // Status indicator + statusStr := statusLabel(item.Info.Status, theme) + + // File path + pathStr := styleFilePath(item.Info.Path, item.Info.Status, theme) + + line := fmt.Sprintf("%s%s %s", prefix, statusStr, pathStr) + + if isCursor { + line = theme.Selected.Render(line) + } + + b.WriteString(line) + if i < end-1 { + b.WriteString("\n") + } + } + + return b.String() +} + +// statusLabel returns a styled status label +func statusLabel(status engine.FileStatus, theme style.Theme) string { + switch status { + case engine.StatusAdded: + return theme.Added.Render("[new] ") + case engine.StatusStaged: + return theme.Staged.Render("[staged] ") + case engine.StatusModified: + return theme.Modified.Render("[modified] ") + case engine.StatusDeleted: + return theme.Deleted.Render("[deleted] ") + case engine.StatusUntracked: + return theme.Untracked.Render("[untracked]") + case engine.StatusIgnored: + return theme.Ignored.Render("[ignored] ") + default: + return " " + } +} + +// styleFilePath styles a file path based on status +func styleFilePath(path string, status engine.FileStatus, theme style.Theme) string { + switch status { + case engine.StatusAdded: + return theme.Added.Render(path) + case engine.StatusStaged: + return theme.Staged.Render(path) + case engine.StatusModified: + return theme.Modified.Render(path) + case engine.StatusDeleted: + return theme.Deleted.Render(path) + case engine.StatusUntracked: + return theme.Untracked.Render(path) + case engine.StatusIgnored: + return 
theme.Ignored.Render(path) + default: + return path + } +} diff --git a/internal/tui/components/statusbar.go b/internal/tui/components/statusbar.go new file mode 100644 index 0000000..ae87f80 --- /dev/null +++ b/internal/tui/components/statusbar.go @@ -0,0 +1,96 @@ +package components + +import ( + "fmt" + "strings" + + "github.com/javanhut/Ivaldi-vcs/internal/tui/style" +) + +// StatusBar renders the bottom info bar +type StatusBar struct { + Timeline string + SealName string + Staged int + Modified int + Untracked int + Width int +} + +// NewStatusBar creates a new status bar +func NewStatusBar() StatusBar { + return StatusBar{} +} + +// View renders the status bar +func (s StatusBar) View(theme style.Theme) string { + var parts []string + + if s.Timeline != "" { + parts = append(parts, + theme.StatusKey.Render("Timeline: ")+theme.StatusValue.Render(s.Timeline)) + } + + if s.SealName != "" { + parts = append(parts, + theme.StatusKey.Render("Seal: ")+theme.StatusValue.Render(s.SealName)) + } + + // File counts + var counts []string + if s.Staged > 0 { + counts = append(counts, fmt.Sprintf("%d staged", s.Staged)) + } + if s.Modified > 0 { + counts = append(counts, fmt.Sprintf("%d modified", s.Modified)) + } + if s.Untracked > 0 { + counts = append(counts, fmt.Sprintf("%d untracked", s.Untracked)) + } + if len(counts) > 0 { + parts = append(parts, theme.StatusValue.Render(strings.Join(counts, ", "))) + } + + divider := theme.StatusDivider.Render(" | ") + content := strings.Join(parts, divider) + + // Add help hint on the right + helpHint := theme.Help.Render("?=help") + contentWidth := lipglossWidth(content) + helpWidth := lipglossWidth(helpHint) + + padding := s.Width - contentWidth - helpWidth - 2 + if padding < 1 { + padding = 1 + } + + bar := content + strings.Repeat(" ", padding) + helpHint + + return theme.StatusBar.Width(s.Width).Render(bar) +} + +// lipglossWidth estimates the visible width of a styled string +func lipglossWidth(s string) int { + clean := 
stripAnsi(s) + return len([]rune(clean)) +} + +// stripAnsi removes ANSI escape sequences from a string +func stripAnsi(s string) string { + var result strings.Builder + inEscape := false + for _, r := range s { + if r == '\033' { + inEscape = true + continue + } + if inEscape { + if (r >= 'a' && r <= 'z') || (r >= 'A' && r <= 'Z') { + inEscape = false + } + continue + } + result.WriteRune(r) + } + return result.String() +} diff --git a/internal/tui/components/tabs.go b/internal/tui/components/tabs.go new file mode 100644 index 0000000..055a117 --- /dev/null +++ b/internal/tui/components/tabs.go @@ -0,0 +1,47 @@ +package components + +import ( + "strings" + + "github.com/javanhut/Ivaldi-vcs/internal/tui/style" +) + +// TabBar represents a horizontal tab bar +type TabBar struct { + Labels []string + Active int +} + +// NewTabBar creates a new tab bar with the given labels +func NewTabBar(labels []string) TabBar { + return TabBar{ + Labels: labels, + Active: 0, + } +} + +// SetActive sets the active tab index +func (t *TabBar) SetActive(index int) { + if index >= 0 && index < len(t.Labels) { + t.Active = index + } +} + +// View renders the tab bar +func (t TabBar) View(theme style.Theme) string { + var tabs []string + for i, label := range t.Labels { + prefix := " " + if i+1 <= 9 { + prefix = string(rune('0' + i + 1)) + } + display := prefix + ":" + label + if i == t.Active { + tabs = append(tabs, theme.ActiveTab.Render(display)) + } else { + tabs = append(tabs, theme.InactiveTab.Render(display)) + } + } + row := strings.Join(tabs, " ") + return theme.TabBar.Render(row) +} diff --git a/internal/tui/keys.go b/internal/tui/keys.go new file mode 100644 index 0000000..ee64a37 --- /dev/null +++ b/internal/tui/keys.go @@ -0,0 +1,63 @@ +package tui + +import "github.com/charmbracelet/bubbles/key" + +// KeyMap defines the global keybindings +type KeyMap struct { + Quit key.Binding + Help key.Binding + Tab key.Binding + ShiftTab key.Binding + Tab1 key.Binding + Tab2 
key.Binding + Tab3 key.Binding + Tab4 key.Binding + Tab5 key.Binding + Tab6 key.Binding +} + +// DefaultKeyMap returns the default global keybindings +func DefaultKeyMap() KeyMap { + return KeyMap{ + Quit: key.NewBinding( + key.WithKeys("q", "ctrl+c"), + key.WithHelp("q", "quit"), + ), + Help: key.NewBinding( + key.WithKeys("?"), + key.WithHelp("?", "help"), + ), + Tab: key.NewBinding( + key.WithKeys("tab"), + key.WithHelp("tab", "next tab"), + ), + ShiftTab: key.NewBinding( + key.WithKeys("shift+tab"), + key.WithHelp("shift+tab", "prev tab"), + ), + Tab1: key.NewBinding( + key.WithKeys("1"), + key.WithHelp("1", "status"), + ), + Tab2: key.NewBinding( + key.WithKeys("2"), + key.WithHelp("2", "log"), + ), + Tab3: key.NewBinding( + key.WithKeys("3"), + key.WithHelp("3", "diff"), + ), + Tab4: key.NewBinding( + key.WithKeys("4"), + key.WithHelp("4", "timelines"), + ), + Tab5: key.NewBinding( + key.WithKeys("5"), + key.WithHelp("5", "remote"), + ), + Tab6: key.NewBinding( + key.WithKeys("6"), + key.WithHelp("6", "fuse"), + ), + } +} diff --git a/internal/tui/style/common.go b/internal/tui/style/common.go new file mode 100644 index 0000000..7aa312e --- /dev/null +++ b/internal/tui/style/common.go @@ -0,0 +1,60 @@ +package style + +import tea "github.com/charmbracelet/bubbletea" + +// Tab identifiers +type TabID int + +const ( + TabStatus TabID = iota + TabLog + TabDiff + TabTimelines + TabRemote + TabFuse +) + +// TabInfo describes a tab +type TabInfo struct { + ID TabID + Label string +} + +// AllTabs returns the ordered list of tabs +func AllTabs() []TabInfo { + return []TabInfo{ + {TabStatus, "Status"}, + {TabLog, "Log"}, + {TabDiff, "Diff"}, + {TabTimelines, "Timelines"}, + {TabRemote, "Remote"}, + {TabFuse, "Fuse"}, + } +} + +// View is the interface that all TUI views must implement +type View interface { + tea.Model + ShortHelp() string +} + +// Messages + +// ErrMsg signals an error to the root model +type ErrMsg struct { + Err error +} + +// RefreshMsg signals 
that views should reload their data +type RefreshMsg struct{} + +// StatusUpdateMsg carries updated status data +type StatusUpdateMsg struct { + Timeline string + SealName string + FileCount int + Staged int + Modified int + Untracked int + Deleted int +} diff --git a/internal/tui/style/theme.go b/internal/tui/style/theme.go new file mode 100644 index 0000000..e0a8baa --- /dev/null +++ b/internal/tui/style/theme.go @@ -0,0 +1,133 @@ +package style + +import "github.com/charmbracelet/lipgloss" + +// Theme holds all the styles used in the TUI +type Theme struct { + // Tab bar + ActiveTab lipgloss.Style + InactiveTab lipgloss.Style + TabBar lipgloss.Style + + // Status bar + StatusBar lipgloss.Style + StatusKey lipgloss.Style + StatusValue lipgloss.Style + StatusDivider lipgloss.Style + + // File statuses + Staged lipgloss.Style + Added lipgloss.Style + Modified lipgloss.Style + Deleted lipgloss.Style + Untracked lipgloss.Style + Ignored lipgloss.Style + + // General + Title lipgloss.Style + Subtitle lipgloss.Style + Selected lipgloss.Style + Cursor lipgloss.Style + Dim lipgloss.Style + Bold lipgloss.Style + Error lipgloss.Style + Success lipgloss.Style + Warning lipgloss.Style + Info lipgloss.Style + Help lipgloss.Style + HelpKey lipgloss.Style + HelpDesc lipgloss.Style + SectionHead lipgloss.Style + + // Viewport + ViewportBorder lipgloss.Style +} + +// DefaultTheme returns the default TUI theme +func DefaultTheme() Theme { + return Theme{ + // Tab bar + ActiveTab: lipgloss.NewStyle(). + Bold(true). + Foreground(lipgloss.Color("#FFFFFF")). + Background(lipgloss.Color("#7D56F4")). + Padding(0, 2), + InactiveTab: lipgloss.NewStyle(). + Foreground(lipgloss.Color("#888888")). + Padding(0, 2), + TabBar: lipgloss.NewStyle(). + BorderStyle(lipgloss.NormalBorder()). + BorderBottom(true). + BorderForeground(lipgloss.Color("#444444")), + + // Status bar + StatusBar: lipgloss.NewStyle(). + Foreground(lipgloss.Color("#AAAAAA")). + Background(lipgloss.Color("#333333")). 
+ Padding(0, 1), + StatusKey: lipgloss.NewStyle(). + Foreground(lipgloss.Color("#7D56F4")). + Bold(true), + StatusValue: lipgloss.NewStyle(). + Foreground(lipgloss.Color("#DDDDDD")), + StatusDivider: lipgloss.NewStyle(). + Foreground(lipgloss.Color("#555555")), + + // File statuses + Staged: lipgloss.NewStyle(). + Foreground(lipgloss.Color("#00FF00")), + Added: lipgloss.NewStyle(). + Foreground(lipgloss.Color("#00FF00")). + Bold(true), + Modified: lipgloss.NewStyle(). + Foreground(lipgloss.Color("#5599FF")), + Deleted: lipgloss.NewStyle(). + Foreground(lipgloss.Color("#FF5555")), + Untracked: lipgloss.NewStyle(). + Foreground(lipgloss.Color("#FFFF55")), + Ignored: lipgloss.NewStyle(). + Foreground(lipgloss.Color("#666666")), + + // General + Title: lipgloss.NewStyle(). + Bold(true). + Foreground(lipgloss.Color("#FFFFFF")), + Subtitle: lipgloss.NewStyle(). + Foreground(lipgloss.Color("#AAAAAA")), + Selected: lipgloss.NewStyle(). + Background(lipgloss.Color("#444444")). + Foreground(lipgloss.Color("#FFFFFF")), + Cursor: lipgloss.NewStyle(). + Foreground(lipgloss.Color("#7D56F4")). + Bold(true), + Dim: lipgloss.NewStyle(). + Foreground(lipgloss.Color("#666666")), + Bold: lipgloss.NewStyle(). + Bold(true), + Error: lipgloss.NewStyle(). + Foreground(lipgloss.Color("#FF5555")). + Bold(true), + Success: lipgloss.NewStyle(). + Foreground(lipgloss.Color("#00FF00")), + Warning: lipgloss.NewStyle(). + Foreground(lipgloss.Color("#FFFF55")), + Info: lipgloss.NewStyle(). + Foreground(lipgloss.Color("#55FFFF")), + Help: lipgloss.NewStyle(). + Foreground(lipgloss.Color("#888888")), + HelpKey: lipgloss.NewStyle(). + Foreground(lipgloss.Color("#7D56F4")). + Bold(true), + HelpDesc: lipgloss.NewStyle(). + Foreground(lipgloss.Color("#888888")), + SectionHead: lipgloss.NewStyle(). + Bold(true). + Foreground(lipgloss.Color("#FFFFFF")). + MarginTop(1), + + // Viewport + ViewportBorder: lipgloss.NewStyle(). + BorderStyle(lipgloss.RoundedBorder()). 
+ BorderForeground(lipgloss.Color("#444444")), + } +} diff --git a/internal/tui/views/diff.go b/internal/tui/views/diff.go new file mode 100644 index 0000000..39d8d42 --- /dev/null +++ b/internal/tui/views/diff.go @@ -0,0 +1,267 @@ +package views + +import ( + "fmt" + "strings" + + "github.com/charmbracelet/bubbles/key" + tea "github.com/charmbracelet/bubbletea" + "github.com/javanhut/Ivaldi-vcs/internal/engine" + "github.com/javanhut/Ivaldi-vcs/internal/tui/components" + "github.com/javanhut/Ivaldi-vcs/internal/tui/style" +) + +// diffKeyMap defines keybindings specific to the diff view +type diffKeyMap struct { + Up key.Binding + Down key.Binding + PageUp key.Binding + PageDown key.Binding + Top key.Binding + Bottom key.Binding + NextFile key.Binding + PrevFile key.Binding + Toggle key.Binding + Refresh key.Binding +} + +func defaultDiffKeyMap() diffKeyMap { + return diffKeyMap{ + Up: key.NewBinding( + key.WithKeys("up", "k"), + key.WithHelp("k/up", "scroll up"), + ), + Down: key.NewBinding( + key.WithKeys("down", "j"), + key.WithHelp("j/down", "scroll down"), + ), + PageUp: key.NewBinding( + key.WithKeys("pgup", "ctrl+u"), + key.WithHelp("pgup", "page up"), + ), + PageDown: key.NewBinding( + key.WithKeys("pgdown", "ctrl+d"), + key.WithHelp("pgdn", "page down"), + ), + Top: key.NewBinding( + key.WithKeys("g"), + key.WithHelp("g", "top"), + ), + Bottom: key.NewBinding( + key.WithKeys("G"), + key.WithHelp("G", "bottom"), + ), + NextFile: key.NewBinding( + key.WithKeys("n"), + key.WithHelp("n", "next file"), + ), + PrevFile: key.NewBinding( + key.WithKeys("p"), + key.WithHelp("p", "prev file"), + ), + Toggle: key.NewBinding( + key.WithKeys("s"), + key.WithHelp("s", "toggle staged"), + ), + Refresh: key.NewBinding( + key.WithKeys("r"), + key.WithHelp("r", "refresh"), + ), + } +} + +// DiffModel is the diff view model +type DiffModel struct { + workDir string + ivaldiDir string + diffView components.DiffView + keys diffKeyMap + theme style.Theme + + result 
*engine.DiffResult + staged bool + loading bool + err error + width int + height int +} + +// diffLoadedMsg carries loaded diff data +type diffLoadedMsg struct { + result *engine.DiffResult + err error +} + +// NewDiffModel creates a new diff view +func NewDiffModel(workDir, ivaldiDir string) *DiffModel { + return &DiffModel{ + workDir: workDir, + ivaldiDir: ivaldiDir, + diffView: components.NewDiffView(), + keys: defaultDiffKeyMap(), + theme: style.DefaultTheme(), + loading: true, + } +} + +// Init loads the initial diff data +func (m *DiffModel) Init() tea.Cmd { + m.loading = true + return m.loadDiff() +} + +// Update handles messages +func (m *DiffModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) { + switch msg := msg.(type) { + case tea.WindowSizeMsg: + m.width = msg.Width + m.height = msg.Height + // Reserve space for header + footer + viewHeight := msg.Height - 4 + if viewHeight < 1 { + viewHeight = 1 + } + m.diffView.SetSize(msg.Width, viewHeight) + return m, nil + + case diffLoadedMsg: + m.loading = false + if msg.err != nil { + m.err = msg.err + return m, nil + } + m.result = msg.result + m.err = nil + m.diffView.SetContent(msg.result.Files, m.theme) + return m, nil + + case tea.KeyMsg: + switch { + case key.Matches(msg, m.keys.Refresh): + m.loading = true + return m, m.loadDiff() + + case key.Matches(msg, m.keys.Toggle): + m.staged = !m.staged + m.loading = true + return m, m.loadDiff() + + case key.Matches(msg, m.keys.Up): + m.diffView.ScrollUp() + return m, nil + + case key.Matches(msg, m.keys.Down): + m.diffView.ScrollDown() + return m, nil + + case key.Matches(msg, m.keys.PageUp): + m.diffView.PageUp() + return m, nil + + case key.Matches(msg, m.keys.PageDown): + m.diffView.PageDown() + return m, nil + + case key.Matches(msg, m.keys.Top): + m.diffView.ScrollToTop() + return m, nil + + case key.Matches(msg, m.keys.Bottom): + m.diffView.ScrollToBottom() + return m, nil + + case key.Matches(msg, m.keys.NextFile): + m.diffView.NextFile() + return m, nil + + 
case key.Matches(msg, m.keys.PrevFile): + m.diffView.PrevFile() + return m, nil + } + } + + return m, nil +} + +// View renders the diff view +func (m *DiffModel) View() string { + if m.loading { + return m.theme.Dim.Render(" Computing diff...") + } + + if m.err != nil { + return m.theme.Error.Render(fmt.Sprintf(" Error: %v", m.err)) + } + + if m.result == nil { + return m.theme.Dim.Render(" No diff data") + } + + var b strings.Builder + + // Header + b.WriteString(" ") + modeLabel := "working directory vs HEAD" + if m.staged { + modeLabel = "staged vs HEAD" + } + b.WriteString(m.theme.Title.Render(fmt.Sprintf("Diff — %s", modeLabel))) + + if len(m.result.Files) > 0 { + var statParts []string + if m.result.Stats.Added > 0 { + statParts = append(statParts, m.theme.Added.Render(fmt.Sprintf("+%d", m.result.Stats.Added))) + } + if m.result.Stats.Modified > 0 { + statParts = append(statParts, m.theme.Modified.Render(fmt.Sprintf("~%d", m.result.Stats.Modified))) + } + if m.result.Stats.Removed > 0 { + statParts = append(statParts, m.theme.Deleted.Render(fmt.Sprintf("-%d", m.result.Stats.Removed))) + } + b.WriteString(" ") + b.WriteString(strings.Join(statParts, " ")) + } + b.WriteString("\n") + + // Diff content + if len(m.result.Files) == 0 { + b.WriteString(m.theme.Success.Render(" No differences")) + } else { + b.WriteString(m.diffView.View(m.theme)) + } + + // Footer with scroll info + total := m.diffView.TotalLines() + if total > 0 { + b.WriteString("\n") + offset := m.diffView.Offset() + pct := 0 + if total > 0 { + pct = (offset * 100) / total + } + b.WriteString(m.theme.Dim.Render( + fmt.Sprintf(" %d files %d lines %d%%", + len(m.result.Files), total, pct))) + } + + return b.String() +} + +// ShortHelp returns a short help string +func (m *DiffModel) ShortHelp() string { + return "j/k:scroll n/p:next/prev file s:staged g/G:top/bottom r:refresh" +} + +// loadDiff loads diff data asynchronously +func (m *DiffModel) loadDiff() tea.Cmd { + workDir := m.workDir + 
ivaldiDir := m.ivaldiDir + staged := m.staged + return func() tea.Msg { + opts := engine.DiffOptions{ + Staged: staged, + } + result, err := engine.ComputeDiff(ivaldiDir, workDir, opts) + return diffLoadedMsg{result: result, err: err} + } +} diff --git a/internal/tui/views/fuse.go b/internal/tui/views/fuse.go new file mode 100644 index 0000000..0ef2294 --- /dev/null +++ b/internal/tui/views/fuse.go @@ -0,0 +1,548 @@ +package views + +import ( + "fmt" + "strings" + + "github.com/charmbracelet/bubbles/key" + tea "github.com/charmbracelet/bubbletea" + "github.com/javanhut/Ivaldi-vcs/internal/diffmerge" + "github.com/javanhut/Ivaldi-vcs/internal/engine" + "github.com/javanhut/Ivaldi-vcs/internal/tui/style" +) + +// Available merge strategies in display order +var fuseStrategies = []diffmerge.StrategyType{ + diffmerge.StrategyAuto, + diffmerge.StrategyOurs, + diffmerge.StrategyTheirs, + diffmerge.StrategyUnion, + diffmerge.StrategyBase, +} + +var fuseStrategyDescs = map[diffmerge.StrategyType]string{ + diffmerge.StrategyAuto: "Intelligent chunk-level merge", + diffmerge.StrategyOurs: "Keep target timeline version", + diffmerge.StrategyTheirs: "Accept source timeline version", + diffmerge.StrategyUnion: "Combine both versions", + diffmerge.StrategyBase: "Revert to common ancestor", +} + +// fuseKeyMap defines keybindings for the fuse view +type fuseKeyMap struct { + Up key.Binding + Down key.Binding + Select key.Binding + Strategy key.Binding + Abort key.Binding + Refresh key.Binding + GoTop key.Binding + GoBottom key.Binding +} + +func defaultFuseKeyMap() fuseKeyMap { + return fuseKeyMap{ + Up: key.NewBinding( + key.WithKeys("up", "k"), + key.WithHelp("k/up", "move up"), + ), + Down: key.NewBinding( + key.WithKeys("down", "j"), + key.WithHelp("j/down", "move down"), + ), + Select: key.NewBinding( + key.WithKeys("enter", "f"), + key.WithHelp("enter/f", "fuse selected"), + ), + Strategy: key.NewBinding( + key.WithKeys("s"), + key.WithHelp("s", "cycle strategy"), + ), + 
Abort: key.NewBinding( + key.WithKeys("a"), + key.WithHelp("a", "abort merge"), + ), + Refresh: key.NewBinding( + key.WithKeys("r"), + key.WithHelp("r", "refresh"), + ), + GoTop: key.NewBinding( + key.WithKeys("g"), + key.WithHelp("g", "go to top"), + ), + GoBottom: key.NewBinding( + key.WithKeys("G"), + key.WithHelp("G", "go to bottom"), + ), + } +} + +// fuseState represents the current state of the fuse view +type fuseState int + +const ( + fuseStateIdle fuseState = iota // Selecting timeline to fuse + fuseStateBusy // Operation in progress + fuseStateResult // Showing fuse result + fuseStateConflicts // Showing conflicts from merge in progress +) + +// FuseModel is the fuse (merge) view model +type FuseModel struct { + workDir string + ivaldiDir string + keys fuseKeyMap + theme style.Theme + + state fuseState + loading bool + busy string + err error + msg string + width int + height int + + // Timeline selection + timelines []engine.TimelineInfo + current string + cursor int + strategyIndex int // Index into fuseStrategies + + // Merge status + fuseStatus *engine.FuseStatus + lastResult *engine.FuseResult +} + +// Fuse message types + +type fuseTimelinesLoadedMsg struct { + timelines []engine.TimelineInfo + current string + status *engine.FuseStatus + err error +} + +type fuseDoneMsg struct { + result *engine.FuseResult + err error +} + +type fuseAbortDoneMsg struct { + err error +} + +// NewFuseModel creates a new fuse view +func NewFuseModel(workDir, ivaldiDir string) *FuseModel { + return &FuseModel{ + workDir: workDir, + ivaldiDir: ivaldiDir, + keys: defaultFuseKeyMap(), + theme: style.DefaultTheme(), + loading: true, + } +} + +// Init loads timeline list and merge status +func (m *FuseModel) Init() tea.Cmd { + m.loading = true + return m.loadData() +} + +// Update handles messages +func (m *FuseModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) { + switch msg := msg.(type) { + case tea.WindowSizeMsg: + m.width = msg.Width + m.height = msg.Height + return m, 
nil + + case fuseTimelinesLoadedMsg: + m.loading = false + if msg.err != nil { + m.err = msg.err + return m, nil + } + m.timelines = msg.timelines + m.current = msg.current + m.fuseStatus = msg.status + m.err = nil + + // If merge in progress, show conflicts state + if m.fuseStatus != nil && m.fuseStatus.InProgress { + m.state = fuseStateConflicts + } else { + m.state = fuseStateIdle + } + + // Reset cursor if out of bounds + if m.cursor >= len(m.timelines) { + m.cursor = 0 + } + return m, nil + + case fuseDoneMsg: + m.busy = "" + if msg.err != nil { + m.err = msg.err + m.state = fuseStateIdle + return m, nil + } + m.err = nil + m.lastResult = msg.result + + if msg.result.Type == engine.FuseMergeConflicts { + m.state = fuseStateConflicts + // Reload to get merge status + return m, m.loadData() + } + + m.state = fuseStateResult + return m, nil + + case fuseAbortDoneMsg: + m.busy = "" + if msg.err != nil { + m.err = msg.err + return m, nil + } + m.err = nil + m.msg = "Merge aborted" + m.state = fuseStateIdle + m.fuseStatus = nil + m.lastResult = nil + return m, m.loadData() + + case tea.KeyMsg: + if m.loading || m.busy != "" { + return m, nil + } + + switch { + case key.Matches(msg, m.keys.Refresh): + m.loading = true + m.msg = "" + m.err = nil + return m, m.loadData() + + case key.Matches(msg, m.keys.Abort): + if m.fuseStatus != nil && m.fuseStatus.InProgress { + m.busy = "Aborting merge..." 
+ m.msg = "" + m.err = nil + return m, m.doAbort() + } + return m, nil + + case key.Matches(msg, m.keys.Strategy): + if m.state == fuseStateIdle { + m.strategyIndex = (m.strategyIndex + 1) % len(fuseStrategies) + } + return m, nil + + case key.Matches(msg, m.keys.Select): + if m.state == fuseStateIdle && len(m.timelines) > 0 && m.cursor < len(m.timelines) { + source := m.timelines[m.cursor].Name + strategy := string(fuseStrategies[m.strategyIndex]) + m.busy = fmt.Sprintf("Fusing %s into %s...", source, m.current) + m.msg = "" + m.err = nil + m.state = fuseStateBusy + return m, m.doFuse(source, m.current, strategy) + } + return m, nil + + case key.Matches(msg, m.keys.Up): + if m.state == fuseStateIdle && m.cursor > 0 { + m.cursor-- + } + return m, nil + + case key.Matches(msg, m.keys.Down): + if m.state == fuseStateIdle && m.cursor < len(m.timelines)-1 { + m.cursor++ + } + return m, nil + + case key.Matches(msg, m.keys.GoTop): + if m.state == fuseStateIdle { + m.cursor = 0 + } + return m, nil + + case key.Matches(msg, m.keys.GoBottom): + if m.state == fuseStateIdle && len(m.timelines) > 0 { + m.cursor = len(m.timelines) - 1 + } + return m, nil + } + } + + return m, nil +} + +// View renders the fuse view +func (m *FuseModel) View() string { + if m.loading { + return m.theme.Dim.Render(" Loading fuse info...") + } + + var b strings.Builder + + // Header + b.WriteString(" ") + b.WriteString(m.theme.Title.Render("Fuse (Merge)")) + b.WriteString("\n\n") + + // Show merge-in-progress state + if m.fuseStatus != nil && m.fuseStatus.InProgress { + m.renderMergeInProgress(&b) + } else if m.state == fuseStateResult && m.lastResult != nil { + m.renderResult(&b) + } else { + m.renderTimelineSelection(&b) + } + + // Busy indicator + if m.busy != "" { + b.WriteString("\n ") + b.WriteString(m.theme.Info.Render("~ " + m.busy)) + b.WriteString("\n") + } + + // Success message + if m.msg != "" && m.busy == "" { + b.WriteString("\n ") + b.WriteString(m.theme.Success.Render(m.msg)) + 
b.WriteString("\n") + } + + // Error + if m.err != nil && m.busy == "" { + b.WriteString("\n ") + b.WriteString(m.theme.Error.Render(fmt.Sprintf("Error: %v", m.err))) + b.WriteString("\n") + } + + // Footer + b.WriteString("\n") + if m.fuseStatus != nil && m.fuseStatus.InProgress { + b.WriteString(m.theme.Dim.Render(" a:abort s:retry strategy r:refresh")) + } else if m.state == fuseStateResult { + b.WriteString(m.theme.Dim.Render(" r:refresh")) + } else { + b.WriteString(m.theme.Dim.Render(" j/k:navigate enter/f:fuse s:strategy r:refresh")) + } + + return b.String() +} + +func (m *FuseModel) renderTimelineSelection(b *strings.Builder) { + // Current timeline + b.WriteString(" ") + b.WriteString(m.theme.SectionHead.Render("Target")) + b.WriteString(" ") + if m.current != "" { + b.WriteString(m.theme.Bold.Render(m.current)) + } else { + b.WriteString(m.theme.Dim.Render("(no current timeline)")) + } + b.WriteString("\n") + + // Strategy + b.WriteString(" ") + b.WriteString(m.theme.SectionHead.Render("Strategy")) + b.WriteString(" ") + strategy := fuseStrategies[m.strategyIndex] + b.WriteString(m.theme.Info.Render(string(strategy))) + b.WriteString(m.theme.Dim.Render(" — " + fuseStrategyDescs[strategy])) + b.WriteString("\n\n") + + // Source timeline selection + b.WriteString(" ") + b.WriteString(m.theme.SectionHead.Render("Select Source Timeline")) + b.WriteString("\n") + + if len(m.timelines) == 0 { + b.WriteString(" ") + b.WriteString(m.theme.Dim.Render("No other timelines available")) + b.WriteString("\n") + return + } + + for i, tl := range m.timelines { + b.WriteString(" ") + if i == m.cursor { + b.WriteString(m.theme.Cursor.Render("> ")) + name := tl.Name + if tl.IsButterfly { + name += " [butterfly]" + } + b.WriteString(m.theme.Selected.Render(padRight(name, 30))) + if tl.Hash != "" { + b.WriteString(m.theme.Dim.Render(" " + tl.Hash)) + } + } else { + b.WriteString(" ") + name := tl.Name + if tl.IsButterfly { + name += " [butterfly]" + } + 
b.WriteString(m.theme.Info.Render(padRight(name, 30))) + if tl.Hash != "" { + b.WriteString(m.theme.Dim.Render(" " + tl.Hash)) + } + } + b.WriteString("\n") + } +} + +func (m *FuseModel) renderMergeInProgress(b *strings.Builder) { + b.WriteString(" ") + b.WriteString(m.theme.Warning.Render("Merge in progress")) + b.WriteString("\n\n") + + b.WriteString(" ") + b.WriteString(m.theme.Bold.Render(fmt.Sprintf("%s -> %s", m.fuseStatus.SourceTimeline, m.fuseStatus.TargetTimeline))) + b.WriteString("\n") + + if len(m.fuseStatus.Conflicts) > 0 { + b.WriteString("\n ") + b.WriteString(m.theme.SectionHead.Render("Conflicts")) + b.WriteString("\n") + + for _, path := range m.fuseStatus.Conflicts { + b.WriteString(" ") + b.WriteString(m.theme.Error.Render("CONFLICT: ")) + b.WriteString(m.theme.Bold.Render(path)) + b.WriteString("\n") + } + + b.WriteString("\n ") + b.WriteString(m.theme.Dim.Render(fmt.Sprintf("%d file(s) with conflicts", len(m.fuseStatus.Conflicts)))) + b.WriteString("\n") + } + + b.WriteString("\n ") + b.WriteString(m.theme.SectionHead.Render("Options")) + b.WriteString("\n") + b.WriteString(" ") + b.WriteString(m.theme.Info.Render("a")) + b.WriteString(m.theme.Dim.Render(" — Abort merge")) + b.WriteString("\n") + b.WriteString(" ") + b.WriteString(m.theme.Dim.Render("Use CLI 'ivaldi fuse --continue' for interactive conflict resolution")) + b.WriteString("\n") +} + +func (m *FuseModel) renderResult(b *strings.Builder) { + result := m.lastResult + + switch result.Type { + case engine.FuseFastForward: + b.WriteString(" ") + b.WriteString(m.theme.Success.Render("Fast-forward merge complete")) + b.WriteString("\n\n") + b.WriteString(" ") + b.WriteString(m.theme.Bold.Render(fmt.Sprintf("%s fast-forwarded to %s", result.Target, result.Source))) + b.WriteString("\n") + + case engine.FuseMergeSuccess: + b.WriteString(" ") + b.WriteString(m.theme.Success.Render("Three-way merge complete")) + b.WriteString("\n\n") + b.WriteString(" ") + 
b.WriteString(m.theme.Bold.Render(fmt.Sprintf("%s fused into %s", result.Source, result.Target))) + b.WriteString("\n") + if result.SealName != "" { + b.WriteString(" ") + b.WriteString(m.theme.Dim.Render("Seal: ")) + b.WriteString(m.theme.Info.Render(result.SealName)) + b.WriteString("\n") + } + + // Change stats + if result.Added > 0 || result.Modified > 0 || result.Removed > 0 { + b.WriteString("\n ") + b.WriteString(m.theme.SectionHead.Render("Changes")) + b.WriteString("\n") + if result.Added > 0 { + b.WriteString(" ") + b.WriteString(m.theme.Success.Render(fmt.Sprintf("+ %d file(s)", result.Added))) + b.WriteString("\n") + } + if result.Modified > 0 { + b.WriteString(" ") + b.WriteString(m.theme.Info.Render(fmt.Sprintf("~ %d file(s)", result.Modified))) + b.WriteString("\n") + } + if result.Removed > 0 { + b.WriteString(" ") + b.WriteString(m.theme.Error.Render(fmt.Sprintf("- %d file(s)", result.Removed))) + b.WriteString("\n") + } + } + + case engine.FuseMergeConflicts: + b.WriteString(" ") + b.WriteString(m.theme.Warning.Render("Merge has conflicts")) + b.WriteString("\n\n") + for _, path := range result.Conflicts { + b.WriteString(" ") + b.WriteString(m.theme.Error.Render("CONFLICT: ")) + b.WriteString(m.theme.Bold.Render(path)) + b.WriteString("\n") + } + } +} + +// ShortHelp returns a short help string +func (m *FuseModel) ShortHelp() string { + return "j/k:navigate enter:fuse s:strategy a:abort r:refresh" +} + +// Async commands + +func (m *FuseModel) loadData() tea.Cmd { + ivaldiDir := m.ivaldiDir + return func() tea.Msg { + // Get timelines + result, err := engine.ListTimelines(ivaldiDir) + if err != nil { + return fuseTimelinesLoadedMsg{err: err} + } + + // Filter out current timeline from selection list + var selectable []engine.TimelineInfo + for _, tl := range result.Local { + if !tl.IsCurrent { + selectable = append(selectable, tl) + } + } + + // Get merge status + fuseStatus, _ := engine.GetFuseStatus(ivaldiDir) + + return 
fuseTimelinesLoadedMsg{ + timelines: selectable, + current: result.Current, + status: fuseStatus, + } + } +} + +func (m *FuseModel) doFuse(source, target, strategy string) tea.Cmd { + ivaldiDir := m.ivaldiDir + workDir := m.workDir + return func() tea.Msg { + result, err := engine.FuseTimelines(ivaldiDir, workDir, source, target, strategy) + return fuseDoneMsg{result: result, err: err} + } +} + +func (m *FuseModel) doAbort() tea.Cmd { + ivaldiDir := m.ivaldiDir + return func() tea.Msg { + err := engine.AbortFuse(ivaldiDir) + return fuseAbortDoneMsg{err: err} + } +} diff --git a/internal/tui/views/help.go b/internal/tui/views/help.go new file mode 100644 index 0000000..f70c699 --- /dev/null +++ b/internal/tui/views/help.go @@ -0,0 +1,142 @@ +package views + +import ( + "strings" + + "github.com/javanhut/Ivaldi-vcs/internal/tui/style" +) + +// HelpModel is the help overlay model +type HelpModel struct{} + +// NewHelpModel creates a new help model +func NewHelpModel() HelpModel { + return HelpModel{} +} + +type helpSection struct { + title string + keys []helpEntry +} + +type helpEntry struct { + key string + desc string +} + +// View renders the help overlay +func (h HelpModel) View(width, height int, theme style.Theme) string { + sections := []helpSection{ + { + title: "Global", + keys: []helpEntry{ + {"q / ctrl+c", "Quit"}, + {"?", "Toggle help"}, + {"tab", "Next tab"}, + {"shift+tab", "Previous tab"}, + {"1-6", "Jump to tab"}, + }, + }, + { + title: "Status View", + keys: []helpEntry{ + {"j / k / arrows", "Navigate files"}, + {"space", "Toggle file staging"}, + {"a", "Gather all unstaged files"}, + {"u", "Ungather all staged files"}, + {"s", "Seal (commit) staged files"}, + {"r", "Refresh status"}, + {"g", "Jump to top"}, + {"G", "Jump to bottom"}, + }, + }, + { + title: "Log View", + keys: []helpEntry{ + {"j / k / arrows", "Scroll up/down"}, + {"o", "Toggle oneline/full format"}, + {"t", "Toggle all timelines"}, + {"r", "Refresh log"}, + }, + }, + { + title: 
"Diff View", + keys: []helpEntry{ + {"j / k / arrows", "Scroll up/down"}, + {"pgup / ctrl+u", "Page up"}, + {"pgdn / ctrl+d", "Page down"}, + {"n / p", "Next/prev file"}, + {"g / G", "Top/bottom"}, + {"s", "Toggle staged diff"}, + {"r", "Refresh diff"}, + }, + }, + { + title: "Timeline View", + keys: []helpEntry{ + {"j / k / arrows", "Navigate timelines"}, + {"enter", "Switch to timeline"}, + {"c", "Create new timeline"}, + {"d", "Remove timeline"}, + {"R", "Rename timeline"}, + {"g / G", "Top/bottom"}, + {"r", "Refresh"}, + }, + }, + { + title: "Remote View", + keys: []helpEntry{ + {"s", "Scout remote timelines"}, + {"u", "Upload (push) current"}, + {"y", "Sync (pull) current"}, + {"h", "Harvest all new timelines"}, + {"enter", "Harvest selected timeline"}, + {"p", "Set portal (owner/repo)"}, + {"j / k", "Navigate scout results"}, + {"r", "Refresh"}, + }, + }, + { + title: "Fuse View", + keys: []helpEntry{ + {"j / k / arrows", "Navigate timelines"}, + {"enter / f", "Fuse selected into current"}, + {"s", "Cycle merge strategy"}, + {"a", "Abort merge in progress"}, + {"g / G", "Top/bottom"}, + {"r", "Refresh"}, + }, + }, + } + + var b strings.Builder + b.WriteString("\n") + b.WriteString(theme.Title.Render(" Ivaldi TUI — Keybinding Reference")) + b.WriteString("\n\n") + + for _, section := range sections { + b.WriteString(" ") + b.WriteString(theme.SectionHead.Render(section.title)) + b.WriteString("\n") + + for _, entry := range section.keys { + b.WriteString(" ") + b.WriteString(theme.HelpKey.Render(padRight(entry.key, 20))) + b.WriteString(theme.HelpDesc.Render(entry.desc)) + b.WriteString("\n") + } + b.WriteString("\n") + } + + b.WriteString(theme.Dim.Render(" Press any key to dismiss")) + + return b.String() +} + +// padRight pads a string to the given width +func padRight(s string, width int) string { + if len(s) >= width { + return s + } + return s + strings.Repeat(" ", width-len(s)) +} diff --git a/internal/tui/views/log.go b/internal/tui/views/log.go new 
file mode 100644 index 0000000..4a84caf --- /dev/null +++ b/internal/tui/views/log.go @@ -0,0 +1,348 @@ +package views + +import ( + "fmt" + "strings" + + "github.com/charmbracelet/bubbles/key" + "github.com/charmbracelet/bubbles/viewport" + tea "github.com/charmbracelet/bubbletea" + "github.com/javanhut/Ivaldi-vcs/internal/engine" + "github.com/javanhut/Ivaldi-vcs/internal/tui/style" +) + +// logKeyMap defines keybindings specific to the log view +type logKeyMap struct { + ToggleFormat key.Binding + ToggleTimelines key.Binding + Refresh key.Binding +} + +func defaultLogKeyMap() logKeyMap { + return logKeyMap{ + ToggleFormat: key.NewBinding( + key.WithKeys("o"), + key.WithHelp("o", "toggle oneline/full"), + ), + ToggleTimelines: key.NewBinding( + key.WithKeys("t"), + key.WithHelp("t", "toggle all timelines"), + ), + Refresh: key.NewBinding( + key.WithKeys("r"), + key.WithHelp("r", "refresh"), + ), + } +} + +// LogModel is the log view model +type LogModel struct { + ivaldiDir string + viewport viewport.Model + keys logKeyMap + theme style.Theme + + commits []engine.CommitEntry + currentTimeline string + oneline bool + allTimelines bool + loading bool + err error + ready bool + width int + height int +} + +// logLoadedMsg carries loaded log data +type logLoadedMsg struct { + commits []engine.CommitEntry + timeline string + err error +} + +// NewLogModel creates a new log view +func NewLogModel(ivaldiDir string) *LogModel { + return &LogModel{ + ivaldiDir: ivaldiDir, + keys: defaultLogKeyMap(), + theme: style.DefaultTheme(), + loading: true, + } +} + +// Init loads the initial log data +func (m *LogModel) Init() tea.Cmd { + m.loading = true + return m.loadLog() +} + +// Update handles messages +func (m *LogModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) { + switch msg := msg.(type) { + case tea.WindowSizeMsg: + m.width = msg.Width + m.height = msg.Height + if !m.ready { + m.viewport = viewport.New(msg.Width, msg.Height) + m.viewport.HighPerformanceRendering = false + 
m.ready = true + } else { + m.viewport.Width = msg.Width + m.viewport.Height = msg.Height + } + m.renderContent() + return m, nil + + case logLoadedMsg: + m.loading = false + if msg.err != nil { + m.err = msg.err + return m, nil + } + m.commits = msg.commits + m.currentTimeline = msg.timeline + m.err = nil + m.renderContent() + return m, nil + + case tea.KeyMsg: + switch { + case key.Matches(msg, m.keys.Refresh): + m.loading = true + return m, m.loadLog() + + case key.Matches(msg, m.keys.ToggleFormat): + m.oneline = !m.oneline + m.renderContent() + return m, nil + + case key.Matches(msg, m.keys.ToggleTimelines): + m.allTimelines = !m.allTimelines + m.loading = true + return m, m.loadLog() + } + + // Forward to viewport for scrolling + if m.ready { + var cmd tea.Cmd + m.viewport, cmd = m.viewport.Update(msg) + return m, cmd + } + } + + return m, nil +} + +// View renders the log view +func (m *LogModel) View() string { + if m.loading { + return m.theme.Dim.Render(" Loading commit history...") + } + + if m.err != nil { + return m.theme.Error.Render(fmt.Sprintf(" Error: %v", m.err)) + } + + if !m.ready { + return m.theme.Dim.Render(" Initializing...") + } + + var b strings.Builder + + // Header + b.WriteString(" ") + if m.allTimelines { + b.WriteString(m.theme.Title.Render("Commit History (all timelines)")) + } else { + b.WriteString(m.theme.Title.Render(fmt.Sprintf("Commit History — %s", m.currentTimeline))) + } + + formatLabel := "full" + if m.oneline { + formatLabel = "oneline" + } + b.WriteString(m.theme.Dim.Render(fmt.Sprintf(" [%s]", formatLabel))) + b.WriteString("\n") + + // Viewport content + b.WriteString(m.viewport.View()) + + // Scroll indicator + scrollPct := m.viewport.ScrollPercent() + b.WriteString("\n") + b.WriteString(m.theme.Dim.Render(fmt.Sprintf(" %d commits %.0f%%", len(m.commits), scrollPct*100))) + + return b.String() +} + +// ShortHelp returns a short help string +func (m *LogModel) ShortHelp() string { + return "j/k:scroll o:oneline/full 
t:all timelines r:refresh" +} + +// renderContent rebuilds the viewport content from commits +func (m *LogModel) renderContent() { + if !m.ready { + return + } + + if len(m.commits) == 0 { + m.viewport.SetContent(m.theme.Dim.Render(" No commits yet.")) + return + } + + var content string + if m.oneline { + content = m.renderOneline() + } else { + content = m.renderFull() + } + + m.viewport.SetContent(content) +} + +// renderFull renders commits in full multi-line format +func (m *LogModel) renderFull() string { + var b strings.Builder + + for i, entry := range m.commits { + // Commit identifier line + b.WriteString(" ") + if entry.SealName != "" { + b.WriteString(m.theme.Info.Render("seal ")) + b.WriteString(m.theme.Bold.Render(entry.SealName)) + } else { + b.WriteString(m.theme.Info.Render("commit ")) + b.WriteString(m.theme.Bold.Render(entry.Hash)) + } + + if entry.IsMerge { + b.WriteString(m.theme.Warning.Render(" (merge)")) + } + b.WriteString("\n") + + // Author + b.WriteString(" ") + b.WriteString(m.theme.Dim.Render("Author: ")) + b.WriteString(entry.Author) + b.WriteString("\n") + + // Date + relTime := engine.RelativeTime(entry.Time) + b.WriteString(" ") + b.WriteString(m.theme.Dim.Render("Date: ")) + b.WriteString(entry.Time.Format("Mon Jan 2 15:04:05 2006")) + b.WriteString(m.theme.Dim.Render(fmt.Sprintf(" (%s)", relTime))) + b.WriteString("\n") + + // Timeline (if showing all) + if m.allTimelines { + b.WriteString(" ") + b.WriteString(m.theme.Dim.Render("Timeline: ")) + b.WriteString(m.theme.Info.Render(entry.Timeline)) + b.WriteString("\n") + } + + // Parents (for merge commits) + if entry.IsMerge && len(entry.Parents) > 0 { + b.WriteString(" ") + b.WriteString(m.theme.Dim.Render("Parents: ")) + b.WriteString(m.theme.Dim.Render(strings.Join(entry.Parents, ", "))) + b.WriteString("\n") + } + + // Message + b.WriteString("\n") + // Indent each line of the message + msgLines := strings.Split(entry.Message, "\n") + for _, line := range msgLines { + 
b.WriteString(" ") + b.WriteString(line) + b.WriteString("\n") + } + + // Separator between commits + if i < len(m.commits)-1 { + b.WriteString("\n") + } + } + + return b.String() +} + +// renderOneline renders commits in compact one-line format +func (m *LogModel) renderOneline() string { + var b strings.Builder + + for _, entry := range m.commits { + b.WriteString(" ") + + // Identifier (seal name or short hash) + if entry.SealName != "" { + name := entry.SealName + if len(name) > 20 { + name = name[:20] + } + b.WriteString(m.theme.Info.Render(padRight(name, 22))) + } else { + b.WriteString(m.theme.Info.Render(padRight(entry.Hash, 22))) + } + + // Timeline indicator (if showing all) + if m.allTimelines { + label := entry.Timeline + if len(label) > 12 { + label = label[:12] + } + b.WriteString(m.theme.Dim.Render(padRight("["+label+"]", 15))) + } + + // Relative time + relTime := engine.RelativeTime(entry.Time) + b.WriteString(m.theme.Dim.Render(padRight(relTime, 16))) + + // Message (first line, truncated) + message := firstLine(entry.Message) + maxMsgLen := m.width - 60 + if m.allTimelines { + maxMsgLen -= 15 + } + if maxMsgLen < 20 { + maxMsgLen = 20 + } + if len(message) > maxMsgLen { + message = message[:maxMsgLen-3] + "..." 
+ } + b.WriteString(message) + + // Merge indicator + if entry.IsMerge { + b.WriteString(m.theme.Warning.Render(" *")) + } + + b.WriteString("\n") + } + + return b.String() +} + +// loadLog loads commit history asynchronously +func (m *LogModel) loadLog() tea.Cmd { + ivaldiDir := m.ivaldiDir + allTimelines := m.allTimelines + return func() tea.Msg { + opts := engine.LogOptions{ + AllTimelines: allTimelines, + } + commits, timeline, err := engine.GetCommitHistory(ivaldiDir, opts) + return logLoadedMsg{commits: commits, timeline: timeline, err: err} + } +} + +// firstLine returns the first line of a string +func firstLine(s string) string { + if idx := strings.IndexByte(s, '\n'); idx >= 0 { + return s[:idx] + } + return s +} diff --git a/internal/tui/views/remote.go b/internal/tui/views/remote.go new file mode 100644 index 0000000..735308d --- /dev/null +++ b/internal/tui/views/remote.go @@ -0,0 +1,503 @@ +package views + +import ( + "fmt" + "strings" + + "github.com/charmbracelet/bubbles/key" + tea "github.com/charmbracelet/bubbletea" + "github.com/javanhut/Ivaldi-vcs/internal/engine" + "github.com/javanhut/Ivaldi-vcs/internal/tui/components" + "github.com/javanhut/Ivaldi-vcs/internal/tui/style" +) + +// remoteKeyMap defines keybindings for the remote view +type remoteKeyMap struct { + Scout key.Binding + Upload key.Binding + Sync key.Binding + Harvest key.Binding + Portal key.Binding + Refresh key.Binding + Up key.Binding + Down key.Binding + Select key.Binding +} + +func defaultRemoteKeyMap() remoteKeyMap { + return remoteKeyMap{ + Scout: key.NewBinding( + key.WithKeys("s"), + key.WithHelp("s", "scout remotes"), + ), + Upload: key.NewBinding( + key.WithKeys("u"), + key.WithHelp("u", "upload (push)"), + ), + Sync: key.NewBinding( + key.WithKeys("y"), + key.WithHelp("y", "sync (pull)"), + ), + Harvest: key.NewBinding( + key.WithKeys("h"), + key.WithHelp("h", "harvest all new"), + ), + Portal: key.NewBinding( + key.WithKeys("p"), + key.WithHelp("p", "set portal"), + 
), + Refresh: key.NewBinding( + key.WithKeys("r"), + key.WithHelp("r", "refresh"), + ), + Up: key.NewBinding( + key.WithKeys("up", "k"), + key.WithHelp("k/up", "move up"), + ), + Down: key.NewBinding( + key.WithKeys("down", "j"), + key.WithHelp("j/down", "move down"), + ), + Select: key.NewBinding( + key.WithKeys("enter"), + key.WithHelp("enter", "harvest selected"), + ), + } +} + +// RemoteModel is the remote operations view model +type RemoteModel struct { + workDir string + ivaldiDir string + keys remoteKeyMap + theme style.Theme + dialog components.Dialog + + portal *engine.PortalInfo + scout *engine.ScoutResult + loading bool + busy string // operation in progress description + err error + msg string // success message + width int + height int + + // Cursor for scout results (remote-only timelines) + cursor int + harvestables []string +} + +// Remote message types + +type portalLoadedMsg struct { + portal *engine.PortalInfo + err error +} + +type scoutDoneMsg struct { + result *engine.ScoutResult + err error +} + +type uploadDoneMsg struct { + result *engine.UploadResult + err error +} + +type syncDoneMsg struct { + result *engine.SyncResult + err error +} + +type harvestDoneMsg struct { + result *engine.HarvestResult + err error +} + +type portalSetMsg struct { + err error +} + +// NewRemoteModel creates a new remote operations view +func NewRemoteModel(workDir, ivaldiDir string) *RemoteModel { + return &RemoteModel{ + workDir: workDir, + ivaldiDir: ivaldiDir, + keys: defaultRemoteKeyMap(), + theme: style.DefaultTheme(), + dialog: components.NewDialog(), + loading: true, + } +} + +// Init loads portal info +func (m *RemoteModel) Init() tea.Cmd { + m.loading = true + return m.loadPortal() +} + +// Update handles messages +func (m *RemoteModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) { + switch msg := msg.(type) { + case tea.WindowSizeMsg: + m.width = msg.Width + m.height = msg.Height + return m, nil + + case portalLoadedMsg: + m.loading = false + if msg.err 
!= nil { + m.portal = nil + // Not an error - just no portal configured + m.err = nil + return m, nil + } + m.portal = msg.portal + m.err = nil + return m, nil + + case scoutDoneMsg: + m.busy = "" + if msg.err != nil { + m.err = msg.err + return m, nil + } + m.scout = msg.result + m.err = nil + m.harvestables = msg.result.RemoteOnly + m.cursor = 0 + m.msg = fmt.Sprintf("Scouted %d remote timelines", msg.result.Total) + return m, nil + + case uploadDoneMsg: + m.busy = "" + if msg.err != nil { + m.err = msg.err + return m, nil + } + m.err = nil + m.msg = fmt.Sprintf("Uploaded '%s' to %s/%s", msg.result.Timeline, msg.result.Owner, msg.result.Repo) + return m, nil + + case syncDoneMsg: + m.busy = "" + if msg.err != nil { + m.err = msg.err + return m, nil + } + m.err = nil + if msg.result.NoChanges { + m.msg = fmt.Sprintf("'%s' is already up to date", msg.result.Timeline) + } else { + total := len(msg.result.Added) + len(msg.result.Modified) + len(msg.result.Deleted) + m.msg = fmt.Sprintf("Synced %d file(s) for '%s'", total, msg.result.Timeline) + } + return m, nil + + case harvestDoneMsg: + m.busy = "" + if msg.err != nil { + m.err = msg.err + return m, nil + } + m.err = nil + if len(msg.result.Successful) == 0 && len(msg.result.Failed) == 0 { + m.msg = "No new timelines to harvest" + } else if len(msg.result.Failed) == 0 { + m.msg = fmt.Sprintf("Harvested %d timeline(s)", len(msg.result.Successful)) + } else { + m.msg = fmt.Sprintf("Harvested %d, failed %d", len(msg.result.Successful), len(msg.result.Failed)) + } + // Refresh scout data + return m, m.doScout() + + case portalSetMsg: + if msg.err != nil { + m.err = msg.err + return m, nil + } + m.msg = "Portal configured" + m.loading = true + return m, m.loadPortal() + + case components.DialogSubmitMsg: + if msg.Type == components.DialogPortal { // reuse DialogSeal type for portal input + m.busy = "Configuring portal..." 
+ return m, m.setPortal(msg.Value) + } + return m, nil + + case components.DialogCancelMsg: + return m, nil + + case tea.KeyMsg: + // Handle dialog first + if m.dialog.IsActive() { + cmd, consumed := m.dialog.Update(msg) + if consumed { + return m, cmd + } + } + + // Don't process keys while busy + if m.busy != "" { + return m, nil + } + + switch { + case key.Matches(msg, m.keys.Refresh): + m.loading = true + m.msg = "" + m.err = nil + return m, m.loadPortal() + + case key.Matches(msg, m.keys.Scout): + if m.portal == nil { + m.err = fmt.Errorf("no portal configured. Press 'p' to add one") + return m, nil + } + m.busy = "Scouting remote timelines..." + m.msg = "" + m.err = nil + return m, m.doScout() + + case key.Matches(msg, m.keys.Upload): + if m.portal == nil { + m.err = fmt.Errorf("no portal configured") + return m, nil + } + m.busy = "Uploading..." + m.msg = "" + m.err = nil + return m, m.doUpload() + + case key.Matches(msg, m.keys.Sync): + if m.portal == nil { + m.err = fmt.Errorf("no portal configured") + return m, nil + } + m.busy = "Syncing..." + m.msg = "" + m.err = nil + return m, m.doSync() + + case key.Matches(msg, m.keys.Harvest): + if m.portal == nil { + m.err = fmt.Errorf("no portal configured") + return m, nil + } + m.busy = "Harvesting all new timelines..." 
+ m.msg = "" + m.err = nil + return m, m.doHarvestAll() + + case key.Matches(msg, m.keys.Select): + if len(m.harvestables) > 0 && m.cursor < len(m.harvestables) { + name := m.harvestables[m.cursor] + m.busy = fmt.Sprintf("Harvesting '%s'...", name) + m.msg = "" + m.err = nil + return m, m.doHarvestOne(name) + } + return m, nil + + case key.Matches(msg, m.keys.Portal): + cmd := m.dialog.Open(components.DialogPortal, "Portal (owner/repo):", "owner/repo") + return m, cmd + + case key.Matches(msg, m.keys.Up): + if m.cursor > 0 { + m.cursor-- + } + return m, nil + + case key.Matches(msg, m.keys.Down): + if m.cursor < len(m.harvestables)-1 { + m.cursor++ + } + return m, nil + } + } + + return m, nil +} + +// View renders the remote operations view +func (m *RemoteModel) View() string { + if m.loading { + return m.theme.Dim.Render(" Loading remote info...") + } + + var b strings.Builder + + // Header + b.WriteString(" ") + b.WriteString(m.theme.Title.Render("Remote Operations")) + b.WriteString("\n\n") + + // Portal section + b.WriteString(" ") + b.WriteString(m.theme.SectionHead.Render("Portal")) + b.WriteString("\n") + if m.portal != nil { + b.WriteString(" ") + b.WriteString(m.theme.Bold.Render(fmt.Sprintf("%s/%s", m.portal.Owner, m.portal.Repo))) + if m.portal.HasAuth { + b.WriteString(m.theme.Success.Render(" [authenticated]")) + } else { + b.WriteString(m.theme.Warning.Render(" [no auth]")) + } + b.WriteString("\n") + b.WriteString(" ") + b.WriteString(m.theme.Dim.Render(fmt.Sprintf("Timeline: %s", m.portal.Timeline))) + b.WriteString("\n") + } else { + b.WriteString(" ") + b.WriteString(m.theme.Dim.Render("No repository configured. 
Press 'p' to add one.")) + b.WriteString("\n") + } + + // Scout results + if m.scout != nil { + b.WriteString("\n ") + b.WriteString(m.theme.SectionHead.Render("Scout Results")) + b.WriteString("\n") + + if len(m.scout.RemoteOnly) > 0 { + b.WriteString(" ") + b.WriteString(m.theme.Info.Render(fmt.Sprintf("Available to harvest: %d", len(m.scout.RemoteOnly)))) + b.WriteString("\n") + for i, name := range m.harvestables { + b.WriteString(" ") + if i == m.cursor { + b.WriteString(m.theme.Cursor.Render("> ")) + b.WriteString(m.theme.Selected.Render(padRight(name, 30))) + } else { + b.WriteString(" ") + b.WriteString(m.theme.Info.Render(padRight(name, 30))) + } + b.WriteString("\n") + } + } + + if len(m.scout.Both) > 0 { + b.WriteString(" ") + b.WriteString(m.theme.Success.Render(fmt.Sprintf("Synced locally: %d", len(m.scout.Both)))) + b.WriteString(" ") + b.WriteString(m.theme.Dim.Render(strings.Join(m.scout.Both, ", "))) + b.WriteString("\n") + } + + if len(m.scout.LocalOnly) > 0 { + b.WriteString(" ") + b.WriteString(m.theme.Warning.Render(fmt.Sprintf("Local only: %d", len(m.scout.LocalOnly)))) + b.WriteString(" ") + b.WriteString(m.theme.Dim.Render(strings.Join(m.scout.LocalOnly, ", "))) + b.WriteString("\n") + } + } + + // Busy indicator + if m.busy != "" { + b.WriteString("\n ") + b.WriteString(m.theme.Info.Render("~ " + m.busy)) + b.WriteString("\n") + } + + // Success message + if m.msg != "" && m.busy == "" { + b.WriteString("\n ") + b.WriteString(m.theme.Success.Render(m.msg)) + b.WriteString("\n") + } + + // Error + if m.err != nil && m.busy == "" { + b.WriteString("\n ") + b.WriteString(m.theme.Error.Render(fmt.Sprintf("Error: %v", m.err))) + b.WriteString("\n") + } + + // Dialog overlay + if m.dialog.IsActive() { + b.WriteString(m.dialog.View(m.theme)) + } + + // Footer + b.WriteString("\n") + if m.portal != nil { + b.WriteString(m.theme.Dim.Render(" s:scout u:upload y:sync h:harvest all enter:harvest selected p:portal r:refresh")) + } else { + 
b.WriteString(m.theme.Dim.Render(" p:set portal r:refresh")) + } + + return b.String() +} + +// ShortHelp returns a short help string +func (m *RemoteModel) ShortHelp() string { + return "s:scout u:upload y:sync h:harvest p:portal r:refresh" +} + +// Async commands + +func (m *RemoteModel) loadPortal() tea.Cmd { + ivaldiDir := m.ivaldiDir + return func() tea.Msg { + portal, err := engine.GetPortalInfo(ivaldiDir) + return portalLoadedMsg{portal: portal, err: err} + } +} + +func (m *RemoteModel) doScout() tea.Cmd { + ivaldiDir := m.ivaldiDir + workDir := m.workDir + return func() tea.Msg { + result, err := engine.Scout(ivaldiDir, workDir) + return scoutDoneMsg{result: result, err: err} + } +} + +func (m *RemoteModel) doUpload() tea.Cmd { + ivaldiDir := m.ivaldiDir + workDir := m.workDir + return func() tea.Msg { + result, err := engine.Upload(ivaldiDir, workDir, false) + return uploadDoneMsg{result: result, err: err} + } +} + +func (m *RemoteModel) doSync() tea.Cmd { + ivaldiDir := m.ivaldiDir + workDir := m.workDir + return func() tea.Msg { + result, err := engine.SyncTimeline(ivaldiDir, workDir, "") + return syncDoneMsg{result: result, err: err} + } +} + +func (m *RemoteModel) doHarvestAll() tea.Cmd { + ivaldiDir := m.ivaldiDir + workDir := m.workDir + return func() tea.Msg { + result, err := engine.HarvestTimelines(ivaldiDir, workDir, nil) + return harvestDoneMsg{result: result, err: err} + } +} + +func (m *RemoteModel) doHarvestOne(name string) tea.Cmd { + ivaldiDir := m.ivaldiDir + workDir := m.workDir + return func() tea.Msg { + result, err := engine.HarvestTimelines(ivaldiDir, workDir, []string{name}) + return harvestDoneMsg{result: result, err: err} + } +} + +func (m *RemoteModel) setPortal(ownerRepo string) tea.Cmd { + ivaldiDir := m.ivaldiDir + return func() tea.Msg { + err := engine.SetPortal(ivaldiDir, ownerRepo) + return portalSetMsg{err: err} + } +} diff --git a/internal/tui/views/status.go b/internal/tui/views/status.go new file mode 100644 index 
0000000..cc1c15d --- /dev/null +++ b/internal/tui/views/status.go @@ -0,0 +1,346 @@ +package views + +import ( + "fmt" + "strings" + + "github.com/charmbracelet/bubbles/key" + tea "github.com/charmbracelet/bubbletea" + "github.com/javanhut/Ivaldi-vcs/internal/engine" + "github.com/javanhut/Ivaldi-vcs/internal/tui/components" + "github.com/javanhut/Ivaldi-vcs/internal/tui/style" +) + +// statusKeyMap defines keybindings specific to the status view +type statusKeyMap struct { + GatherAll key.Binding + UngatherAll key.Binding + Refresh key.Binding + Seal key.Binding +} + +func defaultStatusKeyMap() statusKeyMap { + return statusKeyMap{ + GatherAll: key.NewBinding( + key.WithKeys("a"), + key.WithHelp("a", "gather all"), + ), + UngatherAll: key.NewBinding( + key.WithKeys("u"), + key.WithHelp("u", "ungather all"), + ), + Refresh: key.NewBinding( + key.WithKeys("r"), + key.WithHelp("r", "refresh"), + ), + Seal: key.NewBinding( + key.WithKeys("s"), + key.WithHelp("s", "seal (commit)"), + ), + } +} + +// StatusModel is the status view model +type StatusModel struct { + workDir string + ivaldiDir string + fileList components.FileList + dialog components.Dialog + keys statusKeyMap + theme style.Theme + width int + height int + status *engine.StatusResult + err error + loading bool + sealMsg string // success message after seal +} + +// statusLoadedMsg carries loaded status data +type statusLoadedMsg struct { + result *engine.StatusResult + err error +} + +// statusStagingDoneMsg signals staging operation completed +type statusStagingDoneMsg struct { + err error +} + +// sealDoneMsg signals seal operation completed +type sealDoneMsg struct { + result *engine.SealResult + err error +} + +// NewStatusModel creates a new status view +func NewStatusModel(workDir, ivaldiDir string) *StatusModel { + fl := components.NewFileList() + fl.Focus(true) + + return &StatusModel{ + workDir: workDir, + ivaldiDir: ivaldiDir, + fileList: fl, + dialog: components.NewDialog(), + keys: 
defaultStatusKeyMap(), + theme: style.DefaultTheme(), + loading: true, + } +} + +// Init loads the initial status data +func (m *StatusModel) Init() tea.Cmd { + m.loading = true + return m.loadStatus() +} + +// Update handles messages +func (m *StatusModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) { + switch msg := msg.(type) { + case tea.WindowSizeMsg: + m.width = msg.Width + m.height = msg.Height + listHeight := msg.Height - 4 + if listHeight < 1 { + listHeight = 1 + } + m.fileList.SetSize(msg.Width, listHeight) + return m, nil + + case statusLoadedMsg: + m.loading = false + if msg.err != nil { + m.err = msg.err + return m, nil + } + m.status = msg.result + m.err = nil + m.rebuildFileList() + + return m, func() tea.Msg { + return style.StatusUpdateMsg{ + Timeline: msg.result.Timeline, + SealName: msg.result.SealName, + FileCount: msg.result.FileCount, + Staged: len(msg.result.Staged), + Modified: len(msg.result.Modified), + Untracked: len(msg.result.Untracked), + Deleted: len(msg.result.Deleted), + } + } + + case statusStagingDoneMsg: + if msg.err != nil { + m.err = msg.err + return m, nil + } + return m, m.loadStatus() + + case sealDoneMsg: + m.loading = false + if msg.err != nil { + m.err = msg.err + return m, nil + } + m.err = nil + m.sealMsg = fmt.Sprintf("Sealed '%s' on %s (%s)", msg.result.SealName, msg.result.Timeline, msg.result.Hash) + return m, m.loadStatus() + + case components.DialogSubmitMsg: + if msg.Type == components.DialogSeal { + m.loading = true + m.sealMsg = "" + return m, m.createSeal(msg.Value) + } + return m, nil + + case components.DialogCancelMsg: + return m, nil + + case tea.KeyMsg: + // Handle dialog input first + if m.dialog.IsActive() { + cmd, consumed := m.dialog.Update(msg) + if consumed { + return m, cmd + } + } + + switch { + case key.Matches(msg, m.keys.Refresh): + m.loading = true + m.sealMsg = "" + return m, m.loadStatus() + + case key.Matches(msg, m.keys.Seal): + if m.status != nil && len(m.status.Staged) > 0 { + cmd := 
m.dialog.Open(components.DialogSeal, "Seal message:", "describe your changes") + return m, cmd + } + m.err = fmt.Errorf("no files staged. Gather files first") + return m, nil + + case key.Matches(msg, m.keys.GatherAll): + return m, m.gatherAll() + + case key.Matches(msg, m.keys.UngatherAll): + return m, m.ungatherAll() + + case key.Matches(msg, m.fileList.Keys.Toggle): + item := m.fileList.SelectedItem() + if item != nil { + return m, m.toggleFile(item.Info) + } + } + + cmd := m.fileList.Update(msg) + return m, cmd + } + + return m, nil +} + +// View renders the status view +func (m *StatusModel) View() string { + if m.loading { + return m.theme.Dim.Render(" Loading status...") + } + + if m.err != nil { + return m.theme.Error.Render(fmt.Sprintf(" Error: %v", m.err)) + } + + if m.status == nil { + return m.theme.Dim.Render(" No status data") + } + + var b strings.Builder + + // Seal success message + if m.sealMsg != "" { + b.WriteString(" ") + b.WriteString(m.theme.Success.Render(m.sealMsg)) + b.WriteString("\n\n") + } + + total := len(m.status.Files) + if total == 0 { + b.WriteString(m.theme.Success.Render(" Working directory clean")) + b.WriteString("\n\n") + b.WriteString(m.theme.Dim.Render(" Press r to refresh")) + } else { + var parts []string + if len(m.status.Staged) > 0 { + parts = append(parts, m.theme.Staged.Render(fmt.Sprintf("%d staged", len(m.status.Staged)))) + } + if len(m.status.Modified) > 0 { + parts = append(parts, m.theme.Modified.Render(fmt.Sprintf("%d modified", len(m.status.Modified)))) + } + if len(m.status.Untracked) > 0 { + parts = append(parts, m.theme.Untracked.Render(fmt.Sprintf("%d untracked", len(m.status.Untracked)))) + } + if len(m.status.Deleted) > 0 { + parts = append(parts, m.theme.Deleted.Render(fmt.Sprintf("%d deleted", len(m.status.Deleted)))) + } + + b.WriteString(" ") + b.WriteString(strings.Join(parts, m.theme.Dim.Render(" | "))) + b.WriteString("\n\n") + + b.WriteString(m.fileList.View(m.theme)) + } + + // Dialog overlay + 
if m.dialog.IsActive() { + b.WriteString(m.dialog.View(m.theme)) + } + + return b.String() +} + +// ShortHelp returns a short help string for the status bar +func (m *StatusModel) ShortHelp() string { + return "j/k:nav space:toggle a:gather all u:ungather all s:seal r:refresh" +} + +// rebuildFileList rebuilds the file list from current status +func (m *StatusModel) rebuildFileList() { + if m.status == nil { + m.fileList.SetItems(nil) + return + } + + var items []components.FileItem + + for _, f := range m.status.Staged { + items = append(items, components.FileItem{Info: f}) + } + for _, f := range m.status.Modified { + items = append(items, components.FileItem{Info: f}) + } + for _, f := range m.status.Deleted { + items = append(items, components.FileItem{Info: f}) + } + for _, f := range m.status.Untracked { + items = append(items, components.FileItem{Info: f}) + } + + m.fileList.SetItems(items) +} + +// loadStatus loads status data asynchronously +func (m *StatusModel) loadStatus() tea.Cmd { + workDir := m.workDir + ivaldiDir := m.ivaldiDir + return func() tea.Msg { + result, err := engine.GetFileStatuses(workDir, ivaldiDir) + return statusLoadedMsg{result: result, err: err} + } +} + +// toggleFile toggles the staging state of a file +func (m *StatusModel) toggleFile(info engine.FileStatusInfo) tea.Cmd { + workDir := m.workDir + ivaldiDir := m.ivaldiDir + return func() tea.Msg { + var err error + switch info.Status { + case engine.StatusStaged, engine.StatusAdded: + err = engine.UngatherFiles(ivaldiDir, []string{info.Path}) + case engine.StatusModified, engine.StatusUntracked, engine.StatusDeleted: + err = engine.GatherFiles(workDir, ivaldiDir, []string{info.Path}) + } + return statusStagingDoneMsg{err: err} + } +} + +// gatherAll stages all unstaged files +func (m *StatusModel) gatherAll() tea.Cmd { + workDir := m.workDir + ivaldiDir := m.ivaldiDir + files := m.status.Files + return func() tea.Msg { + err := engine.GatherAllUnstaged(workDir, ivaldiDir, files) + 
return statusStagingDoneMsg{err: err} + } +} + +// ungatherAll removes all files from staging +func (m *StatusModel) ungatherAll() tea.Cmd { + ivaldiDir := m.ivaldiDir + return func() tea.Msg { + err := engine.UngatherAll(ivaldiDir) + return statusStagingDoneMsg{err: err} + } +} + +// createSeal creates a seal (commit) asynchronously +func (m *StatusModel) createSeal(message string) tea.Cmd { + workDir := m.workDir + ivaldiDir := m.ivaldiDir + return func() tea.Msg { + result, err := engine.CreateSeal(ivaldiDir, workDir, message) + return sealDoneMsg{result: result, err: err} + } +} diff --git a/internal/tui/views/timeline.go b/internal/tui/views/timeline.go new file mode 100644 index 0000000..83956df --- /dev/null +++ b/internal/tui/views/timeline.go @@ -0,0 +1,497 @@ +package views + +import ( + "fmt" + "strings" + + "github.com/charmbracelet/bubbles/key" + "github.com/charmbracelet/bubbles/textinput" + tea "github.com/charmbracelet/bubbletea" + "github.com/javanhut/Ivaldi-vcs/internal/engine" + "github.com/javanhut/Ivaldi-vcs/internal/tui/style" +) + +// timelineKeyMap defines keybindings for the timeline view +type timelineKeyMap struct { + Up key.Binding + Down key.Binding + Top key.Binding + Bottom key.Binding + Switch key.Binding + Create key.Binding + Remove key.Binding + Rename key.Binding + Refresh key.Binding +} + +func defaultTimelineKeyMap() timelineKeyMap { + return timelineKeyMap{ + Up: key.NewBinding( + key.WithKeys("up", "k"), + key.WithHelp("k/up", "move up"), + ), + Down: key.NewBinding( + key.WithKeys("down", "j"), + key.WithHelp("j/down", "move down"), + ), + Top: key.NewBinding( + key.WithKeys("g"), + key.WithHelp("g", "top"), + ), + Bottom: key.NewBinding( + key.WithKeys("G"), + key.WithHelp("G", "bottom"), + ), + Switch: key.NewBinding( + key.WithKeys("enter"), + key.WithHelp("enter", "switch to timeline"), + ), + Create: key.NewBinding( + key.WithKeys("c"), + key.WithHelp("c", "create timeline"), + ), + Remove: key.NewBinding( + 
key.WithKeys("d"), + key.WithHelp("d", "remove timeline"), + ), + Rename: key.NewBinding( + key.WithKeys("R"), + key.WithHelp("R", "rename timeline"), + ), + Refresh: key.NewBinding( + key.WithKeys("r"), + key.WithHelp("r", "refresh"), + ), + } +} + +// inputMode tracks which input dialog is active +type inputMode int + +const ( + inputNone inputMode = iota + inputCreate + inputRename + inputConfirmRemove +) + +// TimelineModel is the timeline view model +type TimelineModel struct { + workDir string + ivaldiDir string + keys timelineKeyMap + theme style.Theme + + result *engine.TimelineListResult + loading bool + err error + width int + height int + + cursor int // cursor position in local timelines list + + // Input dialog state + mode inputMode + textInput textinput.Model + renameOld string // old name when renaming +} + +// timelineLoadedMsg carries loaded timeline data +type timelineLoadedMsg struct { + result *engine.TimelineListResult + err error +} + +// timelineSwitchedMsg signals a timeline switch completed +type timelineSwitchedMsg struct { + name string + err error +} + +// timelineActionMsg signals a create/remove/rename completed +type timelineActionMsg struct { + action string + err error +} + +// NewTimelineModel creates a new timeline view +func NewTimelineModel(workDir, ivaldiDir string) *TimelineModel { + ti := textinput.New() + ti.CharLimit = 64 + ti.Width = 30 + + return &TimelineModel{ + workDir: workDir, + ivaldiDir: ivaldiDir, + keys: defaultTimelineKeyMap(), + theme: style.DefaultTheme(), + loading: true, + textInput: ti, + } +} + +// Init loads timeline data +func (m *TimelineModel) Init() tea.Cmd { + m.loading = true + return m.loadTimelines() +} + +// Update handles messages +func (m *TimelineModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) { + switch msg := msg.(type) { + case tea.WindowSizeMsg: + m.width = msg.Width + m.height = msg.Height + return m, nil + + case timelineLoadedMsg: + m.loading = false + if msg.err != nil { + m.err = 
msg.err + return m, nil + } + m.result = msg.result + m.err = nil + // Clamp cursor + if m.result != nil && m.cursor >= len(m.result.Local) { + m.cursor = len(m.result.Local) - 1 + } + if m.cursor < 0 { + m.cursor = 0 + } + return m, nil + + case timelineSwitchedMsg: + if msg.err != nil { + m.err = msg.err + return m, nil + } + m.loading = true + return m, m.loadTimelines() + + case timelineActionMsg: + if msg.err != nil { + m.err = msg.err + return m, nil + } + m.loading = true + return m, m.loadTimelines() + + case tea.KeyMsg: + // Handle input mode first + if m.mode != inputNone { + return m.updateInput(msg) + } + + switch { + case key.Matches(msg, m.keys.Refresh): + m.loading = true + return m, m.loadTimelines() + + case key.Matches(msg, m.keys.Up): + if m.cursor > 0 { + m.cursor-- + } + return m, nil + + case key.Matches(msg, m.keys.Down): + if m.result != nil && m.cursor < len(m.result.Local)-1 { + m.cursor++ + } + return m, nil + + case key.Matches(msg, m.keys.Top): + m.cursor = 0 + return m, nil + + case key.Matches(msg, m.keys.Bottom): + if m.result != nil && len(m.result.Local) > 0 { + m.cursor = len(m.result.Local) - 1 + } + return m, nil + + case key.Matches(msg, m.keys.Switch): + if m.result != nil && len(m.result.Local) > 0 { + selected := m.result.Local[m.cursor] + if selected.IsCurrent { + return m, nil // already on this timeline + } + m.loading = true + return m, m.switchTimeline(selected.Name) + } + return m, nil + + case key.Matches(msg, m.keys.Create): + m.mode = inputCreate + m.textInput.Placeholder = "new timeline name" + m.textInput.SetValue("") + m.textInput.Focus() + return m, textinput.Blink + + case key.Matches(msg, m.keys.Remove): + if m.result != nil && len(m.result.Local) > 0 { + selected := m.result.Local[m.cursor] + if selected.IsCurrent { + m.err = fmt.Errorf("cannot remove current timeline") + return m, nil + } + m.mode = inputConfirmRemove + m.textInput.Placeholder = "type 'yes' to confirm" + m.textInput.SetValue("") + 
m.textInput.Focus() + return m, textinput.Blink + } + return m, nil + + case key.Matches(msg, m.keys.Rename): + if m.result != nil && len(m.result.Local) > 0 { + m.renameOld = m.result.Local[m.cursor].Name + m.mode = inputRename + m.textInput.Placeholder = "new name" + m.textInput.SetValue("") + m.textInput.Focus() + return m, textinput.Blink + } + return m, nil + } + } + + return m, nil +} + +// updateInput handles key events when an input dialog is active +func (m *TimelineModel) updateInput(msg tea.KeyMsg) (tea.Model, tea.Cmd) { + switch msg.String() { + case "esc": + m.mode = inputNone + m.textInput.Blur() + return m, nil + + case "enter": + value := strings.TrimSpace(m.textInput.Value()) + m.textInput.Blur() + mode := m.mode + m.mode = inputNone + + switch mode { + case inputCreate: + if value == "" { + return m, nil + } + m.loading = true + return m, m.createTimeline(value) + + case inputRename: + if value == "" { + return m, nil + } + m.loading = true + return m, m.renameTimeline(m.renameOld, value) + + case inputConfirmRemove: + if value == "yes" && m.result != nil && len(m.result.Local) > 0 { + selected := m.result.Local[m.cursor] + m.loading = true + return m, m.removeTimeline(selected.Name) + } + return m, nil + } + return m, nil + } + + // Forward to text input + var cmd tea.Cmd + m.textInput, cmd = m.textInput.Update(msg) + return m, cmd +} + +// View renders the timeline view +func (m *TimelineModel) View() string { + if m.loading { + return m.theme.Dim.Render(" Loading timelines...") + } + + if m.err != nil { + return m.theme.Error.Render(fmt.Sprintf(" Error: %v", m.err)) + } + + if m.result == nil { + return m.theme.Dim.Render(" No timeline data") + } + + var b strings.Builder + + // Header + b.WriteString(" ") + b.WriteString(m.theme.Title.Render("Timelines")) + b.WriteString(" ") + b.WriteString(m.theme.Dim.Render(fmt.Sprintf("(current: %s)", m.result.Current))) + b.WriteString("\n\n") + + // Local timelines + b.WriteString(" ") + 
b.WriteString(m.theme.SectionHead.Render("Local")) + b.WriteString("\n") + + if len(m.result.Local) == 0 { + b.WriteString(m.theme.Dim.Render(" No local timelines\n")) + } else { + for i, tl := range m.result.Local { + b.WriteString(" ") + + // Cursor + if i == m.cursor { + b.WriteString(m.theme.Cursor.Render("> ")) + } else { + b.WriteString(" ") + } + + // Current marker + if tl.IsCurrent { + b.WriteString(m.theme.Success.Render("* ")) + } else { + b.WriteString(" ") + } + + // Name + name := tl.Name + if tl.IsButterfly { + name += " [butterfly]" + } + + if i == m.cursor { + b.WriteString(m.theme.Selected.Render(padRight(name, 30))) + } else if tl.IsCurrent { + b.WriteString(m.theme.Success.Render(padRight(name, 30))) + } else { + b.WriteString(padRight(name, 30)) + } + + // Hash + if tl.Hash != "" { + b.WriteString(m.theme.Dim.Render(" " + tl.Hash)) + } + + // Description + if tl.Description != "" { + b.WriteString(m.theme.Dim.Render(" " + truncate(tl.Description, 40))) + } + + b.WriteString("\n") + } + } + + // Remote timelines + if len(m.result.Remote) > 0 { + b.WriteString("\n ") + b.WriteString(m.theme.SectionHead.Render("Remote")) + b.WriteString("\n") + for _, tl := range m.result.Remote { + b.WriteString(" ") + b.WriteString(m.theme.Info.Render(padRight(tl.Name, 30))) + if tl.Description != "" { + b.WriteString(m.theme.Dim.Render(" " + truncate(tl.Description, 40))) + } + b.WriteString("\n") + } + } + + // Tags + if len(m.result.Tags) > 0 { + b.WriteString("\n ") + b.WriteString(m.theme.SectionHead.Render("Tags")) + b.WriteString("\n") + for _, tl := range m.result.Tags { + b.WriteString(" ") + b.WriteString(m.theme.Warning.Render(padRight(tl.Name, 30))) + if tl.Description != "" { + b.WriteString(m.theme.Dim.Render(" " + truncate(tl.Description, 40))) + } + b.WriteString("\n") + } + } + + // Input dialog + if m.mode != inputNone { + b.WriteString("\n") + switch m.mode { + case inputCreate: + b.WriteString(m.theme.Title.Render(" Create timeline: ")) + 
case inputRename: + b.WriteString(m.theme.Title.Render(fmt.Sprintf(" Rename '%s' to: ", m.renameOld))) + case inputConfirmRemove: + if m.result != nil && m.cursor < len(m.result.Local) { + b.WriteString(m.theme.Warning.Render( + fmt.Sprintf(" Remove '%s'? ", m.result.Local[m.cursor].Name))) + } + } + b.WriteString(m.textInput.View()) + b.WriteString("\n") + b.WriteString(m.theme.Dim.Render(" (enter to confirm, esc to cancel)")) + } + + // Footer hint + b.WriteString("\n") + b.WriteString(m.theme.Dim.Render(" enter:switch c:create d:remove R:rename r:refresh")) + + return b.String() +} + +// ShortHelp returns a short help string +func (m *TimelineModel) ShortHelp() string { + return "j/k:navigate enter:switch c:create d:remove R:rename r:refresh" +} + +// loadTimelines loads timeline data asynchronously +func (m *TimelineModel) loadTimelines() tea.Cmd { + ivaldiDir := m.ivaldiDir + return func() tea.Msg { + result, err := engine.ListTimelines(ivaldiDir) + return timelineLoadedMsg{result: result, err: err} + } +} + +// switchTimeline switches to a timeline asynchronously +func (m *TimelineModel) switchTimeline(name string) tea.Cmd { + workDir := m.workDir + ivaldiDir := m.ivaldiDir + return func() tea.Msg { + err := engine.SwitchTimeline(ivaldiDir, workDir, name) + return timelineSwitchedMsg{name: name, err: err} + } +} + +// createTimeline creates a new timeline asynchronously +func (m *TimelineModel) createTimeline(name string) tea.Cmd { + ivaldiDir := m.ivaldiDir + return func() tea.Msg { + err := engine.CreateTimeline(ivaldiDir, name) + return timelineActionMsg{action: "create", err: err} + } +} + +// removeTimeline removes a timeline asynchronously +func (m *TimelineModel) removeTimeline(name string) tea.Cmd { + ivaldiDir := m.ivaldiDir + return func() tea.Msg { + err := engine.RemoveTimeline(ivaldiDir, name) + return timelineActionMsg{action: "remove", err: err} + } +} + +// renameTimeline renames a timeline asynchronously +func (m *TimelineModel) 
renameTimeline(oldName, newName string) tea.Cmd { + ivaldiDir := m.ivaldiDir + return func() tea.Msg { + err := engine.RenameTimeline(ivaldiDir, oldName, newName, false) + return timelineActionMsg{action: "rename", err: err} + } +} + +// truncate shortens a string to maxLen with ellipsis +func truncate(s string, maxLen int) string { + if len(s) <= maxLen { + return s + } + return s[:maxLen-3] + "..." +} From 8ec469369ffc3a64d8049e9d29fd2c7f85cf43c2 Mon Sep 17 00:00:00 2001 From: javanhut Date: Wed, 25 Feb 2026 10:09:50 +0000 Subject: [PATCH 11/12] feat: updated tui --- internal/tui/app.go | 39 +++++- internal/tui/components/filelist.go | 10 ++ internal/tui/components/statusbar.go | 6 +- internal/tui/components/tabs.go | 18 ++- internal/tui/keys.go | 5 + internal/tui/style/common.go | 1 + internal/tui/style/theme.go | 35 +++++ internal/tui/views/diff.go | 5 + internal/tui/views/fuse.go | 15 +-- internal/tui/views/help.go | 41 +++++- internal/tui/views/log.go | 5 + internal/tui/views/remote.go | 13 +- internal/tui/views/status.go | 187 +++++++++++++++++++++++---- internal/tui/views/timeline.go | 9 +- 14 files changed, 329 insertions(+), 60 deletions(-) diff --git a/internal/tui/app.go b/internal/tui/app.go index 3aa90c9..8ce2f64 100644 --- a/internal/tui/app.go +++ b/internal/tui/app.go @@ -83,8 +83,9 @@ func (m Model) Update(msg tea.Msg) (tea.Model, tea.Cmd) { m.width = msg.Width m.height = msg.Height m.statusBar.Width = msg.Width + m.tabs.Width = msg.Width - contentHeight := msg.Height - 4 + contentHeight := msg.Height - 5 for id, v := range m.views { sizeMsg := tea.WindowSizeMsg{ @@ -101,6 +102,27 @@ func (m Model) Update(msg tea.Msg) (tea.Model, tea.Cmd) { return m, tea.Batch(cmds...) 
case tea.KeyMsg: + // Handle Escape key with progressive behavior + if key.Matches(msg, m.keys.Escape) { + // If active view has input open, forward esc to it + if v, ok := m.views[m.activeTab]; ok && v.HasActiveInput() { + updated, cmd := v.Update(msg) + m.views[m.activeTab] = updated.(style.View) + return m, cmd + } + // If help is showing, dismiss it + if m.showHelp { + m.showHelp = false + return m, nil + } + // If not on Status tab, go to Status + if m.activeTab != style.TabStatus { + return m, m.switchTab(style.TabStatus) + } + // On Status tab with nothing active, quit + return m, tea.Quit + } + if key.Matches(msg, m.keys.Help) { m.showHelp = !m.showHelp return m, nil @@ -147,6 +169,7 @@ func (m Model) Update(msg tea.Msg) (tea.Model, tea.Cmd) { m.statusBar.Staged = msg.Staged m.statusBar.Modified = msg.Modified m.statusBar.Untracked = msg.Untracked + m.statusBar.Deleted = msg.Deleted return m, nil case style.ErrMsg: @@ -178,8 +201,8 @@ func (m Model) View() string { b.WriteString(m.tabs.View(m.theme)) b.WriteString("\n") - // Content area - contentHeight := m.height - 4 + // Content area (tab bar + footer + status bar = 5 lines overhead) + contentHeight := m.height - 5 if contentHeight < 1 { contentHeight = 1 } @@ -208,6 +231,14 @@ func (m Model) View() string { b.WriteString(m.theme.Error.Render("Error: " + m.err.Error())) } + // Footer with key hints from active view + if !m.showHelp { + if v, ok := m.views[m.activeTab]; ok { + b.WriteString("\n") + b.WriteString(m.theme.Dim.Render(" " + v.ShortHelp())) + } + } + // Status bar b.WriteString("\n") b.WriteString(m.statusBar.View(m.theme)) @@ -226,7 +257,7 @@ func (m *Model) switchTab(tab style.TabID) tea.Cmd { func (m *Model) initActiveView() tea.Cmd { if v, ok := m.views[m.activeTab]; ok { if m.width > 0 && m.height > 0 { - contentHeight := m.height - 4 + contentHeight := m.height - 5 updated, cmd := v.Update(tea.WindowSizeMsg{ Width: m.width, Height: contentHeight, diff --git 
a/internal/tui/components/filelist.go b/internal/tui/components/filelist.go index c44211c..705cc00 100644 --- a/internal/tui/components/filelist.go +++ b/internal/tui/components/filelist.go @@ -98,6 +98,16 @@ func (f *FileList) Cursor() int { return f.cursor } +// Offset returns the current scroll offset +func (f *FileList) Offset() int { + return f.offset +} + +// VisibleHeight returns the visible height of the list +func (f *FileList) VisibleHeight() int { + return f.height +} + // SelectedItem returns the item under the cursor, or nil func (f *FileList) SelectedItem() *FileItem { if f.cursor >= 0 && f.cursor < len(f.Items) { diff --git a/internal/tui/components/statusbar.go b/internal/tui/components/statusbar.go index ae87f80..8cfc42f 100644 --- a/internal/tui/components/statusbar.go +++ b/internal/tui/components/statusbar.go @@ -14,6 +14,7 @@ type StatusBar struct { Staged int Modified int Untracked int + Deleted int Width int } @@ -47,6 +48,9 @@ func (s StatusBar) View(theme style.Theme) string { if s.Untracked > 0 { counts = append(counts, fmt.Sprintf("%d untracked", s.Untracked)) } + if s.Deleted > 0 { + counts = append(counts, fmt.Sprintf("%d deleted", s.Deleted)) + } if len(counts) > 0 { parts = append(parts, theme.StatusValue.Render(strings.Join(counts, ", "))) } @@ -55,7 +59,7 @@ func (s StatusBar) View(theme style.Theme) string { content := strings.Join(parts, divider) // Add help hint on the right - helpHint := theme.Help.Render("?=help") + helpHint := theme.Help.Render("esc=back ?=help") contentWidth := lipglossWidth(content) helpWidth := lipglossWidth(helpHint) diff --git a/internal/tui/components/tabs.go b/internal/tui/components/tabs.go index 055a117..d0f1356 100644 --- a/internal/tui/components/tabs.go +++ b/internal/tui/components/tabs.go @@ -1,6 +1,7 @@ package components import ( + "fmt" "strings" "github.com/javanhut/Ivaldi-vcs/internal/tui/style" @@ -10,6 +11,7 @@ import ( type TabBar struct { Labels []string Active int + Width int } // 
NewTabBar creates a new tab bar with the given labels @@ -30,18 +32,22 @@ func (t *TabBar) SetActive(index int) { // View renders the tab bar func (t TabBar) View(theme style.Theme) string { var tabs []string + + // Brand prefix + tabs = append(tabs, theme.Brand.Render("Ivaldi")) + tabs = append(tabs, theme.TabSeparator.Render("│")) + for i, label := range t.Labels { - prefix := " " - if i+1 <= 9 { - prefix = string(rune('0' + i + 1)) - } - display := prefix + ":" + label + display := fmt.Sprintf("%d %s", i+1, label) if i == t.Active { tabs = append(tabs, theme.ActiveTab.Render(display)) } else { tabs = append(tabs, theme.InactiveTab.Render(display)) } + if i < len(t.Labels)-1 { + tabs = append(tabs, theme.TabSeparator.Render("│")) + } } - row := strings.Join(tabs, " ") + row := strings.Join(tabs, "") return theme.TabBar.Render(row) } diff --git a/internal/tui/keys.go b/internal/tui/keys.go index ee64a37..6bf92ce 100644 --- a/internal/tui/keys.go +++ b/internal/tui/keys.go @@ -6,6 +6,7 @@ import "github.com/charmbracelet/bubbles/key" type KeyMap struct { Quit key.Binding Help key.Binding + Escape key.Binding Tab key.Binding ShiftTab key.Binding Tab1 key.Binding @@ -27,6 +28,10 @@ func DefaultKeyMap() KeyMap { key.WithKeys("?"), key.WithHelp("?", "help"), ), + Escape: key.NewBinding( + key.WithKeys("esc"), + key.WithHelp("esc", "back/dismiss/quit"), + ), Tab: key.NewBinding( key.WithKeys("tab"), key.WithHelp("tab", "next tab"), diff --git a/internal/tui/style/common.go b/internal/tui/style/common.go index 7aa312e..b238683 100644 --- a/internal/tui/style/common.go +++ b/internal/tui/style/common.go @@ -36,6 +36,7 @@ func AllTabs() []TabInfo { type View interface { tea.Model ShortHelp() string + HasActiveInput() bool } // Messages diff --git a/internal/tui/style/theme.go b/internal/tui/style/theme.go index e0a8baa..43ff17b 100644 --- a/internal/tui/style/theme.go +++ b/internal/tui/style/theme.go @@ -41,6 +41,19 @@ type Theme struct { // Viewport ViewportBorder 
lipgloss.Style + + // Timeline header (status view) + TimelineHeader lipgloss.Style + TimelineName lipgloss.Style + SealBadge lipgloss.Style + FileCountBadge lipgloss.Style + + // Separators + SectionDivider lipgloss.Style + TabSeparator lipgloss.Style + + // Brand + Brand lipgloss.Style } // DefaultTheme returns the default TUI theme @@ -129,5 +142,27 @@ func DefaultTheme() Theme { ViewportBorder: lipgloss.NewStyle(). BorderStyle(lipgloss.RoundedBorder()). BorderForeground(lipgloss.Color("#444444")), + + // Timeline header (status view) + TimelineHeader: lipgloss.NewStyle(). + BorderStyle(lipgloss.RoundedBorder()). + BorderForeground(lipgloss.Color("#7D56F4")). + Padding(0, 1), + TimelineName: lipgloss.NewStyle(). + Bold(true). + Foreground(lipgloss.Color("#7D56F4")), + SealBadge: lipgloss.NewStyle(). + Bold(true). + Foreground(lipgloss.Color("#55FFFF")), + FileCountBadge: lipgloss.NewStyle(). + Foreground(lipgloss.Color("#888888")), + SectionDivider: lipgloss.NewStyle(). + Foreground(lipgloss.Color("#444444")), + TabSeparator: lipgloss.NewStyle(). + Foreground(lipgloss.Color("#444444")), + Brand: lipgloss.NewStyle(). + Bold(true). + Foreground(lipgloss.Color("#7D56F4")). 
+ Padding(0, 1), } } diff --git a/internal/tui/views/diff.go b/internal/tui/views/diff.go index 39d8d42..e53721f 100644 --- a/internal/tui/views/diff.go +++ b/internal/tui/views/diff.go @@ -252,6 +252,11 @@ func (m *DiffModel) ShortHelp() string { return "j/k:scroll n/p:next/prev file s:staged g/G:top/bottom r:refresh" } +// HasActiveInput returns whether the diff view has an active input dialog +func (m *DiffModel) HasActiveInput() bool { + return false +} + // loadDiff loads diff data asynchronously func (m *DiffModel) loadDiff() tea.Cmd { workDir := m.workDir diff --git a/internal/tui/views/fuse.go b/internal/tui/views/fuse.go index 0ef2294..ed47c74 100644 --- a/internal/tui/views/fuse.go +++ b/internal/tui/views/fuse.go @@ -323,16 +323,6 @@ func (m *FuseModel) View() string { b.WriteString("\n") } - // Footer - b.WriteString("\n") - if m.fuseStatus != nil && m.fuseStatus.InProgress { - b.WriteString(m.theme.Dim.Render(" a:abort s:retry strategy r:refresh")) - } else if m.state == fuseStateResult { - b.WriteString(m.theme.Dim.Render(" r:refresh")) - } else { - b.WriteString(m.theme.Dim.Render(" j/k:navigate enter/f:fuse s:strategy r:refresh")) - } - return b.String() } @@ -500,6 +490,11 @@ func (m *FuseModel) ShortHelp() string { return "j/k:navigate enter:fuse s:strategy a:abort r:refresh" } +// HasActiveInput returns whether the fuse view has an active input dialog +func (m *FuseModel) HasActiveInput() bool { + return false +} + // Async commands func (m *FuseModel) loadData() tea.Cmd { diff --git a/internal/tui/views/help.go b/internal/tui/views/help.go index f70c699..86cd50b 100644 --- a/internal/tui/views/help.go +++ b/internal/tui/views/help.go @@ -3,6 +3,7 @@ package views import ( "strings" + "github.com/charmbracelet/lipgloss" "github.com/javanhut/Ivaldi-vcs/internal/tui/style" ) @@ -30,6 +31,7 @@ func (h HelpModel) View(width, height int, theme style.Theme) string { { title: "Global", keys: []helpEntry{ + {"esc", "Back / dismiss / quit"}, {"q / 
ctrl+c", "Quit"}, {"?", "Toggle help"}, {"tab", "Next tab"}, @@ -45,6 +47,7 @@ func (h HelpModel) View(width, height int, theme style.Theme) string { {"a", "Gather all unstaged files"}, {"u", "Ungather all staged files"}, {"s", "Seal (commit) staged files"}, + {"i", "Show/hide ignored files"}, {"r", "Refresh status"}, {"g", "Jump to top"}, {"G", "Jump to bottom"}, @@ -109,6 +112,19 @@ func (h HelpModel) View(width, height int, theme style.Theme) string { }, } + glossary := []helpEntry{ + {"Timeline", "Branch — a line of development"}, + {"Seal", "Commit — a snapshot of changes"}, + {"Gather", "Stage — mark files for next seal"}, + {"Ungather", "Unstage — remove from staging"}, + {"Fuse", "Merge — combine two timelines"}, + {"Portal", "Remote — link to GitHub repository"}, + {"Upload", "Push — send changes to remote"}, + {"Sync", "Pull — fetch changes from remote"}, + {"Harvest", "Fetch — download remote timelines"}, + {"Scout", "List remote timeline information"}, + } + var b strings.Builder b.WriteString("\n") b.WriteString(theme.Title.Render(" Ivaldi TUI — Keybinding Reference")) @@ -128,9 +144,32 @@ func (h HelpModel) View(width, height int, theme style.Theme) string { b.WriteString("\n") } + // Glossary + b.WriteString(" ") + b.WriteString(theme.SectionHead.Render("Glossary — Ivaldi Naming Conventions")) + b.WriteString("\n") + for _, entry := range glossary { + b.WriteString(" ") + b.WriteString(theme.HelpKey.Render(padRight(entry.key, 12))) + b.WriteString(theme.HelpDesc.Render(entry.desc)) + b.WriteString("\n") + } + b.WriteString("\n") + b.WriteString(theme.Dim.Render(" Press any key to dismiss")) - return b.String() + // Wrap in bordered box + boxStyle := lipgloss.NewStyle(). + BorderStyle(lipgloss.RoundedBorder()). + BorderForeground(lipgloss.Color("#7D56F4")). 
+ Padding(0, 1) + + innerWidth := width - 4 + if innerWidth < 40 { + innerWidth = 40 + } + + return boxStyle.Width(innerWidth).Render(b.String()) } // padRight pads a string to the given width diff --git a/internal/tui/views/log.go b/internal/tui/views/log.go index 4a84caf..50bcd2e 100644 --- a/internal/tui/views/log.go +++ b/internal/tui/views/log.go @@ -180,6 +180,11 @@ func (m *LogModel) ShortHelp() string { return "j/k:scroll o:oneline/full t:all timelines r:refresh" } +// HasActiveInput returns whether the log view has an active input dialog +func (m *LogModel) HasActiveInput() bool { + return false +} + // renderContent rebuilds the viewport content from commits func (m *LogModel) renderContent() { if !m.ready { diff --git a/internal/tui/views/remote.go b/internal/tui/views/remote.go index 735308d..5e4459c 100644 --- a/internal/tui/views/remote.go +++ b/internal/tui/views/remote.go @@ -423,14 +423,6 @@ func (m *RemoteModel) View() string { b.WriteString(m.dialog.View(m.theme)) } - // Footer - b.WriteString("\n") - if m.portal != nil { - b.WriteString(m.theme.Dim.Render(" s:scout u:upload y:sync h:harvest all enter:harvest selected p:portal r:refresh")) - } else { - b.WriteString(m.theme.Dim.Render(" p:set portal r:refresh")) - } - return b.String() } @@ -439,6 +431,11 @@ func (m *RemoteModel) ShortHelp() string { return "s:scout u:upload y:sync h:harvest p:portal r:refresh" } +// HasActiveInput returns whether the remote view has an active input dialog +func (m *RemoteModel) HasActiveInput() bool { + return m.dialog.IsActive() +} + // Async commands func (m *RemoteModel) loadPortal() tea.Cmd { diff --git a/internal/tui/views/status.go b/internal/tui/views/status.go index cc1c15d..b751b03 100644 --- a/internal/tui/views/status.go +++ b/internal/tui/views/status.go @@ -7,16 +7,18 @@ import ( "github.com/charmbracelet/bubbles/key" tea "github.com/charmbracelet/bubbletea" "github.com/javanhut/Ivaldi-vcs/internal/engine" + 
"github.com/javanhut/Ivaldi-vcs/internal/ignore" "github.com/javanhut/Ivaldi-vcs/internal/tui/components" "github.com/javanhut/Ivaldi-vcs/internal/tui/style" ) // statusKeyMap defines keybindings specific to the status view type statusKeyMap struct { - GatherAll key.Binding - UngatherAll key.Binding - Refresh key.Binding - Seal key.Binding + GatherAll key.Binding + UngatherAll key.Binding + Refresh key.Binding + Seal key.Binding + ToggleIgnored key.Binding } func defaultStatusKeyMap() statusKeyMap { @@ -37,6 +39,10 @@ func defaultStatusKeyMap() statusKeyMap { key.WithKeys("s"), key.WithHelp("s", "seal (commit)"), ), + ToggleIgnored: key.NewBinding( + key.WithKeys("i"), + key.WithHelp("i", "toggle ignored"), + ), } } @@ -54,12 +60,19 @@ type StatusModel struct { err error loading bool sealMsg string // success message after seal + + // Ignored files + showIgnored bool + ignorePatterns []string // user patterns from .ivaldiignore + ignoredFiles []string // files matched by ignore patterns } // statusLoadedMsg carries loaded status data type statusLoadedMsg struct { - result *engine.StatusResult - err error + result *engine.StatusResult + err error + ignorePatterns []string + ignoredFiles []string } // statusStagingDoneMsg signals staging operation completed @@ -101,7 +114,8 @@ func (m *StatusModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) { case tea.WindowSizeMsg: m.width = msg.Width m.height = msg.Height - listHeight := msg.Height - 4 + // Account for timeline header (3 lines) + summary line + spacing + listHeight := msg.Height - 8 if listHeight < 1 { listHeight = 1 } @@ -116,6 +130,8 @@ func (m *StatusModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) { } m.status = msg.result m.err = nil + m.ignorePatterns = msg.ignorePatterns + m.ignoredFiles = msg.ignoredFiles m.rebuildFileList() return m, func() tea.Msg { @@ -181,6 +197,10 @@ func (m *StatusModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) { m.err = fmt.Errorf("no files staged. 
Gather files first") return m, nil + case key.Matches(msg, m.keys.ToggleIgnored): + m.showIgnored = !m.showIgnored + return m, nil + case key.Matches(msg, m.keys.GatherAll): return m, m.gatherAll() @@ -217,6 +237,10 @@ func (m *StatusModel) View() string { var b strings.Builder + // Timeline header panel + b.WriteString(m.renderTimelineHeader()) + b.WriteString("\n") + // Seal success message if m.sealMsg != "" { b.WriteString(" ") @@ -230,27 +254,16 @@ func (m *StatusModel) View() string { b.WriteString("\n\n") b.WriteString(m.theme.Dim.Render(" Press r to refresh")) } else { - var parts []string - if len(m.status.Staged) > 0 { - parts = append(parts, m.theme.Staged.Render(fmt.Sprintf("%d staged", len(m.status.Staged)))) - } - if len(m.status.Modified) > 0 { - parts = append(parts, m.theme.Modified.Render(fmt.Sprintf("%d modified", len(m.status.Modified)))) - } - if len(m.status.Untracked) > 0 { - parts = append(parts, m.theme.Untracked.Render(fmt.Sprintf("%d untracked", len(m.status.Untracked)))) - } - if len(m.status.Deleted) > 0 { - parts = append(parts, m.theme.Deleted.Render(fmt.Sprintf("%d deleted", len(m.status.Deleted)))) - } - - b.WriteString(" ") - b.WriteString(strings.Join(parts, m.theme.Dim.Render(" | "))) - b.WriteString("\n\n") + // Grouped summary header + b.WriteString(m.renderGroupedSummary()) + b.WriteString("\n") b.WriteString(m.fileList.View(m.theme)) } + // Ignored files section + b.WriteString(m.renderIgnoredSection()) + // Dialog overlay if m.dialog.IsActive() { b.WriteString(m.dialog.View(m.theme)) @@ -259,9 +272,114 @@ func (m *StatusModel) View() string { return b.String() } +// renderTimelineHeader renders a bordered panel showing timeline, seal, and file count +func (m *StatusModel) renderTimelineHeader() string { + timeline := m.status.Timeline + if timeline == "" { + timeline = "(none)" + } + seal := m.status.SealName + if seal == "" { + seal = "(no seals)" + } + + content := fmt.Sprintf("%s %s %s %s %s %s %s", + 
m.theme.StatusKey.Render("Timeline:"), + m.theme.TimelineName.Render(timeline), + m.theme.SectionDivider.Render("│"), + m.theme.StatusKey.Render("Seal:"), + m.theme.SealBadge.Render(seal), + m.theme.SectionDivider.Render("│"), + m.theme.FileCountBadge.Render(fmt.Sprintf("Tracking %d file(s)", m.status.FileCount)), + ) + + return m.theme.TimelineHeader.Width(m.width - 2).Render(content) +} + +// renderGroupedSummary renders category counts as distinct styled sections +func (m *StatusModel) renderGroupedSummary() string { + var parts []string + if len(m.status.Staged) > 0 { + parts = append(parts, m.theme.Staged.Render(fmt.Sprintf(" %d staged ", len(m.status.Staged)))) + } + if len(m.status.Modified) > 0 { + parts = append(parts, m.theme.Modified.Render(fmt.Sprintf(" %d modified ", len(m.status.Modified)))) + } + if len(m.status.Deleted) > 0 { + parts = append(parts, m.theme.Deleted.Render(fmt.Sprintf(" %d deleted ", len(m.status.Deleted)))) + } + if len(m.status.Untracked) > 0 { + parts = append(parts, m.theme.Untracked.Render(fmt.Sprintf(" %d untracked ", len(m.status.Untracked)))) + } + + return " " + strings.Join(parts, m.theme.SectionDivider.Render(" │ ")) +} + +// renderIgnoredSection renders the ignored files toggle section +func (m *StatusModel) renderIgnoredSection() string { + ignoredCount := len(m.ignoredFiles) + if ignoredCount == 0 && len(m.ignorePatterns) == 0 { + return "" + } + + var b strings.Builder + + if !m.showIgnored { + b.WriteString("\n") + b.WriteString(m.theme.Dim.Render(fmt.Sprintf(" %d ignored file(s) hidden — press i to show", ignoredCount))) + } else { + b.WriteString("\n\n ") + b.WriteString(m.theme.SectionHead.Render("Ignored Files")) + b.WriteString("\n") + + if len(m.ignoredFiles) > 0 { + for _, f := range m.ignoredFiles { + b.WriteString(" ") + b.WriteString(m.theme.Ignored.Render(f)) + b.WriteString("\n") + } + } else { + b.WriteString(" ") + b.WriteString(m.theme.Dim.Render("(no ignored files detected)")) + b.WriteString("\n") + } 
+ + // Show patterns + b.WriteString("\n ") + b.WriteString(m.theme.SectionHead.Render("Built-in Patterns")) + b.WriteString("\n") + for _, p := range ignore.DefaultPatterns { + b.WriteString(" ") + b.WriteString(m.theme.Dim.Render(p)) + b.WriteString("\n") + } + + if len(m.ignorePatterns) > 0 { + b.WriteString("\n ") + b.WriteString(m.theme.SectionHead.Render(".ivaldiignore Patterns")) + b.WriteString("\n") + for _, p := range m.ignorePatterns { + b.WriteString(" ") + b.WriteString(m.theme.Dim.Render(p)) + b.WriteString("\n") + } + } + + b.WriteString("\n") + b.WriteString(m.theme.Dim.Render(" Press i to hide")) + } + + return b.String() +} + // ShortHelp returns a short help string for the status bar func (m *StatusModel) ShortHelp() string { - return "j/k:nav space:toggle a:gather all u:ungather all s:seal r:refresh" + return "j/k:nav space:toggle a:gather all u:ungather all s:seal i:ignored r:refresh" +} + +// HasActiveInput returns whether the status view has an active input dialog +func (m *StatusModel) HasActiveInput() bool { + return m.dialog.IsActive() } // rebuildFileList rebuilds the file list from current status @@ -295,7 +413,24 @@ func (m *StatusModel) loadStatus() tea.Cmd { ivaldiDir := m.ivaldiDir return func() tea.Msg { result, err := engine.GetFileStatuses(workDir, ivaldiDir) - return statusLoadedMsg{result: result, err: err} + + // Load ignore patterns + patterns, _ := ignore.LoadPatterns(workDir) + + // Collect ignored file paths from status if available + var ignoredFiles []string + if result != nil { + for _, f := range result.Ignored { + ignoredFiles = append(ignoredFiles, f.Path) + } + } + + return statusLoadedMsg{ + result: result, + err: err, + ignorePatterns: patterns, + ignoredFiles: ignoredFiles, + } } } diff --git a/internal/tui/views/timeline.go b/internal/tui/views/timeline.go index 83956df..ab6351d 100644 --- a/internal/tui/views/timeline.go +++ b/internal/tui/views/timeline.go @@ -430,10 +430,6 @@ func (m *TimelineModel) View() 
string { b.WriteString(m.theme.Dim.Render(" (enter to confirm, esc to cancel)")) } - // Footer hint - b.WriteString("\n") - b.WriteString(m.theme.Dim.Render(" enter:switch c:create d:remove R:rename r:refresh")) - return b.String() } @@ -442,6 +438,11 @@ func (m *TimelineModel) ShortHelp() string { return "j/k:navigate enter:switch c:create d:remove R:rename r:refresh" } +// HasActiveInput returns whether the timeline view has an active input dialog +func (m *TimelineModel) HasActiveInput() bool { + return m.mode != inputNone +} + // loadTimelines loads timeline data asynchronously func (m *TimelineModel) loadTimelines() tea.Cmd { ivaldiDir := m.ivaldiDir From a8f6b632dec7e91c9c0f3a13ce19f57347b23aec Mon Sep 17 00:00:00 2001 From: javanhut Date: Wed, 25 Feb 2026 10:18:49 +0000 Subject: [PATCH 12/12] feat: fix for static check --- internal/tui/views/log.go | 1 - 1 file changed, 1 deletion(-) diff --git a/internal/tui/views/log.go b/internal/tui/views/log.go index 50bcd2e..3001a8c 100644 --- a/internal/tui/views/log.go +++ b/internal/tui/views/log.go @@ -84,7 +84,6 @@ func (m *LogModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) { m.height = msg.Height if !m.ready { m.viewport = viewport.New(msg.Width, msg.Height) - m.viewport.HighPerformanceRendering = false m.ready = true } else { m.viewport.Width = msg.Width