diff --git a/claude_code_log/cache.py b/claude_code_log/cache.py index 97aa7fdf..8e37d477 100644 --- a/claude_code_log/cache.py +++ b/claude_code_log/cache.py @@ -936,6 +936,105 @@ def get_archived_session_count(self, valid_session_ids: set[str]) -> int: 1 for row in cached_rows if row["session_id"] not in valid_session_ids ) + def get_archived_sessions( + self, valid_session_ids: set[str] + ) -> Dict[str, SessionCacheData]: + """Get session data for archived sessions (cached but JSONL deleted). + + Args: + valid_session_ids: Set of session IDs that currently exist in source data + + Returns: + Dict mapping session_id to SessionCacheData for archived sessions + """ + if self._project_id is None: + return {} + + archived_sessions: Dict[str, SessionCacheData] = {} + + with self._get_connection() as conn: + session_rows = conn.execute( + "SELECT * FROM sessions WHERE project_id = ?", + (self._project_id,), + ).fetchall() + + for row in session_rows: + session_id = row["session_id"] + if session_id not in valid_session_ids: + archived_sessions[session_id] = SessionCacheData( + session_id=session_id, + summary=row["summary"], + first_timestamp=row["first_timestamp"], + last_timestamp=row["last_timestamp"], + message_count=row["message_count"], + first_user_message=row["first_user_message"], + cwd=row["cwd"], + total_input_tokens=row["total_input_tokens"], + total_output_tokens=row["total_output_tokens"], + total_cache_creation_tokens=row["total_cache_creation_tokens"], + total_cache_read_tokens=row["total_cache_read_tokens"], + ) + + return archived_sessions + + def export_session_to_jsonl(self, session_id: str) -> List[str]: + """Export all message content JSONs for a session, for JSONL restoration. 
+ + Args: + session_id: The session ID to export + + Returns: + List of JSON strings (one per line for JSONL file), compact format + """ + if self._project_id is None: + return [] + + with self._get_connection() as conn: + rows = conn.execute( + """SELECT content FROM messages + WHERE project_id = ? AND session_id = ? + ORDER BY timestamp NULLS LAST""", + (self._project_id, session_id), + ).fetchall() + + # Re-serialize to compact JSON format (no spaces after separators) + # to match original JSONL file format + result: List[str] = [] + for row in rows: + try: + parsed = json.loads(row["content"]) + compact = json.dumps(parsed, separators=(",", ":")) + result.append(compact) + except json.JSONDecodeError: + # If parsing fails, use original content + result.append(row["content"]) + return result + + def load_session_entries(self, session_id: str) -> List[TranscriptEntry]: + """Load transcript entries for a session from cache. + + Used for rendering archived sessions to HTML/Markdown when + the original JSONL file no longer exists. + + Args: + session_id: The session ID to load + + Returns: + List of TranscriptEntry objects for the session + """ + if self._project_id is None: + return [] + + with self._get_connection() as conn: + rows = conn.execute( + """SELECT content FROM messages + WHERE project_id = ? AND session_id = ? + ORDER BY timestamp NULLS LAST""", + (self._project_id, session_id), + ).fetchall() + + return [self._deserialize_entry(row) for row in rows] + # ========== Page Cache Methods (Pagination) ========== def get_page_size_config(self) -> Optional[int]: @@ -1224,6 +1323,115 @@ def get_page_count(self) -> int: return row["cnt"] if row else 0 + def delete_session(self, session_id: str) -> bool: + """Delete a session and its messages from cache. 
+ + Args: + session_id: The session ID to delete + + Returns: + True if session was deleted, False if not found + """ + if self._project_id is None: + return False + + with self._get_connection() as conn: + # Check if session exists + row = conn.execute( + "SELECT id FROM sessions WHERE project_id = ? AND session_id = ?", + (self._project_id, session_id), + ).fetchone() + + if not row: + return False + + # Delete messages for this session + conn.execute( + "DELETE FROM messages WHERE project_id = ? AND session_id = ?", + (self._project_id, session_id), + ) + + # Delete HTML cache entries for this session + conn.execute( + "DELETE FROM html_cache WHERE project_id = ? AND source_session_id = ?", + (self._project_id, session_id), + ) + + # Delete the session record + conn.execute( + "DELETE FROM sessions WHERE project_id = ? AND session_id = ?", + (self._project_id, session_id), + ) + + self._update_last_updated(conn) + conn.commit() + + return True + + def delete_project(self) -> bool: + """Delete this project and all its data from cache. + + Returns: + True if project was deleted, False if not found + """ + if self._project_id is None: + return False + + with self._get_connection() as conn: + # Cascade delete handles messages, sessions, cached_files, html_cache, html_pages + conn.execute("DELETE FROM projects WHERE id = ?", (self._project_id,)) + conn.commit() + + self._project_id = None + return True + + +def get_all_cached_projects(projects_dir: Path) -> List[tuple[str, bool]]: + """Get all projects from cache, indicating which are archived. + + This is a standalone function that queries the cache.db directly + to find all project paths, without needing to instantiate CacheManager + for each project. + + Args: + projects_dir: Path to the projects directory (e.g., ~/.claude/projects) + + Returns: + List of (project_path, is_archived) tuples. + is_archived is True if the project has no JSONL files but exists in cache. 
+ """ + db_path = projects_dir / "cache.db" + if not db_path.exists(): + return [] + + result: List[tuple[str, bool]] = [] + + try: + conn = sqlite3.connect(db_path, timeout=30.0) + conn.row_factory = sqlite3.Row + try: + rows = conn.execute( + "SELECT project_path FROM projects ORDER BY project_path" + ).fetchall() + + for row in rows: + project_path = Path(row["project_path"]) + # Check if project has JSONL files (non-archived) + has_jsonl = ( + bool(list(project_path.glob("*.jsonl"))) + if project_path.exists() + else False + ) + # is_archived = project exists in cache but has no JSONL files + is_archived = not has_jsonl + result.append((row["project_path"], is_archived)) + finally: + conn.close() + except Exception: + pass + + return result + __all__ = [ "CacheManager", @@ -1232,5 +1440,6 @@ def get_page_count(self) -> int: "PageCacheData", "ProjectCache", "SessionCacheData", + "get_all_cached_projects", "get_library_version", ] diff --git a/claude_code_log/cli.py b/claude_code_log/cli.py index 90f0e4b3..19b4bb9d 100644 --- a/claude_code_log/cli.py +++ b/claude_code_log/cli.py @@ -17,7 +17,7 @@ get_file_extension, process_projects_hierarchy, ) -from .cache import CacheManager, get_library_version +from .cache import CacheManager, get_all_cached_projects, get_library_version def get_default_projects_dir() -> Path: @@ -25,36 +25,75 @@ def get_default_projects_dir() -> Path: return Path.home() / ".claude" / "projects" -def _launch_tui_with_cache_check(project_path: Path) -> Optional[str]: +def _discover_projects( + projects_dir: Path, +) -> tuple[list[Path], set[Path]]: + """Discover active and archived projects in the projects directory. 
+ + Returns: + Tuple of (all_project_dirs, archived_projects_set) + """ + # Find active projects (directories with JSONL files) + project_dirs = [ + d for d in projects_dir.iterdir() if d.is_dir() and list(d.glob("*.jsonl")) + ] + + # Find archived projects (in cache but without JSONL files) + archived_projects: set[Path] = set() + cached_projects = get_all_cached_projects(projects_dir) + active_project_paths = {str(p) for p in project_dirs} + for project_path_str, is_archived in cached_projects: + if is_archived and project_path_str not in active_project_paths: + archived_path = Path(project_path_str) + archived_projects.add(archived_path) + project_dirs.append(archived_path) + + return project_dirs, archived_projects + + +def _launch_tui_with_cache_check( + project_path: Path, is_archived: bool = False +) -> Optional[str]: """Launch TUI with proper cache checking and user feedback.""" click.echo("Checking cache and loading session data...") # Check if we need to rebuild cache cache_manager = CacheManager(project_path, get_library_version()) - jsonl_files = list(project_path.glob("*.jsonl")) - modified_files = cache_manager.get_modified_files(jsonl_files) project_cache = cache_manager.get_cached_project_data() - if not (project_cache and project_cache.sessions and not modified_files): - # Need to rebuild cache - if modified_files: + if is_archived: + # Archived projects have no JSONL files, just load from cache + if project_cache and project_cache.sessions: click.echo( - f"Found {len(modified_files)} modified files, rebuilding cache..." + f"[ARCHIVED] Found {len(project_cache.sessions)} sessions in cache. Launching TUI..." ) else: - click.echo("Building session cache...") - - # Pre-build the cache before launching TUI (no HTML generation) - try: - ensure_fresh_cache(project_path, cache_manager, silent=True) - click.echo("Cache ready! 
Launching TUI...") - except Exception as e: - click.echo(f"Error building cache: {e}", err=True) + click.echo("Error: No cached sessions found for archived project", err=True) return None else: - click.echo( - f"Cache up to date. Found {len(project_cache.sessions)} sessions. Launching TUI..." - ) + jsonl_files = list(project_path.glob("*.jsonl")) + modified_files = cache_manager.get_modified_files(jsonl_files) + + if not (project_cache and project_cache.sessions and not modified_files): + # Need to rebuild cache + if modified_files: + click.echo( + f"Found {len(modified_files)} modified files, rebuilding cache..." + ) + else: + click.echo("Building session cache...") + + # Pre-build the cache before launching TUI (no HTML generation) + try: + ensure_fresh_cache(project_path, cache_manager, silent=True) + click.echo("Cache ready! Launching TUI...") + except Exception as e: + click.echo(f"Error building cache: {e}", err=True) + return None + else: + click.echo( + f"Cache up to date. Found {len(project_cache.sessions)} sessions. Launching TUI..." 
+ ) # Small delay to let user see the message before TUI clears screen import time @@ -63,7 +102,7 @@ def _launch_tui_with_cache_check(project_path: Path) -> Optional[str]: from .tui import run_session_browser - result = run_session_browser(project_path) + result = run_session_browser(project_path, is_archived=is_archived) return result @@ -511,11 +550,8 @@ def main( click.echo(f"Error: Projects directory not found: {input_path}") return - project_dirs = [ - d - for d in input_path.iterdir() - if d.is_dir() and list(d.glob("*.jsonl")) - ] + # Initial project discovery + project_dirs, archived_projects = _discover_projects(input_path) if not project_dirs: click.echo(f"No projects with JSONL files found in {input_path}") @@ -524,7 +560,7 @@ def main( # Try to find projects that match current working directory matching_projects = find_projects_by_cwd(input_path) - if len(project_dirs) == 1: + if len(project_dirs) == 1 and not archived_projects: # Only one project, open it directly result = _launch_tui_with_cache_check(project_dirs[0]) if result == "back_to_projects": @@ -532,14 +568,21 @@ def main( from .tui import run_project_selector while True: + # Re-discover projects (may have changed after restore) + project_dirs, archived_projects = _discover_projects( + input_path + ) selected_project = run_project_selector( - project_dirs, matching_projects + project_dirs, matching_projects, archived_projects ) if not selected_project: # User cancelled return - result = _launch_tui_with_cache_check(selected_project) + is_archived = selected_project in archived_projects + result = _launch_tui_with_cache_check( + selected_project, is_archived=is_archived + ) if result != "back_to_projects": # User quit normally return @@ -555,14 +598,21 @@ def main( from .tui import run_project_selector while True: + # Re-discover projects (may have changed after restore) + project_dirs, archived_projects = _discover_projects( + input_path + ) selected_project = run_project_selector( - 
project_dirs, matching_projects + project_dirs, matching_projects, archived_projects ) if not selected_project: # User cancelled return - result = _launch_tui_with_cache_check(selected_project) + is_archived = selected_project in archived_projects + result = _launch_tui_with_cache_check( + selected_project, is_archived=is_archived + ) if result != "back_to_projects": # User quit normally return @@ -572,14 +622,19 @@ def main( from .tui import run_project_selector while True: + # Re-discover projects each iteration (may have changed after restore) + project_dirs, archived_projects = _discover_projects(input_path) selected_project = run_project_selector( - project_dirs, matching_projects + project_dirs, matching_projects, archived_projects ) if not selected_project: # User cancelled return - result = _launch_tui_with_cache_check(selected_project) + is_archived = selected_project in archived_projects + result = _launch_tui_with_cache_check( + selected_project, is_archived=is_archived + ) if result != "back_to_projects": # User quit normally return diff --git a/claude_code_log/converter.py b/claude_code_log/converter.py index 07e31e00..6b5606f5 100644 --- a/claude_code_log/converter.py +++ b/claude_code_log/converter.py @@ -20,7 +20,12 @@ create_session_preview, get_warmup_session_ids, ) -from .cache import CacheManager, SessionCacheData, get_library_version +from .cache import ( + CacheManager, + SessionCacheData, + get_all_cached_projects, + get_library_version, +) from .parser import parse_timestamp from .factories import create_transcript_entry from .models import ( @@ -1477,6 +1482,49 @@ def _generate_individual_session_files( return regenerated_count +def _get_cleanup_period_days() -> Optional[int]: + """Read cleanupPeriodDays from Claude Code settings. + + Checks ~/.claude/settings.json for the cleanupPeriodDays setting. + + Returns: + The configured cleanup period in days, or None if not set/readable. 
+ """ + import json + + settings_path = Path.home() / ".claude" / "settings.json" + if not settings_path.exists(): + return None + + try: + with open(settings_path, "r", encoding="utf-8") as f: + settings = json.load(f) + return settings.get("cleanupPeriodDays") + except (json.JSONDecodeError, OSError): + return None + + +def _print_archived_sessions_note(total_archived: int) -> None: + """Print a note about archived sessions and how to restore them. + + Args: + total_archived: Total number of archived sessions across all projects. + """ + cleanup_days = _get_cleanup_period_days() + cleanup_info = ( + f" (cleanupPeriodDays: {cleanup_days})" + if cleanup_days is not None + else " (cleanupPeriodDays: 30 default)" + ) + + print( + f"\nNote: {total_archived} archived session(s) found{cleanup_info}.\n" + " These sessions were cached before their JSONL files were deleted.\n" + " To restore them or adjust cleanup settings, see:\n" + " https://github.com/daaain/claude-code-log/blob/main/dev-docs/restoring-archived-sessions.md" + ) + + def process_projects_hierarchy( projects_path: Path, from_date: Optional[str] = None, @@ -1514,7 +1562,16 @@ def process_projects_hierarchy( if child.is_dir() and list(child.glob("*.jsonl")): project_dirs.append(child) - if not project_dirs: + # Find archived projects (projects in cache but without JSONL files) + archived_project_dirs: list[Path] = [] + if use_cache: + cached_projects = get_all_cached_projects(projects_path) + active_project_paths = {str(p) for p in project_dirs} + for project_path_str, is_archived in cached_projects: + if is_archived and project_path_str not in active_project_paths: + archived_project_dirs.append(Path(project_path_str)) + + if not project_dirs and not archived_project_dirs: raise FileNotFoundError( f"No project directories with JSONL files found in {projects_path}" ) @@ -1530,6 +1587,7 @@ def process_projects_hierarchy( total_projects = len(project_dirs) projects_with_updates = 0 total_sessions = 0 + 
total_archived = 0 # Per-project stats for summary output project_stats: List[tuple[str, GenerationStats]] = [] @@ -1571,6 +1629,7 @@ def process_projects_hierarchy( if cache_manager else 0 ) + total_archived += archived_count output_path = project_dir / "combined_transcripts.html" # Check combined_stale using the appropriate cache: # - Paginated projects store data in html_pages table (via save_page_cache) @@ -1681,6 +1740,7 @@ def process_projects_hierarchy( "latest_timestamp": cached_project_data.latest_timestamp, "earliest_timestamp": cached_project_data.earliest_timestamp, "working_directories": cache_manager.get_working_directories(), + "is_archived": False, "sessions": [ { "id": session_data.session_id, @@ -1790,6 +1850,7 @@ def process_projects_hierarchy( "working_directories": cache_manager.get_working_directories() if cache_manager else [], + "is_archived": False, "sessions": sessions_data, } ) @@ -1808,6 +1869,66 @@ def process_projects_hierarchy( ) continue + # Process archived projects (projects in cache but without JSONL files) + archived_project_count = 0 + for archived_dir in sorted(archived_project_dirs): + try: + # Initialize cache manager for archived project + cache_manager = CacheManager(archived_dir, library_version) + cached_project_data = cache_manager.get_cached_project_data() + + if cached_project_data is None: + continue + + archived_project_count += 1 + print( + f" {archived_dir.name}: [ARCHIVED] ({len(cached_project_data.sessions)} sessions)" + ) + + # Add archived project to summaries + project_summaries.append( + { + "name": archived_dir.name, + "path": archived_dir, + "html_file": f"{archived_dir.name}/combined_transcripts.html", + "jsonl_count": 0, + "message_count": cached_project_data.total_message_count, + "last_modified": 0.0, + "total_input_tokens": cached_project_data.total_input_tokens, + "total_output_tokens": cached_project_data.total_output_tokens, + "total_cache_creation_tokens": 
cached_project_data.total_cache_creation_tokens, + "total_cache_read_tokens": cached_project_data.total_cache_read_tokens, + "latest_timestamp": cached_project_data.latest_timestamp, + "earliest_timestamp": cached_project_data.earliest_timestamp, + "working_directories": cache_manager.get_working_directories(), + "is_archived": True, + "sessions": [ + { + "id": session_data.session_id, + "summary": session_data.summary, + "timestamp_range": format_timestamp_range( + session_data.first_timestamp, + session_data.last_timestamp, + ), + "first_timestamp": session_data.first_timestamp, + "last_timestamp": session_data.last_timestamp, + "message_count": session_data.message_count, + "first_user_message": session_data.first_user_message + or "[No user message found in session.]", + } + for session_data in cached_project_data.sessions.values() + if session_data.first_user_message + and session_data.first_user_message != "Warmup" + ], + } + ) + except Exception as e: + print(f"Warning: Failed to process archived project {archived_dir}: {e}") + continue + + # Update total projects count to include archived + total_projects = len(project_dirs) + archived_project_count + # Generate index (always regenerate if outdated) ext = get_file_extension(output_format) index_path = projects_path / f"index.{ext}" @@ -1846,4 +1967,8 @@ def process_projects_hierarchy( summary_parts.append(" Index regenerated") print("\n".join(summary_parts)) + # Show archived sessions note if any exist + if total_archived > 0: + _print_archived_sessions_note(total_archived) + return index_path diff --git a/claude_code_log/html/templates/components/project_card_styles.css b/claude_code_log/html/templates/components/project_card_styles.css index ebae9931..c6d40f2f 100644 --- a/claude_code_log/html/templates/components/project_card_styles.css +++ b/claude_code_log/html/templates/components/project_card_styles.css @@ -111,4 +111,28 @@ .project-sessions details[open] summary { margin-bottom: 10px; +} + +/* 
Archived project styling */ +.project-card.archived { + opacity: 0.6; + background-color: #f5f5f522; +} + +.project-card.archived:hover { + opacity: 0.8; +} + +.archived-badge { + display: inline-block; + background-color: #888; + color: white; + font-size: 0.65em; + font-weight: 600; + padding: 2px 8px; + border-radius: 4px; + margin-left: 10px; + vertical-align: middle; + text-transform: uppercase; + letter-spacing: 0.5px; } \ No newline at end of file diff --git a/claude_code_log/html/templates/index.html b/claude_code_log/html/templates/index.html index a539386a..4b2bf430 100644 --- a/claude_code_log/html/templates/index.html +++ b/claude_code_log/html/templates/index.html @@ -59,10 +59,14 @@

{{ title }}

{% for project in projects %} -
+
{{ project.display_name }} + {% if project.is_archived %} + Archived + {% else %} (← open combined transcript) + {% endif %}
📁 {{ project.jsonl_count }} transcript files
diff --git a/claude_code_log/tui.py b/claude_code_log/tui.py index 7f1b398a..63c1d5f3 100644 --- a/claude_code_log/tui.py +++ b/claude_code_log/tui.py @@ -41,7 +41,7 @@ class ProjectSelector(App[Path]): border: solid $primary; margin-bottom: 1; } - + DataTable { height: auto; } @@ -50,7 +50,11 @@ class ProjectSelector(App[Path]): TITLE = "Claude Code Log - Project Selector" BINDINGS: ClassVar[list[BindingType]] = [ Binding("q", "quit", "Quit"), + Binding("escape", "quit", "Quit", show=False), Binding("s", "select_project", "Select Project"), + Binding("a", "archive_project", "Archive Project"), + Binding("d", "delete_project", "Delete Project"), + Binding("r", "restore_project", "Restore Project"), ] selected_project_path: reactive[Optional[Path]] = reactive( @@ -58,13 +62,20 @@ class ProjectSelector(App[Path]): ) projects: list[Path] matching_projects: list[Path] - - def __init__(self, projects: list[Path], matching_projects: list[Path]): + archived_projects: set[Path] + + def __init__( + self, + projects: list[Path], + matching_projects: list[Path], + archived_projects: Optional[set[Path]] = None, + ): """Initialize the project selector.""" super().__init__() self.theme = "gruvbox" self.projects = projects self.matching_projects = matching_projects + self.archived_projects = archived_projects or set() def compose(self) -> ComposeResult: """Create the UI layout.""" @@ -105,18 +116,21 @@ def populate_table(self) -> None: # Add rows for project_path in self.projects: + is_archived = project_path in self.archived_projects try: cache_manager = CacheManager(project_path, get_library_version()) project_cache = cache_manager.get_cached_project_data() if not project_cache or not project_cache.sessions: - try: - ensure_fresh_cache(project_path, cache_manager, silent=True) - # Reload cache after ensuring it's fresh - project_cache = cache_manager.get_cached_project_data() - except Exception: - # If cache building fails, continue with empty cache - project_cache = None + if 
not is_archived: + # Only try to build cache for non-archived projects + try: + ensure_fresh_cache(project_path, cache_manager, silent=True) + # Reload cache after ensuring it's fresh + project_cache = cache_manager.get_cached_project_data() + except Exception: + # If cache building fails, continue with empty cache + project_cache = None # Get project info session_count = ( @@ -132,6 +146,10 @@ def populate_table(self) -> None: if project_path in self.matching_projects: project_display = f"→ {project_display[2:]}" + # Add archived indicator + if is_archived: + project_display = f"{project_display} [ARCHIVED]" + table.add_row( project_display, str(session_count), @@ -141,6 +159,8 @@ def populate_table(self) -> None: project_display = f" {project_path.name}" if project_path in self.matching_projects: project_display = f"→ {project_display[2:]}" + if is_archived: + project_display = f"{project_display} [ARCHIVED]" table.add_row( project_display, @@ -151,6 +171,10 @@ def on_data_table_row_highlighted(self, _event: DataTable.RowHighlighted) -> Non """Handle row highlighting (cursor movement) in the projects table.""" self._update_selected_project_from_cursor() + def on_data_table_row_selected(self, _event: DataTable.RowSelected) -> None: + """Handle row selection (Enter key) in the projects table.""" + self.action_select_project() + def _update_selected_project_from_cursor(self) -> None: """Update the selected project based on the current cursor position.""" try: @@ -164,6 +188,10 @@ def _update_selected_project_from_cursor(self) -> None: if project_display.startswith("→"): project_display = project_display[1:].strip() + # Remove the archived indicator if present + if project_display.endswith(" [ARCHIVED]"): + project_display = project_display[:-11].strip() + # Find the matching project path for project_path in self.projects: if project_path.name == project_display: @@ -186,6 +214,185 @@ async def action_quit(self) -> None: """Quit the application with proper 
cleanup.""" self.exit(None) + def _get_project_session_count(self, project_path: Path) -> int: + """Get the number of sessions in a project from cache.""" + try: + cache_manager = CacheManager(project_path, get_library_version()) + project_cache = cache_manager.get_cached_project_data() + if project_cache and project_cache.sessions: + return len(project_cache.sessions) + except Exception: + pass + return 0 + + def _is_project_archived(self, project_path: Path) -> bool: + """Check if a project is archived (no JSONL files exist).""" + return project_path in self.archived_projects + + def check_action( + self, + action: str, + parameters: tuple[object, ...], # noqa: ARG002 + ) -> bool | None: + """Control which actions are available based on context.""" + project_path = self.selected_project_path + is_archived = project_path in self.archived_projects if project_path else False + + if action == "archive_project": + # Can only archive non-archived projects + return project_path is not None and not is_archived + elif action == "restore_project": + # Can only restore archived projects + return project_path is not None and is_archived + elif action == "delete_project": + # Can delete any project + return project_path is not None + + # Allow all other actions (quit, select_project, etc.) 
+ return True + + def action_archive_project(self) -> None: + """Archive all sessions in the selected project.""" + if not self.selected_project_path: + self.notify("No project selected", severity="warning") + return + + if self._is_project_archived(self.selected_project_path): + self.notify("Project is already archived", severity="warning") + return + + session_count = self._get_project_session_count(self.selected_project_path) + self.push_screen( + ArchiveProjectConfirmScreen(self.selected_project_path.name, session_count), + self._handle_archive_project_confirm, + ) + + def _handle_archive_project_confirm(self, confirmed: bool | None) -> None: + """Handle the result of the archive project confirmation dialog.""" + if not confirmed or not self.selected_project_path: + return + + project_path = self.selected_project_path + archived_count = 0 + + # Delete all JSONL files in the project + for jsonl_file in project_path.glob("*.jsonl"): + try: + jsonl_file.unlink() + archived_count += 1 + except Exception as e: + self.notify( + f"Failed to delete {jsonl_file.name}: {e}", severity="error" + ) + + if archived_count > 0: + self.notify(f"Archived {archived_count} sessions") + # Add to archived projects set + self.archived_projects.add(project_path) + self.populate_table() + + def action_delete_project(self) -> None: + """Delete the selected project from cache (and optionally JSONL files).""" + if not self.selected_project_path: + self.notify("No project selected", severity="warning") + return + + is_archived = self._is_project_archived(self.selected_project_path) + session_count = self._get_project_session_count(self.selected_project_path) + self.push_screen( + DeleteProjectConfirmScreen( + self.selected_project_path.name, session_count, is_archived + ), + self._handle_delete_project_confirm, + ) + + def _handle_delete_project_confirm(self, result: Optional[str]) -> None: + """Handle the result of the delete project confirmation dialog.""" + if not result or not 
self.selected_project_path: + return + + project_path = self.selected_project_path + + # Delete cache + cache_manager = CacheManager(project_path, get_library_version()) + cache_manager.clear_cache() + + # If deleting both, also delete JSONL files + if result == "both": + for jsonl_file in project_path.glob("*.jsonl"): + try: + jsonl_file.unlink() + except Exception as e: + self.notify( + f"Failed to delete {jsonl_file.name}: {e}", severity="error" + ) + + # Remove from projects list + if project_path in self.projects: + self.projects.remove(project_path) + if project_path in self.matching_projects: + self.matching_projects.remove(project_path) + if project_path in self.archived_projects: + self.archived_projects.discard(project_path) + + self.notify(f"Deleted project: {project_path.name}") + self.selected_project_path = None + self.populate_table() + + def action_restore_project(self) -> None: + """Restore all archived sessions in the selected project.""" + if not self.selected_project_path: + self.notify("No project selected", severity="warning") + return + + if not self._is_project_archived(self.selected_project_path): + self.notify("Project is not archived", severity="warning") + return + + session_count = self._get_project_session_count(self.selected_project_path) + self.push_screen( + RestoreProjectConfirmScreen(self.selected_project_path.name, session_count), + self._handle_restore_project_confirm, + ) + + def _handle_restore_project_confirm(self, confirmed: bool | None) -> None: + """Handle the result of the restore project confirmation dialog.""" + if not confirmed or not self.selected_project_path: + return + + project_path = self.selected_project_path + cache_manager = CacheManager(project_path, get_library_version()) + project_cache = cache_manager.get_cached_project_data() + + if not project_cache or not project_cache.sessions: + self.notify("No sessions to restore", severity="warning") + return + + # Ensure project directory exists + 
project_path.mkdir(parents=True, exist_ok=True) + + restored_count = 0 + for session_id in project_cache.sessions: + jsonl_path = project_path / f"{session_id}.jsonl" + if not jsonl_path.exists(): + try: + messages = cache_manager.export_session_to_jsonl(session_id) + if messages: + with open(jsonl_path, "w", encoding="utf-8") as f: + for msg in messages: + f.write(msg + "\n") + restored_count += 1 + except Exception as e: + self.notify( + f"Failed to restore {session_id}: {e}", severity="error" + ) + + if restored_count > 0: + self.notify(f"Restored {restored_count} sessions") + # Remove from archived projects set + self.archived_projects.discard(project_path) + self.populate_table() + class MarkdownViewerScreen(ModalScreen[None]): """Modal screen for viewing Markdown content with table of contents.""" @@ -309,6 +516,406 @@ async def action_dismiss(self, result: None = None) -> None: self.dismiss(result) +class ArchiveConfirmScreen(ModalScreen[bool]): + """Modal screen for confirming session archiving (delete JSONL, keep cache).""" + + CSS = """ + ArchiveConfirmScreen { + align: center middle; + } + + #archive-container { + width: 65; + height: auto; + border: solid $warning; + background: $surface; + padding: 1 2; + } + + #archive-title { + text-align: center; + text-style: bold; + color: $warning; + margin-bottom: 1; + } + + #archive-message { + margin-bottom: 1; + } + + #archive-info { + color: $text-muted; + margin-bottom: 1; + } + + #archive-buttons { + text-align: center; + height: auto; + } + """ + + BINDINGS: ClassVar[list[BindingType]] = [ + Binding("y", "confirm", "Yes"), + Binding("enter", "confirm", "Confirm", show=False), + Binding("n", "cancel", "No"), + Binding("escape", "cancel", "Cancel", show=False), + ] + + def __init__(self, session_id: str) -> None: + super().__init__() + self.session_id = session_id + + def compose(self) -> ComposeResult: + with Container(id="archive-container"): + yield Static("Archive Session", id="archive-title") + yield 
Static( + f"Session: {self.session_id[:8]}...", + id="archive-message", + ) + yield Static( + "This will delete the JSONL file.\n" + "The session will be archived and can be restored from cache.", + id="archive-info", + ) + yield Static("\\[Enter/y] Yes \\[Esc/n] No", id="archive-buttons") + + def action_confirm(self) -> None: + self.dismiss(True) + + def action_cancel(self) -> None: + self.dismiss(False) + + +class DeleteConfirmScreen(ModalScreen[Optional[str]]): + """Modal screen for confirming session deletion with smart options.""" + + CSS = """ + DeleteConfirmScreen { + align: center middle; + } + + #delete-container { + width: 65; + height: auto; + border: solid $error; + background: $surface; + padding: 1 2; + } + + #delete-title { + text-align: center; + text-style: bold; + color: $error; + margin-bottom: 1; + } + + #delete-message { + margin-bottom: 1; + } + + #delete-warning { + color: $warning; + margin-bottom: 1; + } + + #delete-buttons { + text-align: center; + height: auto; + } + """ + + BINDINGS: ClassVar[list[BindingType]] = [ + Binding("c", "delete_cache", "Cache only"), + Binding("b", "delete_both", "Both", show=False), + Binding("y", "delete_cache", "Yes", show=False), + Binding("enter", "delete_cache", "Confirm", show=False), + Binding("n", "cancel", "No"), + Binding("escape", "cancel", "Cancel", show=False), + ] + + def __init__(self, session_id: str, is_archived: bool = False) -> None: + super().__init__() + self.session_id = session_id + self.is_archived = is_archived + + def compose(self) -> ComposeResult: + with Container(id="delete-container"): + yield Static("Delete Session", id="delete-title") + yield Static( + f"Session: {self.session_id[:8]}...", + id="delete-message", + ) + if self.is_archived: + yield Static( + "This is an archived session with no JSONL file.\n" + "Deletion is PERMANENT and cannot be undone!", + id="delete-warning", + ) + yield Static( + "\\[Enter/y/c] Delete from cache \\[Esc/n] Cancel", + id="delete-buttons", + ) + 
else: + yield Static( + "Choose what to delete:\n" + "• Cache only: JSONL file remains, session can be re-parsed\n" + "• Both: Delete JSONL file AND cache (permanent!)", + id="delete-warning", + ) + yield Static( + "\\[c] Cache only \\[b] Both (permanent) \\[Esc/n] Cancel", + id="delete-buttons", + ) + + def action_delete_cache(self) -> None: + self.dismiss("cache_only") + + def action_delete_both(self) -> None: + if not self.is_archived: + self.dismiss("both") + + def action_cancel(self) -> None: + self.dismiss(None) + + +class ArchiveProjectConfirmScreen(ModalScreen[bool]): + """Modal screen for confirming project archival.""" + + CSS = """ + ArchiveProjectConfirmScreen { + align: center middle; + } + + #archive-project-container { + width: 65; + height: auto; + border: solid $warning; + background: $surface; + padding: 1 2; + } + + #archive-project-title { + text-align: center; + text-style: bold; + color: $warning; + margin-bottom: 1; + } + + #archive-project-message { + margin-bottom: 1; + } + + #archive-project-info { + color: $text-muted; + margin-bottom: 1; + } + + #archive-project-buttons { + text-align: center; + height: auto; + } + """ + + BINDINGS: ClassVar[list[BindingType]] = [ + Binding("y", "confirm", "Yes"), + Binding("enter", "confirm", "Confirm", show=False), + Binding("n", "cancel", "No"), + Binding("escape", "cancel", "Cancel", show=False), + ] + + def __init__(self, project_name: str, session_count: int) -> None: + super().__init__() + self.project_name = project_name + self.session_count = session_count + + def compose(self) -> ComposeResult: + with Container(id="archive-project-container"): + yield Static("Archive Project", id="archive-project-title") + yield Static( + f"Project: {self.project_name}\nSessions: {self.session_count}", + id="archive-project-message", + ) + yield Static( + "This will delete ALL JSONL files in the project.\n" + "Sessions will be archived and can be restored from cache.", + id="archive-project-info", + ) + yield 
Static("\\[Enter/y] Yes \\[Esc/n] No", id="archive-project-buttons") + + def action_confirm(self) -> None: + self.dismiss(True) + + def action_cancel(self) -> None: + self.dismiss(False) + + +class DeleteProjectConfirmScreen(ModalScreen[Optional[str]]): + """Modal screen for confirming project deletion with smart options.""" + + CSS = """ + DeleteProjectConfirmScreen { + align: center middle; + } + + #delete-project-container { + width: 65; + height: auto; + border: solid $error; + background: $surface; + padding: 1 2; + } + + #delete-project-title { + text-align: center; + text-style: bold; + color: $error; + margin-bottom: 1; + } + + #delete-project-message { + margin-bottom: 1; + } + + #delete-project-warning { + color: $warning; + margin-bottom: 1; + } + + #delete-project-buttons { + text-align: center; + height: auto; + } + """ + + BINDINGS: ClassVar[list[BindingType]] = [ + Binding("c", "delete_cache", "Cache only"), + Binding("b", "delete_both", "Both", show=False), + Binding("y", "delete_cache", "Yes", show=False), + Binding("enter", "delete_cache", "Confirm", show=False), + Binding("n", "cancel", "No"), + Binding("escape", "cancel", "Cancel", show=False), + ] + + def __init__( + self, project_name: str, session_count: int, is_archived: bool = False + ) -> None: + super().__init__() + self.project_name = project_name + self.session_count = session_count + self.is_archived = is_archived + + def compose(self) -> ComposeResult: + with Container(id="delete-project-container"): + yield Static("Delete Project", id="delete-project-title") + yield Static( + f"Project: {self.project_name}\nSessions: {self.session_count}", + id="delete-project-message", + ) + if self.is_archived: + yield Static( + "This is an archived project with no JSONL files.\n" + "Deletion is PERMANENT and cannot be undone!", + id="delete-project-warning", + ) + yield Static( + "\\[Enter/y/c] Delete from cache \\[Esc/n] Cancel", + id="delete-project-buttons", + ) + else: + yield Static( + 
"Choose what to delete:\n" + "• Cache only: JSONL files remain, sessions can be re-parsed\n" + "• Both: Delete ALL JSONL files AND cache (permanent!)", + id="delete-project-warning", + ) + yield Static( + "\\[c] Cache only \\[b] Both (permanent) \\[Esc/n] Cancel", + id="delete-project-buttons", + ) + + def action_delete_cache(self) -> None: + self.dismiss("cache_only") + + def action_delete_both(self) -> None: + if not self.is_archived: + self.dismiss("both") + + def action_cancel(self) -> None: + self.dismiss(None) + + +class RestoreProjectConfirmScreen(ModalScreen[bool]): + """Modal screen for confirming project restoration.""" + + CSS = """ + RestoreProjectConfirmScreen { + align: center middle; + } + + #restore-project-container { + width: 65; + height: auto; + border: solid $success; + background: $surface; + padding: 1 2; + } + + #restore-project-title { + text-align: center; + text-style: bold; + color: $success; + margin-bottom: 1; + } + + #restore-project-message { + margin-bottom: 1; + } + + #restore-project-info { + color: $text-muted; + margin-bottom: 1; + } + + #restore-project-buttons { + text-align: center; + height: auto; + } + """ + + BINDINGS: ClassVar[list[BindingType]] = [ + Binding("y", "confirm", "Yes"), + Binding("enter", "confirm", "Confirm", show=False), + Binding("n", "cancel", "No"), + Binding("escape", "cancel", "Cancel", show=False), + ] + + def __init__(self, project_name: str, session_count: int) -> None: + super().__init__() + self.project_name = project_name + self.session_count = session_count + + def compose(self) -> ComposeResult: + with Container(id="restore-project-container"): + yield Static("Restore Project", id="restore-project-title") + yield Static( + f"Project: {self.project_name}\n" + f"Archived sessions: {self.session_count}", + id="restore-project-message", + ) + yield Static( + "This will restore ALL archived sessions by writing JSONL files.\n" + "The project directory will be created if it doesn't exist.", + 
id="restore-project-info", + ) + yield Static("\\[Enter/y] Yes \\[Esc/n] No", id="restore-project-buttons") + + def action_confirm(self) -> None: + self.dismiss(True) + + def action_cancel(self) -> None: + self.dismiss(False) + + class SessionBrowser(App[Optional[str]]): """Interactive TUI for browsing and managing Claude Code Log sessions.""" @@ -349,6 +956,9 @@ class SessionBrowser(App[Optional[str]]): TITLE = "Claude Code Log - Session Browser" BINDINGS: ClassVar[list[BindingType]] = [ Binding("q", "quit", "Quit"), + Binding("escape", "back_to_projects", "Back", show=False), + Binding("enter", "export_selected", "Open HTML", show=False), + Binding("a", "archive_session", "Archive Session"), Binding("h", "export_selected", "Open HTML page"), Binding("m", "export_markdown", "Open Markdown"), Binding("v", "view_markdown", "View Markdown"), @@ -357,6 +967,8 @@ class SessionBrowser(App[Optional[str]]): Binding("M", "force_export_markdown", "Force Markdown", show=False), Binding("V", "force_view_markdown", "Force View", show=False), Binding("c", "resume_selected", "Resume in Claude Code"), + Binding("r", "restore_jsonl", "Restore JSONL"), + Binding("d", "delete_session", "Delete Session"), Binding("e", "toggle_expanded", "Toggle Expanded View"), Binding("p", "back_to_projects", "Open Project Selector"), Binding("?", "toggle_help", "Help"), @@ -367,14 +979,17 @@ class SessionBrowser(App[Optional[str]]): project_path: Path cache_manager: CacheManager sessions: dict[str, SessionCacheData] + archived_sessions: dict[str, SessionCacheData] - def __init__(self, project_path: Path): + def __init__(self, project_path: Path, is_archived: bool = False): """Initialize the session browser with a project path.""" super().__init__() self.theme = "gruvbox" self.project_path = project_path + self.is_archived_project = is_archived self.cache_manager = CacheManager(project_path, get_library_version()) self.sessions = {} + self.archived_sessions = {} def compose(self) -> ComposeResult: 
"""Create the UI layout.""" @@ -407,8 +1022,32 @@ def on_resize(self) -> None: def load_sessions(self) -> None: """Load session information from cache or build cache if needed.""" + # For archived projects, just load from cache (no JSONL files to check) + if self.is_archived_project: + project_cache = self.cache_manager.get_cached_project_data() + if project_cache and project_cache.sessions: + # All sessions are "archived" for fully archived projects + self.sessions = {} + self.archived_sessions = project_cache.sessions + else: + self.sessions = {} + self.archived_sessions = {} + # Update UI + try: + self.populate_table() + self.update_stats() + except Exception: + pass + return + # Check if we need to rebuild cache by checking for modified files - jsonl_files = list(self.project_path.glob("*.jsonl")) + # Exclude agent files - they are loaded via session references + jsonl_files = [ + f + for f in self.project_path.glob("*.jsonl") + if not f.name.startswith("agent-") + ] + valid_session_ids = {f.stem for f in jsonl_files} modified_files = self.cache_manager.get_modified_files(jsonl_files) # Get cached project data @@ -434,6 +1073,24 @@ def load_sessions(self) -> None: # Don't show notification during startup - just return return + # Only compute archived sessions if there are JSONL files to compare against + # (in test environments, there may be cached sessions but no JSONL files) + if valid_session_ids: + # Load archived sessions (cached but JSONL deleted) + self.archived_sessions = self.cache_manager.get_archived_sessions( + valid_session_ids + ) + + # Filter current sessions to only those with existing JSONL files + self.sessions = { + sid: data + for sid, data in self.sessions.items() + if sid in valid_session_ids + } + else: + # No JSONL files to compare - treat all sessions as current + self.archived_sessions = {} + # Only update UI if we're in app context try: self.populate_table() @@ -473,13 +1130,20 @@ def populate_table(self) -> None: 
table.add_column("Messages", width=messages_width) table.add_column("Tokens", width=tokens_width) - # Sort sessions by start time (newest first) + # Combine current and archived sessions with archived flag + all_sessions: list[tuple[str, SessionCacheData, bool]] = [] + for session_id, session_data in self.sessions.items(): + all_sessions.append((session_id, session_data, False)) + for session_id, session_data in self.archived_sessions.items(): + all_sessions.append((session_id, session_data, True)) + + # Sort all sessions by start time (newest first) sorted_sessions = sorted( - self.sessions.items(), key=lambda x: x[1].first_timestamp, reverse=True + all_sessions, key=lambda x: x[1].first_timestamp, reverse=True ) # Add rows - for session_id, session_data in sorted_sessions: + for session_id, session_data, is_archived in sorted_sessions: # Format timestamps - use short format for narrow terminals use_short_format = terminal_width < 120 start_time = self.format_timestamp( @@ -501,7 +1165,9 @@ def populate_table(self) -> None: or session_data.first_user_message or "No preview available" ) - # Let Textual handle truncation based on column width + # Add [ARCHIVED] indicator for archived sessions + if is_archived: + preview = f"[ARCHIVED] {preview}" table.add_row( session_id[:8], @@ -514,10 +1180,12 @@ def populate_table(self) -> None: def update_stats(self) -> None: """Update the project statistics display.""" - total_sessions = len(self.sessions) - total_messages = sum(s.message_count for s in self.sessions.values()) + # Combine all sessions for stats + all_sessions = {**self.sessions, **self.archived_sessions} + total_sessions = len(all_sessions) + total_messages = sum(s.message_count for s in all_sessions.values()) total_tokens = sum( - s.total_input_tokens + s.total_output_tokens for s in self.sessions.values() + s.total_input_tokens + s.total_output_tokens for s in all_sessions.values() ) # Get project name using shared logic @@ -533,16 +1201,14 @@ def 
update_stats(self) -> None: ) # Find date range - if self.sessions: + if all_sessions: timestamps = [ - s.first_timestamp for s in self.sessions.values() if s.first_timestamp + s.first_timestamp for s in all_sessions.values() if s.first_timestamp ] earliest = min(timestamps) if timestamps else "" latest = ( - max( - s.last_timestamp for s in self.sessions.values() if s.last_timestamp - ) - if self.sessions + max(s.last_timestamp for s in all_sessions.values() if s.last_timestamp) + if all_sessions else "" ) @@ -560,8 +1226,17 @@ def update_stats(self) -> None: # Create spaced layout: Project (left), Sessions info (center), Date range (right) terminal_width = self.size.width + # Show archived count if any + archived_count = len(self.archived_sessions) + if archived_count > 0: + mode_indicator = f"({archived_count} archived)" + else: + mode_indicator = "" + # Project section (left aligned) - project_section = f"[bold]Project:[/bold] {project_name}" + project_section = ( + f"[bold]Project:[/bold] {project_name} {mode_indicator}".strip() + ) # Sessions info section (center) sessions_section = f"[bold]Sessions:[/bold] {total_sessions:,} | [bold]Messages:[/bold] {total_messages:,} | [bold]Tokens:[/bold] {total_tokens:,}" @@ -631,11 +1306,15 @@ def _update_selected_session_from_cursor(self) -> None: if row_data: # Extract session ID from the first column (now just first 8 chars) session_id_display = str(row_data[0]) - # Find the full session ID + # Find the full session ID in both dicts (current first, then archived) for full_session_id in self.sessions.keys(): if full_session_id.startswith(session_id_display): self.selected_session_id = full_session_id - break + return + for full_session_id in self.archived_sessions.keys(): + if full_session_id.startswith(session_id_display): + self.selected_session_id = full_session_id + return except Exception: # If widget not mounted yet or we can't get the row data, don't update selection pass @@ -764,14 +1443,17 @@ def 
_escape_rich_markup(self, text: str) -> str: def _update_expanded_content(self) -> None: """Update the expanded content for the currently selected session.""" - if ( - not self.selected_session_id - or self.selected_session_id not in self.sessions - ): + if not self.selected_session_id: + return + + # Get session data from either current or archived sessions + session_data = self.sessions.get( + self.selected_session_id + ) or self.archived_sessions.get(self.selected_session_id) + if not session_data: return expanded_content = self.query_one("#expanded-content", Static) - session_data = self.sessions[self.selected_session_id] # Build expanded content content_parts: list[str] = [] @@ -841,16 +1523,24 @@ def _ensure_session_file( if not needs_regeneration: return session_file - # Load messages from JSONL files + # Load messages - from cache for archived sessions, from JSONL otherwise try: - messages = load_directory_transcripts( - self.project_path, self.cache_manager, silent=True - ) + is_archived = session_id in self.archived_sessions + if is_archived: + # Load from cache for archived sessions + messages = self.cache_manager.load_session_entries(session_id) + else: + # Load from JSONL files for current sessions + messages = load_directory_transcripts( + self.project_path, self.cache_manager, silent=True + ) if not messages: return None - # Build session title - session_data = self.sessions.get(session_id) + # Build session title - check both dicts + session_data = self.sessions.get(session_id) or self.archived_sessions.get( + session_id + ) project_cache = self.cache_manager.get_cached_project_data() project_name = get_project_display_name( self.project_path.name, @@ -884,9 +1574,12 @@ def _ensure_session_file( def action_toggle_expanded(self) -> None: """Toggle the expanded view for the selected session.""" + if not self.selected_session_id: + return + # Check if session exists in either current or archived sessions if ( - not self.selected_session_id - or 
self.selected_session_id not in self.sessions + self.selected_session_id not in self.sessions + and self.selected_session_id not in self.archived_sessions ): return @@ -909,8 +1602,12 @@ def action_toggle_help(self) -> None: "Claude Code Log - Session Browser\n\n" "Navigation:\n" "- Use arrow keys to select sessions\n" - "- Expanded content updates automatically when visible\n\n" + "- Expanded content updates automatically when visible\n" + "- [ARCHIVED] sessions have no JSONL file (cache only)\n\n" "Actions:\n" + "- a: Archive session (delete JSONL, keep in cache)\n" + "- d: Delete session (with options)\n" + "- r: Restore archived session to JSONL\n" "- e: Toggle expanded view for session\n" "- h: Open selected session's HTML page\n" "- m: Open selected session's Markdown file (in browser)\n" @@ -921,6 +1618,181 @@ def action_toggle_help(self) -> None: ) self.notify(help_text, timeout=10) + def check_action(self, action: str, parameters: tuple[object, ...]) -> bool | None: + """Conditionally enable/disable actions based on selected session type.""" + if not self.selected_session_id: + return True # Allow action, it will handle missing selection + + is_archived = self.selected_session_id in self.archived_sessions + is_current = self.selected_session_id in self.sessions + + # Archive is only available for current sessions (has JSONL file) + if action == "archive_session" and not is_current: + return False + # Resume is only available for current sessions + if action == "resume_selected" and not is_current: + return False + # Restore is only available for archived sessions + if action == "restore_jsonl" and not is_archived: + return False + return True + + def action_restore_jsonl(self) -> None: + """Restore the selected archived session to a JSONL file.""" + if not self.selected_session_id: + self.notify("No session selected", severity="warning") + return + + if self.selected_session_id not in self.archived_sessions: + self.notify( + "Selected session not found in 
archived sessions", severity="error" + ) + return + + try: + # Export messages from cache + messages = self.cache_manager.export_session_to_jsonl( + self.selected_session_id + ) + if not messages: + self.notify("No messages found for session", severity="error") + return + + # Ensure project directory exists (may have been deleted) + self.project_path.mkdir(parents=True, exist_ok=True) + + # Write to JSONL file + output_path = self.project_path / f"{self.selected_session_id}.jsonl" + with open(output_path, "w", encoding="utf-8") as f: + for msg in messages: + f.write(msg + "\n") + + self.notify( + f"Restored {len(messages)} messages to {output_path.name}", + severity="information", + ) + + # Refresh to show the restored session as current + self._refresh_after_restore() + + except Exception as e: + self.notify(f"Error restoring session: {e}", severity="error") + + def _refresh_after_restore(self) -> None: + """Refresh sessions after restoring an archived session.""" + # If this was a fully archived project, it's no longer archived + # since we just restored a JSONL file + if self.is_archived_project: + self.is_archived_project = False + + # Reload sessions - this will now detect the restored JSONL file + self.load_sessions() + + self.notify( + "Session restored! 
It now appears as a current session.", + timeout=5, + ) + + def action_archive_session(self) -> None: + """Archive the selected session (delete JSONL file, keep in cache).""" + if not self.selected_session_id: + self.notify("No session selected", severity="warning") + return + + # Archive only works for current sessions (those with JSONL files) + if self.selected_session_id not in self.sessions: + self.notify( + "Only current sessions can be archived (already archived or not found)", + severity="warning", + ) + return + + # Push archive confirmation screen + self.push_screen( + ArchiveConfirmScreen(session_id=self.selected_session_id), + callback=self._on_archive_confirm, + ) + + def _on_archive_confirm(self, confirmed: Optional[bool]) -> None: + """Handle archive confirmation result.""" + if not confirmed or not self.selected_session_id: + return + + try: + # Delete the JSONL file + jsonl_path = self.project_path / f"{self.selected_session_id}.jsonl" + if jsonl_path.exists(): + jsonl_path.unlink() + self.notify( + f"Session {self.selected_session_id[:8]} archived", + severity="information", + ) + # Reload sessions - this will move the session to archived + self.load_sessions() + else: + self.notify("JSONL file not found", severity="error") + except Exception as e: + self.notify(f"Error archiving session: {e}", severity="error") + + def action_delete_session(self) -> None: + """Delete the selected session with smart options.""" + if not self.selected_session_id: + self.notify("No session selected", severity="warning") + return + + # Check if session exists in either current or archived sessions + if ( + self.selected_session_id not in self.sessions + and self.selected_session_id not in self.archived_sessions + ): + self.notify("Selected session not found", severity="error") + return + + # Determine if this is an archived session (no JSONL to fall back on) + is_archived_session = self.selected_session_id in self.archived_sessions + + # Push confirmation screen + 
self.push_screen( + DeleteConfirmScreen( + session_id=self.selected_session_id, + is_archived=is_archived_session, + ), + callback=self._on_delete_confirm, + ) + + def _on_delete_confirm(self, delete_option: Optional[str]) -> None: + """Handle deletion confirmation result.""" + if not delete_option or not self.selected_session_id: + return + + try: + deleted_what: list[str] = [] + + # Delete JSONL file if requested + if delete_option == "both": + jsonl_path = self.project_path / f"{self.selected_session_id}.jsonl" + if jsonl_path.exists(): + jsonl_path.unlink() + deleted_what.append("JSONL file") + + # Delete from cache + success = self.cache_manager.delete_session(self.selected_session_id) + if success: + deleted_what.append("cache") + + if deleted_what: + self.notify( + f"Session {self.selected_session_id[:8]} deleted ({', '.join(deleted_what)})", + severity="information", + ) + # Clear selection and reload + self.selected_session_id = None + self.load_sessions() + else: + self.notify("Failed to delete session", severity="error") + except Exception as e: + self.notify(f"Error deleting session: {e}", severity="error") + def action_back_to_projects(self) -> None: """Navigate to the project selector.""" # Exit with a special return value to signal we want to go to project selector @@ -932,14 +1804,16 @@ async def action_quit(self) -> None: def run_project_selector( - projects: list[Path], matching_projects: list[Path] + projects: list[Path], + matching_projects: list[Path], + archived_projects: Optional[set[Path]] = None, ) -> Optional[Path]: """Run the project selector TUI and return the selected project path.""" if not projects: print("Error: No projects provided") return None - app = ProjectSelector(projects, matching_projects) + app = ProjectSelector(projects, matching_projects, archived_projects) try: return app.run() except KeyboardInterrupt: @@ -948,9 +1822,20 @@ def run_project_selector( return None -def run_session_browser(project_path: Path) -> 
Optional[str]: +def run_session_browser(project_path: Path, is_archived: bool = False) -> Optional[str]: """Run the session browser TUI for the given project path.""" if not project_path.exists(): + # For archived projects, the directory may not exist but cache may + if is_archived: + # Try to load from cache + try: + cache_manager = CacheManager(project_path, get_library_version()) + project_cache = cache_manager.get_cached_project_data() + if project_cache and project_cache.sessions: + app = SessionBrowser(project_path, is_archived=True) + return app.run() + except Exception: + pass print(f"Error: Project path {project_path} does not exist") return None @@ -961,10 +1846,20 @@ def run_session_browser(project_path: Path) -> Optional[str]: # Check if there are any JSONL files jsonl_files = list(project_path.glob("*.jsonl")) if not jsonl_files: + # For archived projects, check if we have cached sessions + if is_archived: + try: + cache_manager = CacheManager(project_path, get_library_version()) + project_cache = cache_manager.get_cached_project_data() + if project_cache and project_cache.sessions: + app = SessionBrowser(project_path, is_archived=True) + return app.run() + except Exception: + pass print(f"Error: No JSONL transcript files found in {project_path}") return None - app = SessionBrowser(project_path) + app = SessionBrowser(project_path, is_archived=is_archived) try: return app.run() except KeyboardInterrupt: diff --git a/dev-docs/restoring-archived-sessions.md b/dev-docs/restoring-archived-sessions.md new file mode 100644 index 00000000..38582deb --- /dev/null +++ b/dev-docs/restoring-archived-sessions.md @@ -0,0 +1,100 @@ +# Restoring Archived Sessions + +When you run `claude-code-log`, you may see output like: + +```sh +project-name: cached, 3 archived (0.0s) +``` + +This indicates that 3 sessions exist in the cache whose source JSONL files have been deleted. + +## What Are Archived Sessions? 
+
+Archived sessions are sessions preserved in the SQLite cache (`~/.claude/projects/cache.db`) even after their source JSONL files have been deleted. This happens when:
+
+1. Claude Code automatically deletes old JSONL files based on the `cleanupPeriodDays` setting
+2. You manually delete JSONL files from `~/.claude/projects/*/`
+
+The cache stores the complete message data, so full restoration is possible.
+
+## Preventing Automatic Deletion
+
+Claude Code automatically deletes session logs after 30 days by default. To change this, add `cleanupPeriodDays` to your `~/.claude/settings.json`:
+
+```json
+{
+  "cleanupPeriodDays": 99999
+}
+```
+
+This effectively disables automatic cleanup (274 years). You can also set it to a specific number of days.
+
+See Claude Code's [settings documentation](https://docs.anthropic.com/en/docs/claude-code/settings) for more details.
+
+## Using the TUI to Manage Archived Sessions
+
+The easiest way to browse and restore archived sessions is through the interactive TUI.
+
+### Launch the TUI
+
+```bash
+claude-code-log --tui
+```
+
+### Spot Archived Sessions
+
+Archived sessions are listed alongside current sessions, marked with an `[ARCHIVED]` prefix in the preview column. The header shows how many sessions are archived:
+
+```text
+┌─ Claude Code Log ──────────────────────────────────────────────────┐
+│ Project: my-project (3 archived)                                   │
+│ Sessions: 3 │ Messages: 456 │ Tokens: 45,230                       │
+├──────────┼────────────────────────────────────┼─────────┼──────────┤
+│ Session  │ Title                              │ Start   │ Messages │
+├──────────┼────────────────────────────────────┼─────────┼──────────┤
+│ abc123   │ [ARCHIVED] Fix authentication bug  │ 12-01   │ 45       │
+│ def456   │ [ARCHIVED] Add user settings page  │ 11-28   │ 123      │
+│ ghi789   │ [ARCHIVED] Refactor database layer │ 11-15   │ 67       │
+└──────────┴────────────────────────────────────┴─────────┴──────────┘
+  [a] Archive  [r] Restore  [h] HTML  [v] View  [q] Quit
+```
+
+### Restore a Session
+
+1. Identify an archived session by its `[ARCHIVED]` prefix
+2. Navigate to the session you want to restore
+3. 
Press `r` to restore the session to a JSONL file +4. The session will be restored to `~/.claude/projects/{project}/{session-id}.jsonl` +5. Press `a` again to switch back to current sessions and see the restored session + +### View Archived Sessions + +You can also view archived sessions as HTML or Markdown without restoring them: + +- `h` - Open HTML in browser +- `m` - Open Markdown in browser +- `v` - View Markdown in embedded viewer + +## Limitations + +- **Message order**: Messages are ordered by timestamp, which may differ slightly from original file order for same-timestamp entries +- **Whitespace**: Original JSON formatting is not preserved (semantically identical) + +## Manual SQL Approach + +For advanced users, you can also query the cache database directly: + +```bash +sqlite3 ~/.claude/projects/cache.db +``` + +```sql +-- List all sessions +SELECT p.project_path, s.session_id, s.first_timestamp, s.message_count +FROM sessions s +JOIN projects p ON s.project_id = p.id +ORDER BY s.first_timestamp; + +-- Export a session's messages +SELECT content FROM messages WHERE session_id = 'your-session-id' ORDER BY timestamp; +``` diff --git a/test/__snapshots__/test_snapshot_html.ambr b/test/__snapshots__/test_snapshot_html.ambr index 65bf5df3..83cee00c 100644 --- a/test/__snapshots__/test_snapshot_html.ambr +++ b/test/__snapshots__/test_snapshot_html.ambr @@ -461,6 +461,30 @@ .project-sessions details[open] summary { margin-bottom: 10px; } + + /* Archived project styling */ + .project-card.archived { + opacity: 0.6; + background-color: #f5f5f522; + } + + .project-card.archived:hover { + opacity: 0.8; + } + + .archived-badge { + display: inline-block; + background-color: #888; + color: white; + font-size: 0.65em; + font-weight: 600; + padding: 2px 8px; + border-radius: 4px; + margin-left: 10px; + vertical-align: middle; + text-transform: uppercase; + letter-spacing: 0.5px; + } /* Search Bar Styles */ .search-container { position: relative; @@ -1655,7 +1679,9 @@
Users/test/project/beta + (← open combined transcript) +
📁 3 transcript files
@@ -1672,7 +1698,9 @@
alpha + (← open combined transcript) +
📁 5 transcript files
diff --git a/test/test_cache_integration.py b/test/test_cache_integration.py index 2bce66df..6af5b721 100644 --- a/test/test_cache_integration.py +++ b/test/test_cache_integration.py @@ -440,3 +440,424 @@ def test_cache_version_upgrade_scenario(self, setup_test_project): with patch("claude_code_log.cache.get_library_version", return_value="2.0.0"): output = convert_jsonl_to_html(input_path=project_dir, use_cache=True) assert output.exists() + + +class TestArchivedSessionsIntegration: + """Test archived sessions functionality - sessions cached but JSONL deleted.""" + + def test_get_archived_sessions_after_file_deletion( + self, temp_projects_dir, sample_jsonl_data + ): + """Test that sessions become archived when JSONL files are deleted.""" + project_dir = temp_projects_dir / "archived-test" + project_dir.mkdir() + + # Create JSONL file with session data + jsonl_file = project_dir / "session-1.jsonl" + with open(jsonl_file, "w") as f: + for entry in sample_jsonl_data: + f.write(json.dumps(entry) + "\n") + + # Process to populate cache + convert_jsonl_to_html(input_path=project_dir, use_cache=True) + + # Verify session is in cache + cache_manager = CacheManager(project_dir, "1.0.0") + cached_data = cache_manager.get_cached_project_data() + assert cached_data is not None + assert "session-1" in cached_data.sessions + + # Delete the JSONL file + jsonl_file.unlink() + + # Now session-1 should be archived (no valid session IDs) + valid_session_ids: set[str] = set() # No JSONL files left + archived = cache_manager.get_archived_sessions(valid_session_ids) + + assert "session-1" in archived + assert archived["session-1"].message_count > 0 + assert archived["session-1"].first_timestamp == "2023-01-01T10:00:00Z" + + def test_get_archived_sessions_with_some_files_remaining( + self, temp_projects_dir, sample_jsonl_data + ): + """Test archived sessions when only some JSONL files are deleted.""" + project_dir = temp_projects_dir / "partial-archived" + project_dir.mkdir() + + # 
Create two session files + for session_id in ["session-1", "session-2"]: + jsonl_file = project_dir / f"{session_id}.jsonl" + with open(jsonl_file, "w") as f: + for entry in sample_jsonl_data: + entry_copy = entry.copy() + if "sessionId" in entry_copy: + entry_copy["sessionId"] = session_id + f.write(json.dumps(entry_copy) + "\n") + + # Process to populate cache + convert_jsonl_to_html(input_path=project_dir, use_cache=True) + + # Delete only session-1 + (project_dir / "session-1.jsonl").unlink() + + # session-2 should be valid, session-1 should be archived + valid_session_ids = {"session-2"} + cache_manager = CacheManager(project_dir, "1.0.0") + archived = cache_manager.get_archived_sessions(valid_session_ids) + + assert "session-1" in archived + assert "session-2" not in archived + + def test_export_session_to_jsonl(self, temp_projects_dir, sample_jsonl_data): + """Test exporting session messages for JSONL restoration.""" + project_dir = temp_projects_dir / "export-test" + project_dir.mkdir() + + # Create JSONL file + jsonl_file = project_dir / "session-1.jsonl" + with open(jsonl_file, "w") as f: + for entry in sample_jsonl_data: + f.write(json.dumps(entry) + "\n") + + # Process to populate cache + convert_jsonl_to_html(input_path=project_dir, use_cache=True) + + # Export messages from cache + cache_manager = CacheManager(project_dir, "1.0.0") + exported_messages = cache_manager.export_session_to_jsonl("session-1") + + # Should have exported messages (not summary which has no sessionId) + assert len(exported_messages) >= 2 # user + assistant messages + + # Each message should be valid JSON + for msg_json in exported_messages: + parsed = json.loads(msg_json) + assert "type" in parsed + assert parsed["sessionId"] == "session-1" + + def test_load_session_entries_for_rendering( + self, temp_projects_dir, sample_jsonl_data + ): + """Test loading session entries from cache for HTML/Markdown rendering.""" + project_dir = temp_projects_dir / "load-entries-test" + 
project_dir.mkdir() + + # Create JSONL file + jsonl_file = project_dir / "session-1.jsonl" + with open(jsonl_file, "w") as f: + for entry in sample_jsonl_data: + f.write(json.dumps(entry) + "\n") + + # Process to populate cache + convert_jsonl_to_html(input_path=project_dir, use_cache=True) + + # Load entries from cache + cache_manager = CacheManager(project_dir, "1.0.0") + entries = cache_manager.load_session_entries("session-1") + + # Should have TranscriptEntry objects + assert len(entries) >= 2 + + # Check that entries are proper types + entry_types = [e.type for e in entries] + assert "user" in entry_types + assert "assistant" in entry_types + + def test_full_archive_and_restore_workflow( + self, temp_projects_dir, sample_jsonl_data + ): + """Test the full workflow: cache -> delete -> archive -> restore.""" + project_dir = temp_projects_dir / "full-workflow" + project_dir.mkdir() + + # Step 1: Create JSONL file and cache it + original_file = project_dir / "session-1.jsonl" + with open(original_file, "w") as f: + for entry in sample_jsonl_data: + f.write(json.dumps(entry) + "\n") + + convert_jsonl_to_html(input_path=project_dir, use_cache=True) + + # Verify cache populated + cache_manager = CacheManager(project_dir, "1.0.0") + cached_data = cache_manager.get_cached_project_data() + assert cached_data is not None + original_message_count = cached_data.sessions["session-1"].message_count + + # Step 2: Delete the JSONL file + original_file.unlink() + assert not original_file.exists() + + # Step 3: Verify session is now archived + archived = cache_manager.get_archived_sessions(set()) + assert "session-1" in archived + + # Step 4: Restore the session from cache + exported_messages = cache_manager.export_session_to_jsonl("session-1") + restored_file = project_dir / "session-1.jsonl" + with open(restored_file, "w") as f: + for msg in exported_messages: + f.write(msg + "\n") + + # Step 5: Verify the restored file exists and session is no longer archived + assert 
restored_file.exists() + + valid_session_ids = {"session-1"} + archived_after_restore = cache_manager.get_archived_sessions(valid_session_ids) + assert "session-1" not in archived_after_restore + + # Step 6: Verify restored content is valid by re-processing + convert_jsonl_to_html(input_path=project_dir, use_cache=True) + cached_data = cache_manager.get_cached_project_data() + # Message count should be preserved + assert cached_data is not None + assert cached_data.sessions["session-1"].message_count == original_message_count + + def test_archived_session_count_in_converter( + self, temp_projects_dir, sample_jsonl_data, capsys + ): + """Test that archived session count is reported in converter output.""" + project_dir = temp_projects_dir / "count-test" + project_dir.mkdir() + + # Create two sessions so one remains after deletion + for session_id in ["session-1", "session-2"]: + jsonl_file = project_dir / f"{session_id}.jsonl" + with open(jsonl_file, "w") as f: + for entry in sample_jsonl_data: + entry_copy = entry.copy() + if "sessionId" in entry_copy: + entry_copy["sessionId"] = session_id + f.write(json.dumps(entry_copy) + "\n") + + # Process to cache (as part of all-projects hierarchy) + process_projects_hierarchy(projects_path=temp_projects_dir, use_cache=True) + + # Delete only session-1, keeping session-2 so project is still found + (project_dir / "session-1.jsonl").unlink() + + # Process again - should report archived sessions + process_projects_hierarchy( + projects_path=temp_projects_dir, use_cache=True, silent=False + ) + + captured = capsys.readouterr() + # Output should mention archived sessions + assert "archived" in captured.out.lower() + + def test_load_entries_preserves_message_order( + self, temp_projects_dir, sample_jsonl_data + ): + """Test that loaded entries preserve chronological order.""" + project_dir = temp_projects_dir / "order-test" + project_dir.mkdir() + + # Create JSONL file + jsonl_file = project_dir / "session-1.jsonl" + with 
open(jsonl_file, "w") as f: + for entry in sample_jsonl_data: + f.write(json.dumps(entry) + "\n") + + # Process to populate cache + convert_jsonl_to_html(input_path=project_dir, use_cache=True) + + # Load entries from cache + cache_manager = CacheManager(project_dir, "1.0.0") + entries = cache_manager.load_session_entries("session-1") + + # Filter to entries with timestamps and extract them + timestamps: list[str] = [] + for e in entries: + if hasattr(e, "timestamp") and e.timestamp: + timestamps.append(str(e.timestamp)) + + # Verify chronological order (ISO timestamps are lexicographically sortable) + assert timestamps == sorted(timestamps) + + def test_export_empty_session_returns_empty_list(self, temp_projects_dir): + """Test that exporting a non-existent session returns empty list.""" + project_dir = temp_projects_dir / "empty-export" + project_dir.mkdir() + + # Create a dummy JSONL to initialize the project + jsonl_file = project_dir / "dummy.jsonl" + jsonl_file.write_text("{}\n") + + cache_manager = CacheManager(project_dir, "1.0.0") + + # Export non-existent session + exported = cache_manager.export_session_to_jsonl("non-existent-session") + assert exported == [] + + # Load entries for non-existent session + entries = cache_manager.load_session_entries("non-existent-session") + assert entries == [] + + def test_export_session_produces_compact_json( + self, temp_projects_dir, sample_jsonl_data + ): + """Test that exported JSONL has compact JSON format (no spaces after separators).""" + project_dir = temp_projects_dir / "compact-json-test" + project_dir.mkdir() + + # Create JSONL file + jsonl_file = project_dir / "session-1.jsonl" + with open(jsonl_file, "w") as f: + for entry in sample_jsonl_data: + f.write(json.dumps(entry) + "\n") + + # Process to populate cache + convert_jsonl_to_html(input_path=project_dir, use_cache=True) + + # Export messages + cache_manager = CacheManager(project_dir, "1.0.0") + exported_messages = 
cache_manager.export_session_to_jsonl("session-1") + + # Each message should be compact JSON (no spaces after : or ,) + for msg_json in exported_messages: + # Should not have ": " (colon-space) pattern except in string values + # Check by ensuring re-serialization produces same result + parsed = json.loads(msg_json) + compact_reserialized = json.dumps(parsed, separators=(",", ":")) + assert msg_json == compact_reserialized, ( + f"JSON should be compact format.\n" + f"Got: {msg_json[:100]}...\n" + f"Expected: {compact_reserialized[:100]}..." + ) + + def test_delete_session_from_cache(self, temp_projects_dir, sample_jsonl_data): + """Test deleting a session from cache.""" + project_dir = temp_projects_dir / "delete-session-test" + project_dir.mkdir() + + # Create JSONL file + jsonl_file = project_dir / "session-1.jsonl" + with open(jsonl_file, "w") as f: + for entry in sample_jsonl_data: + f.write(json.dumps(entry) + "\n") + + # Process to populate cache + convert_jsonl_to_html(input_path=project_dir, use_cache=True) + + # Verify session exists in cache + cache_manager = CacheManager(project_dir, "1.0.0") + cached_data = cache_manager.get_cached_project_data() + assert cached_data is not None + assert "session-1" in cached_data.sessions + + # Delete the session + result = cache_manager.delete_session("session-1") + assert result is True + + # Verify session is gone from cache + cached_data = cache_manager.get_cached_project_data() + assert cached_data is not None + assert "session-1" not in cached_data.sessions + + # Export should return empty + exported = cache_manager.export_session_to_jsonl("session-1") + assert exported == [] + + def test_delete_nonexistent_session(self, temp_projects_dir): + """Test deleting a session that doesn't exist returns False.""" + project_dir = temp_projects_dir / "delete-nonexistent" + project_dir.mkdir() + + # Create a dummy JSONL to initialize the project + jsonl_file = project_dir / "dummy.jsonl" + jsonl_file.write_text("{}\n") + + 
cache_manager = CacheManager(project_dir, "1.0.0") + + # Delete non-existent session + result = cache_manager.delete_session("non-existent-session") + assert result is False + + def test_delete_project_from_cache(self, temp_projects_dir, sample_jsonl_data): + """Test deleting an entire project from cache.""" + project_dir = temp_projects_dir / "delete-project-test" + project_dir.mkdir() + + # Create JSONL file + jsonl_file = project_dir / "session-1.jsonl" + with open(jsonl_file, "w") as f: + for entry in sample_jsonl_data: + f.write(json.dumps(entry) + "\n") + + # Process to populate cache + convert_jsonl_to_html(input_path=project_dir, use_cache=True) + + # Verify project exists in cache + cache_manager = CacheManager(project_dir, "1.0.0") + cached_data = cache_manager.get_cached_project_data() + assert cached_data is not None + + # Delete the project + result = cache_manager.delete_project() + assert result is True + + # Cache manager should no longer have valid project ID + cached_data = cache_manager.get_cached_project_data() + assert cached_data is None + + +class TestGetAllCachedProjects: + """Tests for get_all_cached_projects() function.""" + + def test_get_all_cached_projects_finds_active_and_archived( + self, temp_projects_dir, sample_jsonl_data + ): + """Test finding both active and archived projects.""" + from claude_code_log.cache import get_all_cached_projects + + # Create two projects - one active, one that will be archived + active_dir = temp_projects_dir / "active-project" + active_dir.mkdir() + archived_dir = temp_projects_dir / "archived-project" + archived_dir.mkdir() + + # Create JSONL files in both + for proj_dir in [active_dir, archived_dir]: + jsonl_file = proj_dir / "session-1.jsonl" + with open(jsonl_file, "w") as f: + for entry in sample_jsonl_data: + f.write(json.dumps(entry) + "\n") + + # Process both projects to populate cache + convert_jsonl_to_html(input_path=active_dir, use_cache=True) + 
convert_jsonl_to_html(input_path=archived_dir, use_cache=True) + + # Delete JSONL from "archived" project to simulate archival + (archived_dir / "session-1.jsonl").unlink() + + # Get all cached projects + projects = get_all_cached_projects(temp_projects_dir) + + # Should find both projects + project_paths = {p[0] for p in projects} + assert str(active_dir) in project_paths + assert str(archived_dir) in project_paths + + # Check is_archived flag + for project_path, is_archived in projects: + if project_path == str(active_dir): + assert is_archived is False + elif project_path == str(archived_dir): + assert is_archived is True + + def test_get_all_cached_projects_empty_dir(self, temp_projects_dir): + """Test get_all_cached_projects with no cache.""" + from claude_code_log.cache import get_all_cached_projects + + # No cache.db exists + projects = get_all_cached_projects(temp_projects_dir) + assert projects == [] + + def test_get_all_cached_projects_nonexistent_dir(self, tmp_path): + """Test get_all_cached_projects with nonexistent directory.""" + from claude_code_log.cache import get_all_cached_projects + + nonexistent = tmp_path / "does-not-exist" + projects = get_all_cached_projects(nonexistent) + assert projects == [] diff --git a/test/test_tui.py b/test/test_tui.py index 30856317..9009a490 100644 --- a/test/test_tui.py +++ b/test/test_tui.py @@ -13,7 +13,7 @@ from textual.widgets import DataTable, Label from claude_code_log.cache import CacheManager, SessionCacheData -from claude_code_log.tui import SessionBrowser, run_session_browser +from claude_code_log.tui import ProjectSelector, SessionBrowser, run_session_browser @pytest.fixture @@ -87,11 +87,23 @@ def temp_project_dir(): }, ] - # Write test data to JSONL file - jsonl_file = project_path / "test-transcript.jsonl" - with open(jsonl_file, "w", encoding="utf-8") as f: + # Write test data to JSONL files - one per session (matching real-world usage) + # Session 123 entries + session_123_file = project_path / 
"session-123.jsonl" + with open(session_123_file, "w", encoding="utf-8") as f: for entry in test_data: - f.write(json.dumps(entry) + "\n") + if entry.get("sessionId") == "session-123": + f.write(json.dumps(entry) + "\n") + + # Session 456 entries (includes summary) + session_456_file = project_path / "session-456.jsonl" + with open(session_456_file, "w", encoding="utf-8") as f: + for entry in test_data: + if ( + entry.get("sessionId") == "session-456" + or entry.get("type") == "summary" + ): + f.write(json.dumps(entry) + "\n") yield project_path @@ -907,3 +919,1017 @@ async def test_empty_project_handling(self): stats = cast(Label, app.query_one("#stats")) stats_text = str(stats.content) assert "Sessions:[/bold] 0" in stats_text + + @pytest.mark.asyncio + async def test_archived_project_loads_archived_sessions(self): + """Test that an archived project (no JSONL files) loads sessions in archived_sessions.""" + with tempfile.TemporaryDirectory() as temp_dir: + project_path = Path(temp_dir) + + # Create empty JSONL file to initialize + jsonl_file = project_path / "session-123.jsonl" + jsonl_file.touch() + + # Create app with is_archived=True (simulating archived project) + app = SessionBrowser(project_path, is_archived=True) + + # Mock the cache manager to return some sessions + mock_session_data = { + "session-123": SessionCacheData( + session_id="session-123", + summary="Archived session", + first_timestamp="2025-01-01T10:00:00Z", + last_timestamp="2025-01-01T10:01:00Z", + message_count=5, + first_user_message="Hello from archived", + total_input_tokens=100, + total_output_tokens=200, + ), + } + + with ( + patch.object( + app.cache_manager, "get_cached_project_data" + ) as mock_cache, + ): + mock_cache.return_value = Mock( + sessions=mock_session_data, + working_directories=[str(project_path)], + ) + + async with app.run_test() as pilot: + await pilot.pause(0.2) + + # Manually call load_sessions (since mocking) + app.load_sessions() + + # Sessions should be in 
archived_sessions, not sessions + assert len(app.archived_sessions) > 0 + assert len(app.sessions) == 0 + + # Stats should show "archived" count + stats = cast(Label, app.query_one("#stats")) + stats_text = str(stats.content) + assert "archived" in stats_text.lower() + + +@pytest.mark.tui +class TestUnifiedSessionList: + """Tests for the unified session list showing both current and archived sessions.""" + + @pytest.mark.asyncio + async def test_unified_list_shows_both_current_and_archived(self): + """Test that both current and archived sessions appear in the same list.""" + with tempfile.TemporaryDirectory() as temp_dir: + project_path = Path(temp_dir) + jsonl_file = project_path / "session-current.jsonl" + jsonl_file.write_text('{"type":"user"}\n', encoding="utf-8") + + app = SessionBrowser(project_path) + + current_session = { + "session-current": SessionCacheData( + session_id="session-current", + first_timestamp="2025-01-02T10:00:00Z", + last_timestamp="2025-01-02T10:01:00Z", + message_count=1, + first_user_message="Current session", + total_input_tokens=10, + total_output_tokens=10, + ), + } + archived_session = { + "session-archived": SessionCacheData( + session_id="session-archived", + first_timestamp="2025-01-01T10:00:00Z", + last_timestamp="2025-01-01T10:01:00Z", + message_count=1, + first_user_message="Archived session", + total_input_tokens=10, + total_output_tokens=10, + ), + } + + async with app.run_test() as pilot: + await pilot.pause(0.2) + + app.sessions = current_session + app.archived_sessions = archived_session + app.populate_table() + + # Get the table + table = cast(DataTable, app.query_one("#sessions-table")) + + # Should have 2 rows (both sessions in one list) + assert table.row_count == 2 + + @pytest.mark.asyncio + async def test_unified_list_sorted_by_timestamp_newest_first(self): + """Test that sessions are sorted by timestamp with newest first.""" + with tempfile.TemporaryDirectory() as temp_dir: + project_path = Path(temp_dir) + 
jsonl_file = project_path / "session-old.jsonl" + jsonl_file.write_text('{"type":"user"}\n', encoding="utf-8") + + app = SessionBrowser(project_path) + + # Create sessions with different timestamps + old_session = { + "session-old": SessionCacheData( + session_id="session-old", + first_timestamp="2025-01-01T10:00:00Z", + last_timestamp="2025-01-01T10:01:00Z", + message_count=1, + first_user_message="Old session", + total_input_tokens=10, + total_output_tokens=10, + ), + } + new_archived_session = { + "session-new": SessionCacheData( + session_id="session-new", + first_timestamp="2025-01-03T10:00:00Z", + last_timestamp="2025-01-03T10:01:00Z", + message_count=1, + first_user_message="New archived session", + total_input_tokens=10, + total_output_tokens=10, + ), + } + + async with app.run_test() as pilot: + await pilot.pause(0.2) + + app.sessions = old_session + app.archived_sessions = new_archived_session + app.populate_table() + + table = cast(DataTable, app.query_one("#sessions-table")) + + # Get first row - should be the newest (archived) session + first_row = table.get_row_at(0) + # Session ID column shows first 8 chars + assert str(first_row[0]).startswith("session-") + # Title should have [ARCHIVED] prefix since newest is archived + assert "[ARCHIVED]" in str(first_row[1]) + + @pytest.mark.asyncio + async def test_archived_sessions_have_archived_indicator(self): + """Test that archived sessions display [ARCHIVED] indicator in title.""" + with tempfile.TemporaryDirectory() as temp_dir: + project_path = Path(temp_dir) + jsonl_file = project_path / "session-current.jsonl" + jsonl_file.write_text('{"type":"user"}\n', encoding="utf-8") + + app = SessionBrowser(project_path) + + current_session = { + "session-current": SessionCacheData( + session_id="session-current", + first_timestamp="2025-01-02T10:00:00Z", + last_timestamp="2025-01-02T10:01:00Z", + message_count=1, + first_user_message="Current session message", + total_input_tokens=10, + total_output_tokens=10, + 
), + } + archived_session = { + "session-archived": SessionCacheData( + session_id="session-archived", + first_timestamp="2025-01-01T10:00:00Z", + last_timestamp="2025-01-01T10:01:00Z", + message_count=1, + first_user_message="Archived session message", + total_input_tokens=10, + total_output_tokens=10, + ), + } + + async with app.run_test() as pilot: + await pilot.pause(0.2) + + app.sessions = current_session + app.archived_sessions = archived_session + app.populate_table() + + table = cast(DataTable, app.query_one("#sessions-table")) + + # Check both rows + found_archived_indicator = False + found_current_without_indicator = False + + for row_idx in range(table.row_count): + row = table.get_row_at(row_idx) + title = str(row[1]) + if "[ARCHIVED]" in title: + found_archived_indicator = True + assert "Archived session message" in title + else: + found_current_without_indicator = True + assert "Current session message" in title + + assert found_archived_indicator, ( + "Archived session should have [ARCHIVED] indicator" + ) + assert found_current_without_indicator, ( + "Current session should not have [ARCHIVED] indicator" + ) + + @pytest.mark.asyncio + async def test_stats_show_combined_totals(self): + """Test that stats display combined totals from both current and archived sessions.""" + with tempfile.TemporaryDirectory() as temp_dir: + project_path = Path(temp_dir) + jsonl_file = project_path / "session-current.jsonl" + jsonl_file.write_text('{"type":"user"}\n', encoding="utf-8") + + app = SessionBrowser(project_path) + + current_session = { + "session-current": SessionCacheData( + session_id="session-current", + first_timestamp="2025-01-02T10:00:00Z", + last_timestamp="2025-01-02T10:01:00Z", + message_count=5, + first_user_message="Current", + total_input_tokens=100, + total_output_tokens=200, + ), + } + archived_session = { + "session-archived": SessionCacheData( + session_id="session-archived", + first_timestamp="2025-01-01T10:00:00Z", + 
last_timestamp="2025-01-01T10:01:00Z", + message_count=3, + first_user_message="Archived", + total_input_tokens=50, + total_output_tokens=100, + ), + } + + async with app.run_test() as pilot: + await pilot.pause(0.2) + + app.sessions = current_session + app.archived_sessions = archived_session + app.update_stats() + + stats = cast(Label, app.query_one("#stats")) + stats_text = str(stats.content) + + # Should show combined sessions count (2) + assert "Sessions:[/bold] 2" in stats_text + # Should show combined messages count (5 + 3 = 8) + assert "Messages:[/bold] 8" in stats_text + # Should show combined tokens (100+200+50+100 = 450) + assert "Tokens:[/bold] 450" in stats_text + # Should indicate archived count + assert "1 archived" in stats_text + + +@pytest.mark.tui +class TestArchiveConfirmScreen: + """Tests for archive confirmation via the archive action.""" + + @pytest.mark.asyncio + async def test_archive_confirm_y_key_deletes_file(self): + """Test confirming archive with 'y' key deletes the JSONL file.""" + with tempfile.TemporaryDirectory() as temp_dir: + project_path = Path(temp_dir) + jsonl_file = project_path / "session-123.jsonl" + jsonl_file.write_text('{"type":"user"}\n', encoding="utf-8") + + app = SessionBrowser(project_path) + + mock_session_data = { + "session-123": SessionCacheData( + session_id="session-123", + first_timestamp="2025-01-01T10:00:00Z", + last_timestamp="2025-01-01T10:01:00Z", + message_count=1, + first_user_message="Test", + total_input_tokens=10, + total_output_tokens=10, + ), + } + + async with app.run_test() as pilot: + await pilot.pause(0.2) + + app.sessions = mock_session_data + app.selected_session_id = "session-123" + + assert jsonl_file.exists() + + # Trigger archive (opens modal) + await pilot.press("a") + await pilot.pause(0.1) + + # Confirm with 'y' + await pilot.press("y") + await pilot.pause(0.1) + + assert not jsonl_file.exists() + + @pytest.mark.asyncio + async def test_archive_confirm_enter_key_deletes_file(self): + 
"""Test confirming archive with Enter key deletes the JSONL file.""" + with tempfile.TemporaryDirectory() as temp_dir: + project_path = Path(temp_dir) + jsonl_file = project_path / "session-123.jsonl" + jsonl_file.write_text('{"type":"user"}\n', encoding="utf-8") + + app = SessionBrowser(project_path) + + mock_session_data = { + "session-123": SessionCacheData( + session_id="session-123", + first_timestamp="2025-01-01T10:00:00Z", + last_timestamp="2025-01-01T10:01:00Z", + message_count=1, + first_user_message="Test", + total_input_tokens=10, + total_output_tokens=10, + ), + } + + async with app.run_test() as pilot: + await pilot.pause(0.2) + + app.sessions = mock_session_data + app.selected_session_id = "session-123" + + assert jsonl_file.exists() + + # Trigger archive (opens modal) + await pilot.press("a") + await pilot.pause(0.1) + + # Confirm with Enter + await pilot.press("enter") + await pilot.pause(0.1) + + assert not jsonl_file.exists() + + @pytest.mark.asyncio + async def test_archive_cancel_n_key_keeps_file(self): + """Test cancelling archive with 'n' key keeps the JSONL file.""" + with tempfile.TemporaryDirectory() as temp_dir: + project_path = Path(temp_dir) + jsonl_file = project_path / "session-123.jsonl" + jsonl_file.write_text('{"type":"user"}\n', encoding="utf-8") + + app = SessionBrowser(project_path) + + mock_session_data = { + "session-123": SessionCacheData( + session_id="session-123", + first_timestamp="2025-01-01T10:00:00Z", + last_timestamp="2025-01-01T10:01:00Z", + message_count=1, + first_user_message="Test", + total_input_tokens=10, + total_output_tokens=10, + ), + } + + async with app.run_test() as pilot: + await pilot.pause(0.2) + + app.sessions = mock_session_data + app.selected_session_id = "session-123" + + # Trigger archive (opens modal) + await pilot.press("a") + await pilot.pause(0.1) + + # Cancel with 'n' + await pilot.press("n") + await pilot.pause(0.1) + + # File should still exist + assert jsonl_file.exists() + + 
@pytest.mark.asyncio + async def test_archive_cancel_escape_key_keeps_file(self): + """Test cancelling archive with Escape key keeps the JSONL file.""" + with tempfile.TemporaryDirectory() as temp_dir: + project_path = Path(temp_dir) + jsonl_file = project_path / "session-123.jsonl" + jsonl_file.write_text('{"type":"user"}\n', encoding="utf-8") + + app = SessionBrowser(project_path) + + mock_session_data = { + "session-123": SessionCacheData( + session_id="session-123", + first_timestamp="2025-01-01T10:00:00Z", + last_timestamp="2025-01-01T10:01:00Z", + message_count=1, + first_user_message="Test", + total_input_tokens=10, + total_output_tokens=10, + ), + } + + async with app.run_test() as pilot: + await pilot.pause(0.2) + + app.sessions = mock_session_data + app.selected_session_id = "session-123" + + # Trigger archive (opens modal) + await pilot.press("a") + await pilot.pause(0.1) + + # Cancel with Escape + await pilot.press("escape") + await pilot.pause(0.1) + + # File should still exist + assert jsonl_file.exists() + + +@pytest.mark.tui +class TestDeleteConfirmScreen: + """Tests for delete confirmation with smart options.""" + + @pytest.mark.asyncio + async def test_delete_current_session_cache_only_keeps_jsonl(self): + """Test delete with 'c' (cache only) keeps JSONL file.""" + with tempfile.TemporaryDirectory() as temp_dir: + project_path = Path(temp_dir) + jsonl_file = project_path / "session-123.jsonl" + jsonl_file.write_text('{"type":"user"}\n', encoding="utf-8") + + app = SessionBrowser(project_path) + + mock_session_data = { + "session-123": SessionCacheData( + session_id="session-123", + first_timestamp="2025-01-01T10:00:00Z", + last_timestamp="2025-01-01T10:01:00Z", + message_count=1, + first_user_message="Test", + total_input_tokens=10, + total_output_tokens=10, + ), + } + + with patch.object( + app.cache_manager, "delete_session", return_value=True + ) as mock_delete: + async with app.run_test() as pilot: + await pilot.pause(0.2) + + app.sessions = 
mock_session_data + app.selected_session_id = "session-123" + + # Trigger delete (opens modal) + await pilot.press("d") + await pilot.pause(0.1) + + # Choose cache only with 'c' + await pilot.press("c") + await pilot.pause(0.1) + + # JSONL should still exist + assert jsonl_file.exists() + mock_delete.assert_called_once_with("session-123") + + @pytest.mark.asyncio + async def test_delete_current_session_both_deletes_jsonl(self): + """Test delete with 'b' (both) deletes JSONL file.""" + with tempfile.TemporaryDirectory() as temp_dir: + project_path = Path(temp_dir) + jsonl_file = project_path / "session-123.jsonl" + jsonl_file.write_text('{"type":"user"}\n', encoding="utf-8") + + app = SessionBrowser(project_path) + + mock_session_data = { + "session-123": SessionCacheData( + session_id="session-123", + first_timestamp="2025-01-01T10:00:00Z", + last_timestamp="2025-01-01T10:01:00Z", + message_count=1, + first_user_message="Test", + total_input_tokens=10, + total_output_tokens=10, + ), + } + + with patch.object( + app.cache_manager, "delete_session", return_value=True + ) as mock_delete: + async with app.run_test() as pilot: + await pilot.pause(0.2) + + app.sessions = mock_session_data + app.selected_session_id = "session-123" + + assert jsonl_file.exists() + + # Trigger delete (opens modal) + await pilot.press("d") + await pilot.pause(0.1) + + # Choose both with 'b' + await pilot.press("b") + await pilot.pause(0.1) + + # JSONL should be deleted + assert not jsonl_file.exists() + mock_delete.assert_called_once_with("session-123") + + @pytest.mark.asyncio + async def test_delete_archived_session_with_enter_key(self): + """Test deleting archived session with Enter key.""" + with tempfile.TemporaryDirectory() as temp_dir: + project_path = Path(temp_dir) + jsonl_file = project_path / "session-123.jsonl" + jsonl_file.write_text('{"type":"user"}\n', encoding="utf-8") + + app = SessionBrowser(project_path) + + mock_archived_data = { + "session-archived": SessionCacheData( + 
session_id="session-archived", + first_timestamp="2025-01-01T10:00:00Z", + last_timestamp="2025-01-01T10:01:00Z", + message_count=1, + first_user_message="Test", + total_input_tokens=10, + total_output_tokens=10, + ), + } + + with patch.object( + app.cache_manager, "delete_session", return_value=True + ) as mock_delete: + async with app.run_test() as pilot: + await pilot.pause(0.2) + + app.sessions = {} + app.archived_sessions = mock_archived_data + app.selected_session_id = "session-archived" + + # Trigger delete (opens modal) + await pilot.press("d") + await pilot.pause(0.1) + + # Confirm with Enter (for archived sessions) + await pilot.press("enter") + await pilot.pause(0.1) + + mock_delete.assert_called_once_with("session-archived") + + @pytest.mark.asyncio + async def test_delete_cancel_n_key(self): + """Test cancelling delete with 'n' key.""" + with tempfile.TemporaryDirectory() as temp_dir: + project_path = Path(temp_dir) + jsonl_file = project_path / "session-123.jsonl" + jsonl_file.write_text('{"type":"user"}\n', encoding="utf-8") + + app = SessionBrowser(project_path) + + mock_session_data = { + "session-123": SessionCacheData( + session_id="session-123", + first_timestamp="2025-01-01T10:00:00Z", + last_timestamp="2025-01-01T10:01:00Z", + message_count=1, + first_user_message="Test", + total_input_tokens=10, + total_output_tokens=10, + ), + } + + with patch.object( + app.cache_manager, "delete_session", return_value=True + ) as mock_delete: + async with app.run_test() as pilot: + await pilot.pause(0.2) + + app.sessions = mock_session_data + app.selected_session_id = "session-123" + + # Trigger delete (opens modal) + await pilot.press("d") + await pilot.pause(0.1) + + # Cancel with 'n' + await pilot.press("n") + await pilot.pause(0.1) + + # Should not have deleted + mock_delete.assert_not_called() + assert jsonl_file.exists() + + +@pytest.mark.tui +class TestArchiveActionEdgeCases: + """Edge case tests for the archive session action.""" + + 
@pytest.mark.asyncio + async def test_archive_action_no_selection(self): + """Test archive action with no session selected shows warning.""" + with tempfile.TemporaryDirectory() as temp_dir: + project_path = Path(temp_dir) + jsonl_file = project_path / "session-123.jsonl" + jsonl_file.write_text('{"type":"user"}\n', encoding="utf-8") + + app = SessionBrowser(project_path) + + async with app.run_test() as pilot: + await pilot.pause(0.2) + + # Ensure no session is selected + app.selected_session_id = None + + # Try to archive - should notify warning + await pilot.press("a") + await pilot.pause(0.1) + + # No modal should be pushed (we can't easily check notifications) + # but at least verify no crash occurred + + @pytest.mark.asyncio + async def test_archive_action_on_archived_session_shows_warning(self): + """Test archive action on already archived session shows warning.""" + with tempfile.TemporaryDirectory() as temp_dir: + project_path = Path(temp_dir) + jsonl_file = project_path / "session-123.jsonl" + jsonl_file.write_text('{"type":"user"}\n', encoding="utf-8") + + app = SessionBrowser(project_path) + + mock_session_data = { + "session-archived": SessionCacheData( + session_id="session-archived", + first_timestamp="2025-01-01T10:00:00Z", + last_timestamp="2025-01-01T10:01:00Z", + message_count=1, + first_user_message="Test", + total_input_tokens=10, + total_output_tokens=10, + ), + } + + async with app.run_test() as pilot: + await pilot.pause(0.2) + + # Set up archived session + app.archived_sessions = mock_session_data + app.sessions = {} + app.selected_session_id = "session-archived" + + # Try to archive - should notify warning (already archived) + await pilot.press("a") + await pilot.pause(0.1) + + +@pytest.mark.tui +class TestDeleteActionEdgeCases: + """Edge case tests for the delete session action.""" + + @pytest.mark.asyncio + async def test_delete_action_no_selection(self): + """Test delete action with no session selected shows warning.""" + with 
tempfile.TemporaryDirectory() as temp_dir: + project_path = Path(temp_dir) + jsonl_file = project_path / "session-123.jsonl" + jsonl_file.write_text('{"type":"user"}\n', encoding="utf-8") + + app = SessionBrowser(project_path) + + async with app.run_test() as pilot: + await pilot.pause(0.2) + + # Ensure no session is selected + app.selected_session_id = None + + # Try to delete - should notify warning + await pilot.press("d") + await pilot.pause(0.1) + + +@pytest.mark.tui +class TestRestoreWithMkdir: + """Tests for restore action creating directory if needed.""" + + @pytest.mark.asyncio + async def test_restore_creates_directory_if_missing(self): + """Test that restore creates the project directory if it was deleted.""" + with tempfile.TemporaryDirectory() as temp_dir: + project_path = Path(temp_dir) / "deleted_project" + # Don't create the directory - it should be created on restore + + app = SessionBrowser(project_path, is_archived=True) + + mock_session_data = { + "session-123": SessionCacheData( + session_id="session-123", + first_timestamp="2025-01-01T10:00:00Z", + last_timestamp="2025-01-01T10:01:00Z", + message_count=1, + first_user_message="Test", + total_input_tokens=10, + total_output_tokens=10, + ), + } + + with ( + patch.object( + app.cache_manager, + "export_session_to_jsonl", + return_value=['{"type":"user"}'], + ), + patch.object( + app.cache_manager, "get_cached_project_data" + ) as mock_cache, + patch.object( + app.cache_manager, "get_archived_sessions", return_value={} + ), + ): + mock_cache.return_value = Mock( + sessions=mock_session_data, + working_directories=[str(project_path)], + ) + + async with app.run_test() as pilot: + await pilot.pause(0.2) + + # Set up archived session + app.archived_sessions = mock_session_data + app.selected_session_id = "session-123" + + # Directory should not exist + assert not project_path.exists() + + # Trigger restore + app.action_restore_jsonl() + await pilot.pause(0.1) + + # Directory should now exist + assert 
project_path.exists() + + # JSONL file should be created + assert (project_path / "session-123.jsonl").exists() + + +@pytest.mark.tui +class TestProjectSelector: + """Tests for the ProjectSelector TUI.""" + + @pytest.mark.asyncio + async def test_enter_key_selects_project(self): + """Test that Enter key selects the highlighted project.""" + with tempfile.TemporaryDirectory() as temp_dir: + project1 = Path(temp_dir) / "project1" + project1.mkdir() + (project1 / "session-1.jsonl").write_text('{"type":"user"}\n') + + project2 = Path(temp_dir) / "project2" + project2.mkdir() + (project2 / "session-2.jsonl").write_text('{"type":"user"}\n') + + app = ProjectSelector( + projects=[project1, project2], + matching_projects=[], + archived_projects=set(), + ) + + async with app.run_test() as pilot: + await pilot.pause(0.2) + + # Select first project and press Enter + await pilot.press("enter") + await pilot.pause(0.1) + + @pytest.mark.asyncio + async def test_escape_key_quits(self): + """Test that Escape key quits the application.""" + with tempfile.TemporaryDirectory() as temp_dir: + project1 = Path(temp_dir) / "project1" + project1.mkdir() + + app = ProjectSelector( + projects=[project1], + matching_projects=[], + archived_projects=set(), + ) + + async with app.run_test() as pilot: + await pilot.pause(0.2) + + # Press Escape to quit + await pilot.press("escape") + await pilot.pause(0.1) + + @pytest.mark.asyncio + async def test_archive_project_action(self): + """Test archiving a project deletes JSONL files.""" + with tempfile.TemporaryDirectory() as temp_dir: + project_path = Path(temp_dir) / "project1" + project_path.mkdir() + jsonl1 = project_path / "session-1.jsonl" + jsonl2 = project_path / "session-2.jsonl" + jsonl1.write_text('{"type":"user"}\n') + jsonl2.write_text('{"type":"user"}\n') + + app = ProjectSelector( + projects=[project_path], + matching_projects=[], + archived_projects=set(), + ) + + async with app.run_test() as pilot: + await pilot.pause(0.2) + + # 
Select the project + app.selected_project_path = project_path + + # Both JSONL files should exist + assert jsonl1.exists() + assert jsonl2.exists() + + # Press 'a' to archive and then confirm + await pilot.press("a") + await pilot.pause(0.1) + await pilot.press("y") + await pilot.pause(0.1) + + # JSONL files should be deleted + assert not jsonl1.exists() + assert not jsonl2.exists() + + # Project should now be in archived set + assert project_path in app.archived_projects + + @pytest.mark.asyncio + async def test_archive_project_already_archived_shows_warning(self): + """Test archiving an already archived project shows warning.""" + with tempfile.TemporaryDirectory() as temp_dir: + project_path = Path(temp_dir) / "project1" + project_path.mkdir() + + app = ProjectSelector( + projects=[project_path], + matching_projects=[], + archived_projects={project_path}, # Already archived + ) + + async with app.run_test() as pilot: + await pilot.pause(0.2) + + # Select the archived project + app.selected_project_path = project_path + + # Try to archive - should show warning + await pilot.press("a") + await pilot.pause(0.1) + + @pytest.mark.asyncio + async def test_delete_project_cache_only(self): + """Test deleting project cache only keeps JSONL files.""" + with tempfile.TemporaryDirectory() as temp_dir: + project_path = Path(temp_dir) / "project1" + project_path.mkdir() + jsonl = project_path / "session-1.jsonl" + jsonl.write_text('{"type":"user"}\n') + + app = ProjectSelector( + projects=[project_path], + matching_projects=[], + archived_projects=set(), + ) + + with patch.object(CacheManager, "clear_cache"): + async with app.run_test() as pilot: + await pilot.pause(0.2) + + # Select the project + app.selected_project_path = project_path + + # Press 'd' to delete and choose cache only + await pilot.press("d") + await pilot.pause(0.1) + await pilot.press("c") # Cache only + await pilot.pause(0.1) + + # JSONL file should still exist + assert jsonl.exists() + + 
@pytest.mark.asyncio + async def test_delete_project_both(self): + """Test deleting project cache and JSONL files.""" + with tempfile.TemporaryDirectory() as temp_dir: + project_path = Path(temp_dir) / "project1" + project_path.mkdir() + jsonl = project_path / "session-1.jsonl" + jsonl.write_text('{"type":"user"}\n') + + app = ProjectSelector( + projects=[project_path], + matching_projects=[], + archived_projects=set(), + ) + + with patch.object(CacheManager, "clear_cache"): + async with app.run_test() as pilot: + await pilot.pause(0.2) + + # Select the project + app.selected_project_path = project_path + + assert jsonl.exists() + + # Press 'd' to delete and choose both + await pilot.press("d") + await pilot.pause(0.1) + await pilot.press("b") # Both + await pilot.pause(0.1) + + # JSONL file should be deleted + assert not jsonl.exists() + + @pytest.mark.asyncio + async def test_restore_project_creates_directory(self): + """Test restoring a project creates directory if missing.""" + with tempfile.TemporaryDirectory() as temp_dir: + project_path = Path(temp_dir) / "deleted_project" + # Don't create the directory + + mock_session_data = { + "session-123": SessionCacheData( + session_id="session-123", + first_timestamp="2025-01-01T10:00:00Z", + last_timestamp="2025-01-01T10:01:00Z", + message_count=1, + first_user_message="Test", + total_input_tokens=10, + total_output_tokens=10, + ), + } + + app = ProjectSelector( + projects=[project_path], + matching_projects=[], + archived_projects={project_path}, # Archived project + ) + + with ( + patch.object(CacheManager, "get_cached_project_data") as mock_cache, + patch.object( + CacheManager, + "export_session_to_jsonl", + return_value=['{"type":"user"}'], + ), + ): + mock_cache.return_value = Mock(sessions=mock_session_data) + + async with app.run_test() as pilot: + await pilot.pause(0.2) + + # Select the project + app.selected_project_path = project_path + + # Directory should not exist + assert not project_path.exists() + + 
# Press 'r' to restore and confirm + await pilot.press("r") + await pilot.pause(0.1) + await pilot.press("y") + await pilot.pause(0.1) + + # Directory should now exist + assert project_path.exists() + + @pytest.mark.asyncio + async def test_restore_project_not_archived_shows_warning(self): + """Test restoring a non-archived project shows warning.""" + with tempfile.TemporaryDirectory() as temp_dir: + project_path = Path(temp_dir) / "project1" + project_path.mkdir() + (project_path / "session-1.jsonl").write_text('{"type":"user"}\n') + + app = ProjectSelector( + projects=[project_path], + matching_projects=[], + archived_projects=set(), # Not archived + ) + + async with app.run_test() as pilot: + await pilot.pause(0.2) + + # Select the non-archived project + app.selected_project_path = project_path + + # Try to restore - should show warning + await pilot.press("r") + await pilot.pause(0.1)