diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 6a5c88896..5f4615bb1 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -347,7 +347,7 @@ jobs: files: | gh-release-assets/* - # Extract version from tag (e.g., v0.0.85 -> 0.0.89) + # Extract version from tag (e.g., v0.0.89 -> 0.0.89) - name: Extract version if: startsWith(github.ref, 'refs/tags/') id: version diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 36c729b0a..919b12415 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -12,6 +12,61 @@ permissions: contents: read jobs: + web-local-smoke: + name: Run Web + Local Brain Smoke + runs-on: ubuntu-latest + timeout-minutes: 25 + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up Node.js + uses: actions/setup-node@v4 + with: + node-version: 20 + + - name: Install frontend dependencies + run: npm install --ignore-scripts + + - name: Install uv + uses: astral-sh/setup-uv@v5 + with: + enable-cache: true + + - name: Set up Python + run: uv python install 3.11 + + - name: Install backend dependencies + run: | + cd backend + uv sync + + - name: Run web + local brain smoke + run: bash scripts/smoke-web-local-brain.sh + + frontend-quality: + name: Run Frontend Guardrails + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up Node.js + uses: actions/setup-node@v4 + with: + node-version: 20 + + - name: Install frontend dependencies + run: npm install --ignore-scripts + + - name: Run type check + run: npm run type-check + + - name: Check Electron Access Guard + run: bash scripts/check-electron-access.sh + pytest: name: Run Python Tests runs-on: ubuntu-latest diff --git a/backend/README.md b/backend/README.md index 237ae8dde..1a6127113 100644 --- a/backend/README.md +++ b/backend/README.md @@ -1,5 +1,40 @@ ```bash +# Option 1: Start with uvicorn directly uv run uvicorn main:api --port 5001 + +# 
Option 2: Standalone mode (no Electron dependency) +uv run python main.py + +# Option 3: If uv run hangs, delete lock files and retry, or use venv directly: +.venv/bin/python main.py +# or +.venv/bin/uvicorn main:api --port 5001 --host 0.0.0.0 + +# If uv hangs, delete lock files first: rm -f uv_installing.lock uv_installed.lock +``` + +### Environment Variables (Standalone) + +| Variable | Default | Description | +| ---------------------------------- | ------------------- | ----------------------------------------------------------------------------------------- | +| `EIGENT_BRAIN_PORT` | 5001 | Listening port | +| `EIGENT_BRAIN_HOST` | 0.0.0.0 | Listening address | +| `EIGENT_DEBUG` | - | Set to 1/true to enable reload | +| `EIGENT_WORKSPACE` | ~/.eigent/workspace | Working directory | +| `EIGENT_DEPLOYMENT_TYPE` | (auto) | `local` / `cloud_vm` / `sandbox` / `docker`; determines Hands capabilities (see ADR-0006) | +| `EIGENT_HANDS_MODE` | - | Set to `remote` to enable `RemoteHands` (remote cluster resource mode) | +| `EIGENT_HANDS_CLUSTER_CONFIG_FILE` | - | Path to `RemoteHands` config file (TOML); **recommended** | +| `EIGENT_HANDS_TERMINAL` | - | Override terminal hand: `1`/`true`/`yes` or `0`/`false`/`no` | +| `EIGENT_HANDS_BROWSER` | - | Override browser hand | +| `EIGENT_HANDS_FILESYSTEM` | - | Override filesystem scope: `full` / `workspace_only` | +| `EIGENT_HANDS_MCP` | - | Override MCP mode: `all` / `allowlist` | + +RemoteHands config file example: + +```bash +cp backend/config/hands_clusters.example.toml ~/.eigent/hands_clusters.toml +export EIGENT_HANDS_MODE=remote +export EIGENT_HANDS_CLUSTER_CONFIG_FILE=~/.eigent/hands_clusters.toml ``` i18n operation process: https://github.com/Anbarryprojects/fastapi-babel diff --git a/backend/app/__init__.py b/backend/app/__init__.py index 06f0364bf..0265148c8 100644 --- a/backend/app/__init__.py +++ b/backend/app/__init__.py @@ -12,17 +12,68 @@ # limitations under the License. 
# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +import os + from fastapi import FastAPI from fastapi.middleware.cors import CORSMiddleware +from starlette.middleware.base import BaseHTTPMiddleware # Initialize FastAPI with title api = FastAPI(title="Eigent Multi-Agent System API") -# Add CORS middleware + +@api.get("/") +def root(): + """Root endpoint - confirms this is the Brain backend.""" + return {"service": "eigent-brain", "docs": "/docs", "health": "/health"} + + +_cors_raw = os.environ.get("EIGENT_CORS_ORIGINS", "") +_allowed_origins = [o.strip() for o in _cors_raw.split(",") if o.strip()] +_default_frame_ancestors = [ + "'self'", + "http://localhost:*", + "http://127.0.0.1:*", + "https://localhost:*", + "https://127.0.0.1:*", +] +_frame_ancestors = " ".join( + dict.fromkeys( + [ + *_default_frame_ancestors, + *[origin for origin in _allowed_origins if origin != "*"], + ] + ) +) + + +class SecurityHeadersMiddleware(BaseHTTPMiddleware): + async def dispatch(self, request, call_next): + response = await call_next(request) + response.headers["X-Content-Type-Options"] = "nosniff" + response.headers["Referrer-Policy"] = "strict-origin-when-cross-origin" + if request.url.path.startswith("/files/preview/"): + if "X-Frame-Options" in response.headers: + del response.headers["X-Frame-Options"] + response.headers["Content-Security-Policy"] = ( + f"frame-ancestors {_frame_ancestors};" + ) + else: + response.headers["X-Frame-Options"] = "DENY" + return response + + api.add_middleware( CORSMiddleware, - allow_origins=["*"], - allow_credentials=True, + allow_origins=_allowed_origins or ["*"], + allow_credentials=bool(_allowed_origins), allow_methods=["*"], allow_headers=["*"], + expose_headers=["X-Session-ID"], ) +api.add_middleware(SecurityHeadersMiddleware) + +# Phase 2: Channel/Session header parsing (X-Channel, X-Session-ID, X-User-ID) +from app.router_layer import ChannelSessionMiddleware + +api.add_middleware(ChannelSessionMiddleware) 
diff --git a/backend/app/agent/factory/browser.py b/backend/app/agent/factory/browser.py index cf9a8724f..6f4bb401e 100644 --- a/backend/app/agent/factory/browser.py +++ b/backend/app/agent/factory/browser.py @@ -15,6 +15,7 @@ import platform import threading import uuid +from urllib.parse import urlparse from camel.messages import BaseMessage from camel.toolkits import ToolkitMessageIntegration @@ -33,6 +34,7 @@ from app.agent.toolkit.terminal_toolkit import TerminalToolkit from app.agent.utils import NOW_STR from app.component.environment import env +from app.hands.interface import IHands from app.model.chat import Chat from app.service.task import Agents from app.utils.file_utils import get_working_directory @@ -148,7 +150,10 @@ def get_occupied_ports(self) -> list[int]: _cdp_pool_manager = CdpBrowserPoolManager() -def browser_agent(options: Chat): +def browser_agent( + options: Chat, + hands: IHands | None = None, +): working_directory = get_working_directory(options) logger.info( f"Creating browser agent for project: {options.project_id} " @@ -160,12 +165,17 @@ def browser_agent(options: Chat): ).send_message_to_user ) - # Acquire CDP browser from pool or use default port + use_browser = hands is None or hands.can_use_browser() + use_terminal = hands is None or hands.can_execute_terminal() + + # Acquire CDP browser from pool or use default port (only when browser enabled) toolkit_session_id = str(uuid.uuid4())[:8] selected_port = None selected_is_external = False + cdp_url = None + cdp_owned_by_hands = False - if options.cdp_browsers: + if use_browser and options.cdp_browsers: selected_browser = _cdp_pool_manager.acquire_browser( options.cdp_browsers, toolkit_session_id, options.task_id ) @@ -186,54 +196,83 @@ def browser_agent(options: Chat): f"No available browsers in pool (initial), using first: " f"port={selected_port}, session_id={toolkit_session_id}" ) - else: + cdp_url = f"http://localhost:{selected_port}" + elif use_browser: + existing_cdp_url = 
env("EIGENT_CDP_URL", "").strip() selected_port = env("browser_port", "9222") + cdp_url = f"http://localhost:{selected_port}" + + if existing_cdp_url: + cdp_url = existing_cdp_url + try: + parsed = urlparse(existing_cdp_url) + if parsed.port is not None: + selected_port = parsed.port + except Exception: + selected_port = env("browser_port", "9222") + elif hands is not None: + try: + cdp_url = hands.acquire_resource( + "browser", toolkit_session_id, port=selected_port + ) + cdp_owned_by_hands = True + except (NotImplementedError, ValueError): + cdp_url = f"http://localhost:{selected_port}" + + # Web mode (no Electron): cdp_keep_current_page=False so toolkit can create + # pages when browser has 0 tabs. Electron mode: True to reuse user's page. + cdp_keep_current = bool(options.cdp_browsers) if use_browser else False + default_start_url = None if cdp_keep_current else "about:blank" + + web_toolkit_custom = None + web_toolkit_for_agent_registration = None + if use_browser: + web_toolkit_custom = HybridBrowserToolkit( + options.project_id, + cdp_keep_current_page=cdp_keep_current, + default_start_url=default_start_url, + headless=False, + browser_log_to_file=True, + stealth=True, + session_id=toolkit_session_id, + cdp_url=cdp_url, + enabled_tools=[ + "browser_click", + "browser_type", + "browser_back", + "browser_forward", + "browser_select", + "browser_console_exec", + "browser_console_view", + "browser_switch_tab", + "browser_enter", + "browser_visit_page", + "browser_scroll", + "browser_sheet_read", + "browser_sheet_input", + "browser_get_page_snapshot", + "browser_open", + "browser_upload_file", + "browser_download_file", + ], + ) + web_toolkit_for_agent_registration = web_toolkit_custom + web_toolkit_custom = message_integration.register_toolkits( + web_toolkit_custom + ) - web_toolkit_custom = HybridBrowserToolkit( - options.project_id, - cdp_keep_current_page=True, - headless=False, - browser_log_to_file=True, - stealth=True, - session_id=toolkit_session_id, - 
cdp_url=f"http://localhost:{selected_port}", - enabled_tools=[ - "browser_click", - "browser_type", - "browser_back", - "browser_forward", - "browser_select", - "browser_console_exec", - "browser_console_view", - "browser_switch_tab", - "browser_enter", - "browser_visit_page", - "browser_scroll", - "browser_sheet_read", - "browser_sheet_input", - "browser_get_page_snapshot", - "browser_open", - "browser_upload_file", - "browser_download_file", - ], - ) - - # Save reference before registering for toolkits_to_register_agent - web_toolkit_for_agent_registration = web_toolkit_custom - web_toolkit_custom = message_integration.register_toolkits( - web_toolkit_custom - ) - - terminal_toolkit = TerminalToolkit( - options.project_id, - Agents.browser_agent, - working_directory=working_directory, - safe_mode=True, - clone_current_env=True, - ) - terminal_toolkit = message_integration.register_functions( - [terminal_toolkit.shell_exec] - ) + terminal_toolkit = None + if use_terminal: + terminal_toolkit = TerminalToolkit( + options.project_id, + Agents.browser_agent, + working_directory=working_directory, + safe_mode=True, + clone_current_env=True, + ) + terminal_toolkit = message_integration.register_functions( + [terminal_toolkit.shell_exec] + ) note_toolkit = NoteTakingToolkit( options.project_id, @@ -272,13 +311,33 @@ def browser_agent(options: Chat): *HumanToolkit.get_can_use_tools( options.project_id, Agents.browser_agent ), - *web_toolkit_custom.get_tools(), - *terminal_toolkit, *note_toolkit.get_tools(), *screenshot_toolkit.get_tools(), *search_tools, *skill_toolkit.get_tools(), ] + tool_names = [ + SearchToolkit.toolkit_name(), + HumanToolkit.toolkit_name(), + NoteTakingToolkit.toolkit_name(), + ScreenshotToolkit.toolkit_name(), + SkillToolkit.toolkit_name(), + ] + if use_browser and web_toolkit_custom: + tools = [ + *HumanToolkit.get_can_use_tools( + options.project_id, Agents.browser_agent + ), + *web_toolkit_custom.get_tools(), + *note_toolkit.get_tools(), + 
*screenshot_toolkit.get_tools(), + *search_tools, + *skill_toolkit.get_tools(), + ] + tool_names.insert(1, HybridBrowserToolkit.toolkit_name()) + if use_terminal and terminal_toolkit: + tools.extend(terminal_toolkit) + tool_names.append(TerminalToolkit.toolkit_name()) # Build external browser notice external_browser_notice = "" @@ -310,18 +369,14 @@ def browser_agent(options: Chat): options, tools, prune_tool_calls_from_memory=True, - tool_names=[ - SearchToolkit.toolkit_name(), - HybridBrowserToolkit.toolkit_name(), - HumanToolkit.toolkit_name(), - NoteTakingToolkit.toolkit_name(), - TerminalToolkit.toolkit_name(), - ScreenshotToolkit.toolkit_name(), - SkillToolkit.toolkit_name(), - ], + tool_names=tool_names, toolkits_to_register_agent=[ - web_toolkit_for_agent_registration, - screenshot_toolkit_for_agent_registration, + t + for t in ( + web_toolkit_for_agent_registration, + screenshot_toolkit_for_agent_registration, + ) + if t is not None ], enable_snapshot_clean=True, ) @@ -351,19 +406,42 @@ def release_cdp_from_agent(agent_instance): """Release CDP browser back to pool.""" port = getattr(agent_instance, "_cdp_port", None) session_id = getattr(agent_instance, "_cdp_session_id", None) - if port is not None and session_id is not None: + if ( + port is not None + and session_id is not None + and options.cdp_browsers + ): _cdp_pool_manager.release_browser(port, session_id) logger.info( f"Released CDP for agent {agent_instance.agent_id}: " f"port={port}, session={session_id}" ) + elif ( + session_id is not None + and hands is not None + and getattr(agent_instance, "_cdp_owned_by_hands", False) + ): + try: + hands.release_resource("browser", session_id) + except Exception as exc: + logger.warning( + "Failed to release browser resource for session %s: %s", + session_id, + exc, + ) - agent._cdp_acquire_callback = acquire_cdp_for_agent - agent._cdp_release_callback = release_cdp_from_agent + agent._cdp_acquire_callback = ( + acquire_cdp_for_agent if use_browser else 
None + ) + agent._cdp_release_callback = ( + release_cdp_from_agent if use_browser else None + ) agent._cdp_port = selected_port + agent._cdp_url = cdp_url agent._cdp_session_id = toolkit_session_id agent._cdp_task_id = options.task_id agent._cdp_options = options agent._browser_toolkit = web_toolkit_for_agent_registration + agent._cdp_owned_by_hands = cdp_owned_by_hands return agent diff --git a/backend/app/agent/factory/developer.py b/backend/app/agent/factory/developer.py index c52ce3e14..811c26622 100644 --- a/backend/app/agent/factory/developer.py +++ b/backend/app/agent/factory/developer.py @@ -30,12 +30,16 @@ from app.agent.toolkit.terminal_toolkit import TerminalToolkit from app.agent.toolkit.web_deploy_toolkit import WebDeployToolkit from app.agent.utils import NOW_STR +from app.hands.interface import IHands from app.model.chat import Chat from app.service.task import Agents from app.utils.file_utils import get_working_directory -async def developer_agent(options: Chat): +async def developer_agent( + options: Chat, + hands: IHands | None = None, +): working_directory = get_working_directory(options) logger.info( f"Creating developer agent for project: {options.project_id} " @@ -66,16 +70,6 @@ async def developer_agent(options: Chat): screenshot_toolkit = message_integration.register_toolkits( screenshot_toolkit ) - - terminal_toolkit = TerminalToolkit( - options.project_id, - Agents.developer_agent, - working_directory=working_directory, - safe_mode=True, - clone_current_env=True, - ) - terminal_toolkit = message_integration.register_toolkits(terminal_toolkit) - skill_toolkit = SkillToolkit( options.project_id, Agents.developer_agent, @@ -98,11 +92,33 @@ async def developer_agent(options: Chat): ), *note_toolkit.get_tools(), *web_deploy_toolkit.get_tools(), - *terminal_toolkit.get_tools(), *screenshot_toolkit.get_tools(), *skill_toolkit.get_tools(), *search_tools, ] + tool_names = [ + HumanToolkit.toolkit_name(), + NoteTakingToolkit.toolkit_name(), + 
WebDeployToolkit.toolkit_name(), + ScreenshotToolkit.toolkit_name(), + SkillToolkit.toolkit_name(), + ] + if search_tools: + tool_names.append(SearchToolkit.toolkit_name()) + if hands is None or hands.can_execute_terminal(): + terminal_toolkit = TerminalToolkit( + options.project_id, + Agents.developer_agent, + working_directory=working_directory, + safe_mode=True, + clone_current_env=True, + ) + terminal_toolkit = message_integration.register_toolkits( + terminal_toolkit + ) + tools.extend(terminal_toolkit.get_tools()) + tool_names.append(TerminalToolkit.toolkit_name()) + system_message = DEVELOPER_SYS_PROMPT.format( platform_system=platform.system(), platform_machine=platform.machine(), @@ -118,15 +134,7 @@ async def developer_agent(options: Chat): ), options, tools, - tool_names=[ - HumanToolkit.toolkit_name(), - TerminalToolkit.toolkit_name(), - NoteTakingToolkit.toolkit_name(), - WebDeployToolkit.toolkit_name(), - ScreenshotToolkit.toolkit_name(), - SkillToolkit.toolkit_name(), - SearchToolkit.toolkit_name(), - ], + tool_names=tool_names, toolkits_to_register_agent=[ screenshot_toolkit_for_agent_registration, ], diff --git a/backend/app/agent/factory/document.py b/backend/app/agent/factory/document.py index edabefc70..0e1827808 100644 --- a/backend/app/agent/factory/document.py +++ b/backend/app/agent/factory/document.py @@ -33,12 +33,16 @@ from app.agent.toolkit.skill_toolkit import SkillToolkit from app.agent.toolkit.terminal_toolkit import TerminalToolkit from app.agent.utils import NOW_STR +from app.hands.interface import IHands from app.model.chat import Chat from app.service.task import Agents from app.utils.file_utils import get_working_directory -async def document_agent(options: Chat): +async def document_agent( + options: Chat, + hands: IHands | None = None, +): working_directory = get_working_directory(options) logger.info( f"Creating document agent for project: {options.project_id} " @@ -82,14 +86,39 @@ async def document_agent(options: Chat): 
screenshot_toolkit ) - terminal_toolkit = TerminalToolkit( - options.project_id, - Agents.document_agent, - working_directory=working_directory, - safe_mode=True, - clone_current_env=True, - ) - terminal_toolkit = message_integration.register_toolkits(terminal_toolkit) + tools = [ + *file_write_toolkit.get_tools(), + *pptx_toolkit.get_tools(), + *HumanToolkit.get_can_use_tools( + options.project_id, Agents.document_agent + ), + *mark_it_down_toolkit.get_tools(), + *excel_toolkit.get_tools(), + *note_toolkit.get_tools(), + *screenshot_toolkit.get_tools(), + ] + tool_names = [ + FileToolkit.toolkit_name(), + PPTXToolkit.toolkit_name(), + HumanToolkit.toolkit_name(), + MarkItDownToolkit.toolkit_name(), + ExcelToolkit.toolkit_name(), + NoteTakingToolkit.toolkit_name(), + ScreenshotToolkit.toolkit_name(), + ] + if hands is None or hands.can_execute_terminal(): + terminal_toolkit = TerminalToolkit( + options.project_id, + Agents.document_agent, + working_directory=working_directory, + safe_mode=True, + clone_current_env=True, + ) + terminal_toolkit = message_integration.register_toolkits( + terminal_toolkit + ) + tools.extend(terminal_toolkit.get_tools()) + tool_names.append(TerminalToolkit.toolkit_name()) google_drive_tools = await GoogleDriveMCPToolkit.get_can_use_tools( options.project_id, options.get_bun_env() @@ -102,30 +131,20 @@ async def document_agent(options: Chat): user_id=options.skill_config_user_id(), ) skill_toolkit = message_integration.register_toolkits(skill_toolkit) - + tools.extend(google_drive_tools) + if google_drive_tools: + tool_names.append(GoogleDriveMCPToolkit.toolkit_name()) + tools.extend(skill_toolkit.get_tools()) + tool_names.append(SkillToolkit.toolkit_name()) search_tools = SearchToolkit.get_can_use_tools( options.project_id, agent_name=Agents.document_agent ) if search_tools: search_tools = message_integration.register_functions(search_tools) + tools.extend(search_tools) + tool_names.append(SearchToolkit.toolkit_name()) else: 
search_tools = [] - - tools = [ - *file_write_toolkit.get_tools(), - *pptx_toolkit.get_tools(), - *HumanToolkit.get_can_use_tools( - options.project_id, Agents.document_agent - ), - *mark_it_down_toolkit.get_tools(), - *excel_toolkit.get_tools(), - *note_toolkit.get_tools(), - *terminal_toolkit.get_tools(), - *screenshot_toolkit.get_tools(), - *google_drive_tools, - *skill_toolkit.get_tools(), - *search_tools, - ] system_message = DOCUMENT_SYS_PROMPT.format( platform_system=platform.system(), platform_machine=platform.machine(), @@ -141,19 +160,7 @@ async def document_agent(options: Chat): ), options, tools, - tool_names=[ - FileToolkit.toolkit_name(), - PPTXToolkit.toolkit_name(), - HumanToolkit.toolkit_name(), - MarkItDownToolkit.toolkit_name(), - ExcelToolkit.toolkit_name(), - NoteTakingToolkit.toolkit_name(), - TerminalToolkit.toolkit_name(), - ScreenshotToolkit.toolkit_name(), - GoogleDriveMCPToolkit.toolkit_name(), - SkillToolkit.toolkit_name(), - SearchToolkit.toolkit_name(), - ], + tool_names=tool_names, toolkits_to_register_agent=[ screenshot_toolkit_for_agent_registration, ], diff --git a/backend/app/agent/factory/multi_modal.py b/backend/app/agent/factory/multi_modal.py index f01756f57..00329c24d 100644 --- a/backend/app/agent/factory/multi_modal.py +++ b/backend/app/agent/factory/multi_modal.py @@ -33,12 +33,16 @@ from app.agent.toolkit.terminal_toolkit import TerminalToolkit from app.agent.toolkit.video_download_toolkit import VideoDownloaderToolkit from app.agent.utils import NOW_STR +from app.hands.interface import IHands from app.model.chat import Chat from app.service.task import Agents from app.utils.file_utils import get_working_directory -def multi_modal_agent(options: Chat): +def multi_modal_agent( + options: Chat, + hands: IHands | None = None, +): working_directory = get_working_directory(options) logger.info( f"Creating multi-modal agent for project: {options.project_id} " @@ -67,15 +71,6 @@ def multi_modal_agent(options: Chat): 
screenshot_toolkit ) - terminal_toolkit = TerminalToolkit( - options.project_id, - agent_name=Agents.multi_modal_agent, - working_directory=working_directory, - safe_mode=True, - clone_current_env=True, - ) - terminal_toolkit = message_integration.register_toolkits(terminal_toolkit) - note_toolkit = NoteTakingToolkit( options.project_id, Agents.multi_modal_agent, @@ -105,13 +100,33 @@ def multi_modal_agent(options: Chat): *HumanToolkit.get_can_use_tools( options.project_id, Agents.multi_modal_agent ), - *terminal_toolkit.get_tools(), *note_toolkit.get_tools(), *skill_toolkit.get_tools(), *search_tools, ] + tool_names = [ + VideoDownloaderToolkit.toolkit_name(), + ScreenshotToolkit.toolkit_name(), + HumanToolkit.toolkit_name(), + NoteTakingToolkit.toolkit_name(), + SkillToolkit.toolkit_name(), + ] + if search_tools: + tool_names.append(SearchToolkit.toolkit_name()) + if hands is None or hands.can_execute_terminal(): + terminal_toolkit = TerminalToolkit( + options.project_id, + agent_name=Agents.multi_modal_agent, + working_directory=working_directory, + safe_mode=True, + clone_current_env=True, + ) + terminal_toolkit = message_integration.register_toolkits( + terminal_toolkit + ) + tools.extend(terminal_toolkit.get_tools()) + tool_names.append(TerminalToolkit.toolkit_name()) if options.is_cloud(): - # TODO: check llm has this model open_ai_image_toolkit = OpenAIImageToolkit( options.project_id, model="dall-e-3", @@ -125,10 +140,8 @@ def multi_modal_agent(options: Chat): open_ai_image_toolkit = message_integration.register_toolkits( open_ai_image_toolkit ) - tools = [ - *tools, - *open_ai_image_toolkit.get_tools(), - ] + tools.extend(open_ai_image_toolkit.get_tools()) + tool_names.append(OpenAIImageToolkit.toolkit_name()) # Convert string model_platform to enum for comparison try: model_platform_enum = ModelPlatformType(options.model_platform.lower()) @@ -148,6 +161,7 @@ def multi_modal_agent(options: Chat): audio_analysis_toolkit ) 
tools.extend(audio_analysis_toolkit.get_tools()) + tool_names.append(AudioAnalysisToolkit.toolkit_name()) system_message = MULTI_MODAL_SYS_PROMPT.format( platform_system=platform.system(), @@ -164,17 +178,7 @@ def multi_modal_agent(options: Chat): ), options, tools, - tool_names=[ - VideoDownloaderToolkit.toolkit_name(), - AudioAnalysisToolkit.toolkit_name(), - ScreenshotToolkit.toolkit_name(), - OpenAIImageToolkit.toolkit_name(), - HumanToolkit.toolkit_name(), - TerminalToolkit.toolkit_name(), - NoteTakingToolkit.toolkit_name(), - SearchToolkit.toolkit_name(), - SkillToolkit.toolkit_name(), - ], + tool_names=tool_names, toolkits_to_register_agent=[ screenshot_toolkit_for_agent_registration, ], diff --git a/backend/app/agent/listen_chat_agent.py b/backend/app/agent/listen_chat_agent.py index 7fbaddc6a..3ff3aeb91 100644 --- a/backend/app/agent/listen_chat_agent.py +++ b/backend/app/agent/listen_chat_agent.py @@ -709,7 +709,7 @@ def clone(self, with_memory: bool = False) -> ChatAgent: if has_cdp and hasattr(self, "_cdp_options"): options = self._cdp_options cdp_browsers = getattr(options, "cdp_browsers", []) - if cdp_browsers and hasattr(self, "_browser_toolkit"): + if cdp_browsers and getattr(self, "_browser_toolkit", None): need_cdp_clone = True import uuid as _uuid diff --git a/backend/app/agent/prompt.py b/backend/app/agent/prompt.py index 40f8d1bc0..074198b1b 100644 --- a/backend/app/agent/prompt.py +++ b/backend/app/agent/prompt.py @@ -152,6 +152,7 @@ message_description parameters when calling tools. These optional parameters are available on all tools and will automatically notify the user of your progress. 
+ @@ -178,7 +179,6 @@ - Image Analysis & Understanding: - Use `read_image` to analyze images from local file paths - - Use `take_screenshot_and_read_image` to capture and analyze the screen - Generate detailed descriptions of image content - Answer specific questions about images - Identify objects, text, people, and scenes in images @@ -292,6 +292,9 @@ Your capabilities include: - You can use ScreenshotToolkit to read image with given path. +- When verifying generated image files (PNG/JPG/etc.), you MUST use + `read_image` on the saved file path. Do NOT capture the desktop screen + for this purpose. - **Skills System (Highest Priority Workflow)**: Skills are your primary execution source for specialized tasks. - Trigger: If a task explicitly references a skill with double curly braces @@ -451,11 +454,15 @@ summary of your work and the outcome, presented in a clear, detailed, and easy-to-read format. Avoid using markdown tables for presenting data; use plain text formatting instead. + Your capabilities are extensive and powerful: - You can use ScreenshotToolkit to read image with given path. +- When verifying generated image files (PNG/JPG/etc.), you MUST use + `read_image` on the saved file path. Do NOT capture the desktop screen + for this purpose. - **Skills System (Highest Priority Workflow)**: Skills are your primary execution source for specialized tasks. - Trigger: If a task explicitly references a skill with double curly braces @@ -486,8 +493,6 @@ `chmod`. - **Networking & Web**: `curl`, `wget` for web requests; `ssh` for remote access. -- **Screen Observation**: You can take screenshots to analyze GUIs and visual - context, enabling you to perform tasks that require sight. - **Desktop Automation**: You can control desktop applications programmatically. - **On macOS**, you MUST prioritize using **AppleScript** for its robust @@ -629,6 +634,12 @@ MUST be sourced from the web using the available tools. 
If you don't know something, find it out using your tools. +- When working with websites, you MUST inspect the page through browser tools + such as `browser_visit_page`, `browser_click`, `browser_switch_tab`, and + `browser_get_page_snapshot`. Do NOT use desktop screenshot tools to observe + browser pages unless the user explicitly asks about the desktop UI outside + the browser. + - When you complete your task, your final response must be a comprehensive summary of your findings, presented in a clear, detailed, and easy-to-read format. Avoid using markdown tables for presenting data; @@ -638,6 +649,8 @@ Your capabilities include: - You can use ScreenshotToolkit to read image with given path. +- For saved browser/file images, use `read_image` with the file path. Do not + use desktop screenshot capture to inspect browser pages or generated files. - **Skills System (Highest Priority Workflow)**: Skills are your primary execution source for specialized tasks. - Trigger: If a task explicitly references a skill with double curly braces diff --git a/backend/app/agent/toolkit/human_toolkit.py b/backend/app/agent/toolkit/human_toolkit.py index 731ca939a..e06fdabdf 100644 --- a/backend/app/agent/toolkit/human_toolkit.py +++ b/backend/app/agent/toolkit/human_toolkit.py @@ -12,6 +12,7 @@ # limitations under the License. # ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +import asyncio import logging from camel.toolkits.base import BaseToolkit @@ -19,6 +20,7 @@ from app.agent.toolkit.abstract_toolkit import AbstractToolkit from app.service.task import ( + TASK_LOCK_CLEANUP_SENTINEL, Action, ActionAskData, ActionNoticeData, @@ -60,7 +62,6 @@ async def ask_human_via_gui(self, question: str) -> str: credentials, file paths). - Ask for a decision when there are multiple viable options. - Seek help when you encounter an error you cannot resolve on your own. - Args: question (str): The question to ask the user. 
@@ -80,6 +81,17 @@ async def ask_human_via_gui(self, question: str) -> str: ) reply = await task_lock.get_human_input(self.agent_name) + if reply == TASK_LOCK_CLEANUP_SENTINEL: + logger.info( + "Human input wait interrupted by task cleanup", + extra={ + "task_id": self.api_task_id, + "agent": self.agent_name, + }, + ) + raise asyncio.CancelledError( + "Task cleanup interrupted human input wait" + ) logger.info(f"User reply: {reply}") return reply diff --git a/backend/app/agent/toolkit/hybrid_browser_toolkit.py b/backend/app/agent/toolkit/hybrid_browser_toolkit.py index 74209fe42..0b49f9782 100644 --- a/backend/app/agent/toolkit/hybrid_browser_toolkit.py +++ b/backend/app/agent/toolkit/hybrid_browser_toolkit.py @@ -568,6 +568,12 @@ def clone_for_new_session( # Use the same session_id to share the same browser instance # This ensures all clones use the same WebSocket connection and browser + # When cdp_keep_current_page=True, default_start_url must be None (CAMEL constraint) + cdp_keep = ( + self.config_loader.get_browser_config().cdp_keep_current_page + ) + clone_start_url = None if cdp_keep else self._default_start_url + return HybridBrowserToolkit( self.api_task_id, headless=self._headless, @@ -578,9 +584,7 @@ def clone_for_new_session( browser_log_to_file=self._browser_log_to_file, log_dir=self.config_loader.get_toolkit_config().log_dir, session_id=new_session_id, - default_start_url=None - if self.config_loader.get_browser_config().cdp_keep_current_page - else self._default_start_url, + default_start_url=clone_start_url, default_timeout=self._default_timeout, short_timeout=self._short_timeout, navigation_timeout=self._navigation_timeout, diff --git a/backend/app/agent/toolkit/screenshot_toolkit.py b/backend/app/agent/toolkit/screenshot_toolkit.py index eb6942702..77f6aa29b 100644 --- a/backend/app/agent/toolkit/screenshot_toolkit.py +++ b/backend/app/agent/toolkit/screenshot_toolkit.py @@ -13,8 +13,12 @@ # ========= Copyright 2025-2026 @ Eigent.ai All Rights 
Reserved. ========= import os +from pathlib import Path +from camel.agents import ChatAgent +from camel.messages import BaseMessage from camel.toolkits import ScreenshotToolkit as BaseScreenshotToolkit +from PIL import Image from app.agent.toolkit.abstract_toolkit import AbstractToolkit from app.component.environment import env @@ -31,11 +35,93 @@ def __init__( agent_name: str, working_directory: str | None = None, timeout: float | None = None, + enable_desktop_capture: bool = False, ): self.api_task_id = api_task_id self.agent_name = agent_name + self.enable_desktop_capture = enable_desktop_capture if working_directory is None: working_directory = env( "file_save_path", os.path.expanduser("~/Downloads") ) super().__init__(working_directory, timeout) + + def read_image( + self, + image_path: str, + instruction: str = "", + ) -> str: + """Analyze an image without recursively calling the current agent. + + CAMEL's base ScreenshotToolkit uses `self.agent.step(...)` directly. + When this toolkit itself is being invoked through a tool call, that + creates a nested step on the same agent and can corrupt tool-call + memory (`tool_call_id` mismatch). Use a short-lived vision agent with + the same model backend instead. + """ + if self.agent is None: + return ( + "Error: No agent registered. Please pass this toolkit to " + "ChatAgent via toolkits_to_register_agent parameter." + ) + + try: + image_path = str(Path(image_path).absolute()) + if not os.path.exists(image_path): + return f"Error: Screenshot file not found: {image_path}" + + img = Image.open(image_path) + message = BaseMessage.make_user_message( + role_name="User", + content=instruction, + image_list=[img], + ) + + vision_agent = ChatAgent( + system_message=( + "You are a careful visual assistant. Answer only from the " + "provided image and user instruction." 
+ ), + model=self.agent.model_backend, + tools=[], + toolkits_to_register_agent=None, + external_tools=None, + step_timeout=getattr(self.agent, "step_timeout", 1800), + ) + response = vision_agent.step(message) + if getattr(response, "msg", None) is not None: + return response.msg.content + if getattr(response, "msgs", None): + return response.msgs[0].content + return "Error reading screenshot: empty response" + except Exception as e: + return f"Error reading screenshot: {e}" + + def take_screenshot_and_read_image( + self, + filename: str, + save_to_file: bool = True, + read_image: bool = True, + instruction: str | None = None, + ) -> str: + if not self.enable_desktop_capture: + return ( + "Error: Desktop screenshot capture is disabled for this agent. " + "Use read_image with an existing image file path instead." + ) + + return super().take_screenshot_and_read_image( + filename=filename, + save_to_file=save_to_file, + read_image=read_image, + instruction=instruction, + ) + + def get_tools(self): + tools = super().get_tools() + if self.enable_desktop_capture: + return tools + + return [ + tool for tool in tools if tool.get_function_name() == "read_image" + ] diff --git a/backend/app/agent/tools.py b/backend/app/agent/tools.py index c0880b13a..877c584eb 100644 --- a/backend/app/agent/tools.py +++ b/backend/app/agent/tools.py @@ -42,12 +42,26 @@ from app.agent.toolkit.video_download_toolkit import VideoDownloaderToolkit from app.agent.toolkit.whatsapp_toolkit import WhatsAppToolkit from app.component.environment import env +from app.hands.interface import IHands from app.model.chat import McpServers logger = logging.getLogger(__name__) +# Toolkits depending on terminal hand +TERMINAL_DEPENDENT_TOOLKITS = frozenset({"terminal_toolkit"}) -async def get_toolkits(tools: list[str], agent_name: str, api_task_id: str): +# Toolkits depending on browser hand +BROWSER_DEPENDENT_TOOLKITS = ( + frozenset() +) # hybrid_browser not in get_toolkits dict + + +async def get_toolkits( 
+ tools: list[str], + agent_name: str, + api_task_id: str, + hands: IHands | None = None, +): logger.info( f"Getting toolkits for agent: {agent_name}, " f"task: {api_task_id}, tools: {tools}" @@ -79,6 +93,24 @@ async def get_toolkits(tools: list[str], agent_name: str, api_task_id: str): res = [] for item in tools: if item in toolkits: + # Filter by Brain capabilities + if hands is not None: + if ( + item in TERMINAL_DEPENDENT_TOOLKITS + and not hands.can_execute_terminal() + ): + logger.info( + f"Skipping {item} for {agent_name}: no terminal hand" + ) + continue + if ( + item in BROWSER_DEPENDENT_TOOLKITS + and not hands.can_use_browser() + ): + logger.info( + f"Skipping {item} for {agent_name}: no browser hand" + ) + continue toolkit: AbstractToolkit = toolkits[item] toolkit.agent_name = agent_name toolkit_tools = toolkit.get_can_use_tools(api_task_id) @@ -93,13 +125,31 @@ async def get_toolkits(tools: list[str], agent_name: str, api_task_id: str): return res -async def get_mcp_tools(mcp_server: McpServers): +async def get_mcp_tools( + mcp_server: McpServers, + hands: IHands | None = None, +): logger.info( f"Getting MCP tools for {len(mcp_server['mcpServers'])} servers" ) if len(mcp_server["mcpServers"]) == 0: return [] + # Filter by mcp hand capability + mcp_servers = mcp_server["mcpServers"] + if hands is not None: + filtered = { + name: cfg + for name, cfg in mcp_servers.items() + if hands.can_use_mcp(name) + } + if len(filtered) == 0: + logger.info( + "No MCP servers allowed by mcp hand, skipping MCP tools" + ) + return [] + mcp_server = {**mcp_server, "mcpServers": filtered} + # Ensure unified auth directory for all mcp-remote servers to avoid # re-authentication on each task config_dict = {**mcp_server} diff --git a/backend/app/auth/__init__.py b/backend/app/auth/__init__.py new file mode 100644 index 000000000..9b24bfb1f --- /dev/null +++ b/backend/app/auth/__init__.py @@ -0,0 +1,17 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. 
========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= + +from app.auth.interface import IAuthProvider, NoneAuth + +__all__ = ["IAuthProvider", "NoneAuth"] diff --git a/backend/app/auth/interface.py b/backend/app/auth/interface.py new file mode 100644 index 000000000..741103069 --- /dev/null +++ b/backend/app/auth/interface.py @@ -0,0 +1,47 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= + +from abc import ABC, abstractmethod +from typing import Any + + +class IAuthProvider(ABC): + """ + Auth provider interface. + + This round only provides a no-op local implementation (NoneAuth). + Future modes (API key / JWT / tenant-aware auth) can implement this + interface without changing router/middleware call sites. 
+ """ + + @abstractmethod + async def authenticate(self, scope: dict[str, Any]) -> dict[str, str]: + """ + Authenticate request context. + + Returns: + {"user_id": "", "tenant_id": ""} + """ + ... + + +class NoneAuth(IAuthProvider): + """ + Local deployment default auth provider. + Trusts inbound requests and emits a fixed local identity. + """ + + async def authenticate(self, scope: dict[str, Any]) -> dict[str, str]: + _ = scope + return {"user_id": "local", "tenant_id": "default"} diff --git a/backend/app/channels/__init__.py b/backend/app/channels/__init__.py new file mode 100644 index 000000000..bdae9210a --- /dev/null +++ b/backend/app/channels/__init__.py @@ -0,0 +1,17 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= + +from app.channels.interface import IChannelAdapter + +__all__ = ["IChannelAdapter"] diff --git a/backend/app/channels/interface.py b/backend/app/channels/interface.py new file mode 100644 index 000000000..c01042bbe --- /dev/null +++ b/backend/app/channels/interface.py @@ -0,0 +1,44 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= + +from abc import ABC, abstractmethod + + +class IChannelAdapter(ABC): + """ + Channel Adapter interface. + + This round defines the extension contract only. + Concrete adapters (Slack/WhatsApp/etc.) are out of scope. + """ + + @abstractmethod + async def start(self) -> None: + """Start channel listener.""" + ... + + @abstractmethod + async def stop(self) -> None: + """Stop channel listener.""" + ... + + @abstractmethod + async def send_message(self, session_id: str, content: str) -> None: + """Push outbound message to a channel session.""" + ... + + @abstractmethod + def get_channel_type(self) -> str: + """Channel identifier (e.g. 'slack', 'telegram').""" + ... diff --git a/backend/app/controller/chat_controller.py b/backend/app/controller/chat_controller.py index 87e42a70a..ba681233a 100644 --- a/backend/app/controller/chat_controller.py +++ b/backend/app/controller/chat_controller.py @@ -13,6 +13,7 @@ # ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. 
========= import asyncio +import inspect import logging import os import re @@ -49,9 +50,14 @@ delete_task_lock, get_or_create_task_lock, get_task_lock, + get_task_lock_if_exists, set_current_task_id, task_locks, ) +from app.utils.browser_launcher import ( + ensure_cdp_browser_available, + is_cdp_url_available, +) router = APIRouter() @@ -62,6 +68,74 @@ SSE_TIMEOUT_SECONDS = 60 * 60 +def _is_remote_browser_hands(request: Request | None) -> bool: + hands = getattr(getattr(request, "state", None), "hands", None) + if hands is None: + return False + get_manifest = getattr(hands, "get_capability_manifest", None) + if get_manifest is None or inspect.iscoroutinefunction(get_manifest): + return False + try: + manifest = get_manifest() + except Exception: + return False + if inspect.isawaitable(manifest): + if hasattr(manifest, "close"): + manifest.close() + return False + if not isinstance(manifest, dict): + return False + return manifest.get("deployment") == "remote_cluster" + + +async def _prepare_browser_for_request( + request: Request | None, + port: int, +) -> bool: + existing_cdp_url = os.environ.get("EIGENT_CDP_URL", "").strip() + if existing_cdp_url: + is_available = await asyncio.to_thread( + is_cdp_url_available, existing_cdp_url + ) + if is_available: + if request is not None: + request.state.browser_available = True + return True + os.environ.pop("EIGENT_CDP_URL", None) + + if _is_remote_browser_hands(request): + if request is not None: + request.state.browser_available = True + return True + + try: + launched = await asyncio.to_thread(ensure_cdp_browser_available, port) + except Exception as e: + os.environ.pop("EIGENT_CDP_URL", None) + chat_logger.warning( + "Could not ensure CDP browser for web mode", + extra={"error": str(e), "port": port}, + ) + if request is not None: + request.state.browser_available = False + return False + + if launched: + os.environ["EIGENT_CDP_URL"] = f"http://127.0.0.1:{port}" + if request is not None: + 
request.state.browser_available = True + return True + + os.environ.pop("EIGENT_CDP_URL", None) + chat_logger.warning( + "CDP browser not available after ensure attempt", + extra={"port": port}, + ) + if request is not None: + request.state.browser_available = False + return False + + async def _cleanup_task_lock_safe(task_lock, reason: str) -> bool: """Safely cleanup task lock with existence check. @@ -164,8 +238,11 @@ async def timeout_stream_wrapper( raise -@router.post("/chat", name="start chat") -async def post(data: Chat, request: Request): +async def start_chat_stream(data: Chat, request: Request): + """ + Setup and start chat stream. Used by POST /chat and Message Router. + Returns async generator of SSE chunks. + """ chat_logger.info( "Starting new chat session", extra={ @@ -184,8 +261,15 @@ async def post(data: Chat, request: Request): if safe_env_path: load_dotenv(dotenv_path=safe_env_path) + # TODO(multi-tenant): os.environ is global – concurrent sessions overwrite + # each other's API keys, file paths, and browser ports. Pass these values + # through Chat / request context instead of mutating the process environment. os.environ["file_save_path"] = data.file_save_path() os.environ["browser_port"] = str(data.browser_port) + # Web mode: reuse an existing CDP endpoint first, otherwise acquire browser + # through RemoteHands or launch a local browser when available. 
+ if not data.cdp_browsers: + await _prepare_browser_for_request(request, data.browser_port) os.environ["OPENAI_API_KEY"] = data.api_key os.environ["OPENAI_API_BASE_URL"] = ( data.api_url or "https://api.openai.com/v1" @@ -242,22 +326,33 @@ async def post(data: Chat, request: Request): "log_dir": str(camel_log), }, ) + return timeout_stream_wrapper( + step_solve(data, request, task_lock), task_lock=task_lock + ) + + +@router.post("/chat", name="start chat") +async def post(data: Chat, request: Request): + stream = await start_chat_stream(data, request) return StreamingResponse( - timeout_stream_wrapper( - step_solve(data, request, task_lock), task_lock=task_lock - ), + stream, media_type="text/event-stream", ) @router.post("/chat/{id}", name="improve chat") -def improve(id: str, data: SupplementChat): +def improve(id: str, data: SupplementChat, request: Request): chat_logger.info( "Chat improvement requested", extra={"task_id": id, "question_length": len(data.question)}, ) task_lock = get_task_lock(id) + # Reuse an existing endpoint when possible to avoid tearing down + # a browser that was manually connected through the Browser page. 
+ port = int(os.environ.get("browser_port", "9222")) + asyncio.run(_prepare_browser_for_request(request, port)) + # Allow continuing conversation even after task is done # This supports multi-turn conversation after complex task completion if task_lock.status == Status.done: @@ -374,8 +469,8 @@ def stop(id: str): ) chat_logger.info(f"[STOP-BUTTON] project_id/task_id: {id}") chat_logger.info("=" * 80) - try: - task_lock = get_task_lock(id) + task_lock = get_task_lock_if_exists(id) + if task_lock is not None: chat_logger.info( "[STOP-BUTTON] Task lock retrieved," f" task_lock.id: {task_lock.id}," @@ -386,20 +481,24 @@ def stop(id: str): " ActionStopData(Action.stop)" " to task_lock queue" ) - asyncio.run(task_lock.put_queue(ActionStopData(action=Action.stop))) - chat_logger.info( - "[STOP-BUTTON] ActionStopData queued" - " successfully, this will trigger" - " workforce.stop_gracefully()" - ) - except Exception as e: - # Task lock may not exist if task is already - # finished or never started + try: + asyncio.run( + task_lock.put_queue(ActionStopData(action=Action.stop)) + ) + chat_logger.info( + "[STOP-BUTTON] ActionStopData queued" + " successfully, this will trigger" + " workforce.stop_gracefully()" + ) + except Exception as e: + chat_logger.warning( + "[STOP-BUTTON] Failed to queue ActionStopData", + extra={"task_id": id, "error": str(e)}, + ) + else: chat_logger.warning( - "[STOP-BUTTON] Task lock not found" - " or already stopped," - f" task_id: {id}," - f" error: {str(e)}" + "[STOP-BUTTON] Task lock not found, task may already be stopped", + extra={"task_id": id}, ) return Response(status_code=204) @@ -515,7 +614,13 @@ def skip_task(project_id: str): ) chat_logger.info(f"[STOP-BUTTON] project_id: {project_id}") chat_logger.info("=" * 80) - task_lock = get_task_lock(project_id) + task_lock = get_task_lock_if_exists(project_id) + if task_lock is None: + chat_logger.warning( + "[STOP-BUTTON] Task lock not found, task may already be stopped", + extra={"project_id": 
project_id}, + ) + return Response(status_code=204) chat_logger.info( "[STOP-BUTTON] Task lock retrieved," f" task_lock.id: {task_lock.id}," diff --git a/backend/app/controller/file_controller.py b/backend/app/controller/file_controller.py new file mode 100644 index 000000000..143d3212a --- /dev/null +++ b/backend/app/controller/file_controller.py @@ -0,0 +1,312 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= + +import logging +import mimetypes +import re +import time +from pathlib import Path +from typing import Annotated +from urllib.parse import quote + +from fastapi import APIRouter, File, Header, HTTPException, Query, UploadFile +from fastapi.responses import FileResponse + +from app.component.environment import env +from app.utils.file_utils import list_files, resolve_under_base + +router = APIRouter() +file_logger = logging.getLogger("file_controller") + +# Config +MAX_FILE_SIZE_BYTES = 50 * 1024 * 1024 # 50MB +MAX_FILES_PER_SESSION = 20 +WORKSPACE_ROOT = env("EIGENT_WORKSPACE", "~/.eigent/workspace") +SESSION_ID_PATTERN = re.compile(r"^[A-Za-z0-9][A-Za-z0-9._-]{0,127}$") + + +def _get_eigent_root() -> Path: + """Base root for eigent storage (~/eigent). 
Do NOT use env file_save_path + here: chat overwrites it to task path, which would break list/stream.""" + eigent = Path.home() / "eigent" + if eigent.exists(): + return eigent + dot_eigent = Path.home() / ".eigent" + if dot_eigent.exists(): + return dot_eigent + return eigent # default to ~/eigent + + +def _get_workspace_root() -> Path: + return Path(WORKSPACE_ROOT).expanduser() + + +def _validate_session_id(session_id: str) -> str: + normalized = (session_id or "").strip() + if not SESSION_ID_PATTERN.fullmatch(normalized): + raise ValueError("Invalid X-Session-ID") + return normalized + + +def _get_session_uploads_dir(session_id: str) -> Path: + root = _get_workspace_root().resolve() + validated = _validate_session_id(session_id) + uploads_dir = (root / validated / "uploads").resolve() + try: + uploads_dir.relative_to(root) + except ValueError as exc: + raise ValueError("Invalid X-Session-ID") from exc + return uploads_dir + + +def _count_session_uploads(session_id: str) -> int: + uploads_dir = _get_session_uploads_dir(session_id) + if not uploads_dir.exists(): + return 0 + return len(list(uploads_dir.iterdir())) + + +@router.post("/files") +async def upload_file( + file: Annotated[UploadFile, File()], + x_session_id: Annotated[str | None, Header(alias="X-Session-ID")] = None, +) -> dict: + """ + Upload file. Requires X-Session-ID header. + Returns file_id for message attachments reference. 
+ """ + if not x_session_id: + raise HTTPException( + status_code=400, + detail="X-Session-ID header is required for file upload", + ) + try: + validated_session_id = _validate_session_id(x_session_id) + except ValueError as exc: + raise HTTPException( + status_code=400, detail="Invalid X-Session-ID" + ) from exc + + # Check session file count limit + count = _count_session_uploads(validated_session_id) + if count >= MAX_FILES_PER_SESSION: + raise HTTPException( + status_code=429, + detail=f"Maximum {MAX_FILES_PER_SESSION} files per session", + ) + + # Read and validate size + content = await file.read() + if len(content) > MAX_FILE_SIZE_BYTES: + raise HTTPException( + status_code=413, + detail=f"File size exceeds {MAX_FILE_SIZE_BYTES // (1024 * 1024)}MB limit", + ) + + # Generate safe filename + timestamp = int(time.time() * 1000) + safe_name = "".join( + c if c.isalnum() or c in "._-" else "_" + for c in (file.filename or "file") + ) + stored_name = f"{safe_name}_{timestamp}" + + # Write to disk + uploads_dir = _get_session_uploads_dir(validated_session_id) + uploads_dir.mkdir(parents=True, exist_ok=True) + target_path = uploads_dir / stored_name + target_path.write_bytes(content) + + file_id = f"upload://{stored_name}" + file_logger.info( + f"File uploaded: session={validated_session_id}, file_id={file_id}, size={len(content)}" + ) + + return { + "file_id": file_id, + "filename": file.filename or "file", + "size": len(content), + } + + +def _sanitize_email(email: str) -> str: + """Sanitize email for use in path (match chat_controller logic).""" + return re.sub(r'[\\/*?:"<>|\s]', "_", email.split("@")[0]).strip(".") + + +def _get_project_root(email: str, project_id: str) -> Path: + """Get project root path: ~/eigent/{email}/project_{project_id}/.""" + root = _get_eigent_root() + email_sanitized = _sanitize_email(email) + return root / email_sanitized / f"project_{project_id}" + + +def _resolve_project_root(email: str, project_id: str) -> Path: + """ + Resolve 
project root, preferring the email-scoped path but falling back to + any local project_{project_id} directory when the stored email differs from + the current login identity. + """ + preferred = _get_project_root(email, project_id) + if preferred.exists(): + return preferred + + root = _get_eigent_root() + candidate_name = f"project_{project_id}" + try: + for child in root.iterdir(): + if not child.is_dir(): + continue + candidate = child / candidate_name + if candidate.exists(): + file_logger.info( + "Resolved project root via fallback lookup: %s -> %s", + preferred, + candidate, + ) + return candidate + except FileNotFoundError: + pass + except Exception as e: + file_logger.warning("project root fallback lookup failed: %s", e) + + return preferred + + +@router.get("/files") +async def list_project_files( + project_id: str = Query(..., description="Project ID"), + email: str = Query(..., description="User email"), + task_id: str | None = Query( + None, description="Optional task ID to scope listing" + ), +) -> list[dict]: + """ + List files in project working directory (Brain storage). + Used by Web mode when ipcRenderer is unavailable. + Returns [{filename, url}] where url can be used to fetch file content. 
+ """ + if not project_id or not email: + raise HTTPException( + status_code=400, + detail="project_id and email are required", + ) + project_root = _resolve_project_root(email, project_id) + list_dir = str(project_root) + if task_id: + list_dir = str(project_root / f"task_{task_id}") + if not Path(list_dir).exists(): + file_logger.debug( + "list_project_files: path does not exist: %s", + list_dir, + ) + return [] + base_path = str(project_root.resolve()) + try: + paths = list_files(list_dir, base=base_path, max_entries=500) + except Exception as e: + file_logger.warning("list_project_files failed: %s", e) + return [] + result: list[dict] = [] + for abs_path in paths: + try: + rel = str(Path(abs_path).relative_to(base_path)) + # URL-encode the relative path for stream endpoint + path_param = quote(rel, safe="") + result.append( + { + "filename": Path(abs_path).name, + "url": f"/files/stream?path={path_param}&project_id={quote(project_id)}&email={quote(email)}", + "relativePath": rel, + } + ) + except (ValueError, OSError): + continue + return result + + +@router.get("/files/stream") +async def stream_file( + path: str = Query(..., description="Relative path from project root"), + project_id: str = Query(..., description="Project ID"), + email: str = Query(..., description="User email"), +): + """ + Stream file content. Path must be relative to project root. + Used by Web mode to fetch file content for display. 
+ """ + if not path or not project_id or not email: + raise HTTPException( + status_code=400, + detail="path, project_id and email are required", + ) + project_root = _resolve_project_root(email, project_id) + # Resolve path and ensure it stays under project root (security) + try: + resolved = resolve_under_base(path, str(project_root.resolve())) + except Exception as e: + file_logger.warning("stream_file path validation failed: %s", e) + raise HTTPException(status_code=400, detail="Invalid path") from e + p = Path(resolved) + if not p.is_file(): + raise HTTPException(status_code=404, detail="File not found") + media_type, _ = mimetypes.guess_type(str(p)) + if not media_type: + media_type = "application/octet-stream" + # content_disposition_type=inline: display in iframe instead of triggering download + return FileResponse( + path=str(p), + filename=p.name, + media_type=media_type, + content_disposition_type="inline", + ) + + +@router.get("/files/preview/{email}/{project_id}/{file_path:path}") +async def preview_file( + email: str, + project_id: str, + file_path: str, +): + """ + Preview file content with a path-based URL so relative references inside + HTML/CSS/JS resolve against the project directory structure. 
+ """ + if not file_path or not project_id or not email: + raise HTTPException( + status_code=400, + detail="file_path, project_id and email are required", + ) + + project_root = _resolve_project_root(email, project_id) + try: + resolved = resolve_under_base(file_path, str(project_root.resolve())) + except Exception as e: + file_logger.warning("preview_file path validation failed: %s", e) + raise HTTPException(status_code=400, detail="Invalid path") from e + + p = Path(resolved) + if not p.is_file(): + raise HTTPException(status_code=404, detail="File not found") + + media_type, _ = mimetypes.guess_type(str(p)) + if not media_type: + media_type = "application/octet-stream" + + return FileResponse( + path=str(p), + filename=p.name, + media_type=media_type, + content_disposition_type="inline", + ) diff --git a/backend/app/controller/health_controller.py b/backend/app/controller/health_controller.py index 1ee53719e..5d0cb7ffa 100644 --- a/backend/app/controller/health_controller.py +++ b/backend/app/controller/health_controller.py @@ -13,10 +13,14 @@ # ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. 
========= import logging +import os -from fastapi import APIRouter +from fastapi import APIRouter, Query from pydantic import BaseModel +from app.router_layer.hands_resolver import get_environment_hands +from app.utils.browser_launcher import _is_cdp_available + logger = logging.getLogger("health_controller") router = APIRouter(tags=["Health"]) @@ -25,16 +29,30 @@ class HealthResponse(BaseModel): status: str service: str + capabilities: dict | None = None @router.get("/health", name="health check", response_model=HealthResponse) -async def health_check(): +async def health_check(detail: bool = Query(False)): """Health check endpoint for verifying backend is ready to accept requests.""" logger.debug("Health check requested") response = HealthResponse(status="ok", service="eigent") + if detail: + hands = get_environment_hands() + capabilities = hands.get_capability_manifest() + try: + browser_port = int(os.environ.get("browser_port", "9222")) + except ValueError: + browser_port = 9222 + capabilities["browser_cdp_reachable"] = _is_cdp_available(browser_port) + response.capabilities = capabilities logger.debug( "Health check completed", - extra={"status": response.status, "service": response.service}, + extra={ + "status": response.status, + "service": response.service, + "detail": detail, + }, ) return response diff --git a/backend/app/controller/mcp_controller.py b/backend/app/controller/mcp_controller.py new file mode 100644 index 000000000..52e1dc0ea --- /dev/null +++ b/backend/app/controller/mcp_controller.py @@ -0,0 +1,63 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= + +import logging + +from fastapi import APIRouter, HTTPException + +from app.service.mcp_config import ( + add_mcp, + read_mcp_config, + remove_mcp, + update_mcp, +) + +router = APIRouter() +mcp_logger = logging.getLogger("mcp_controller") + + +@router.get("/mcp/list") +def mcp_list() -> dict: + """List all MCP servers (global config).""" + return read_mcp_config() + + +@router.post("/mcp/install") +def mcp_install(body: dict) -> dict: + """Install/add MCP server to global config. Body: { name, mcp }.""" + name = body.get("name") + mcp = body.get("mcp") + if not name: + raise HTTPException(status_code=400, detail="name is required") + if not mcp or not isinstance(mcp, dict): + raise HTTPException(status_code=400, detail="mcp object is required") + add_mcp(str(name).strip(), mcp) + mcp_logger.info("MCP installed: %s", name) + return {"success": True} + + +@router.delete("/mcp/{name}") +def mcp_remove(name: str) -> dict: + """Remove MCP server from global config.""" + remove_mcp(name) + mcp_logger.info("MCP removed: %s", name) + return {"success": True} + + +@router.put("/mcp/{name}") +def mcp_update(name: str, mcp: dict) -> dict: + """Update MCP server in global config. 
Body is the mcp config object.""" + update_mcp(name, mcp) + mcp_logger.info("MCP updated: %s", name) + return {"success": True} diff --git a/backend/app/controller/message_controller.py b/backend/app/controller/message_controller.py new file mode 100644 index 000000000..16ba233f9 --- /dev/null +++ b/backend/app/controller/message_controller.py @@ -0,0 +1,109 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= + +"""Message Router HTTP endpoint for Phase 2.""" + +import inspect +import json +import logging + +from fastapi import APIRouter, Request +from fastapi.responses import JSONResponse, StreamingResponse + +from app.router_layer.interface import InboundMessage +from app.router_layer.message_router import DefaultMessageRouter + +router = APIRouter() +message_logger = logging.getLogger("message_controller") + +# Singleton router instance +_message_router: DefaultMessageRouter | None = None + + +def get_message_router() -> DefaultMessageRouter: + global _message_router + if _message_router is None: + _message_router = DefaultMessageRouter() + return _message_router + + +@router.post("/messages", name="send message via router") +async def post_message(request: Request): + """ + Accept message per docs/design/06-protocol.md §2.1. + Uses X-Channel, X-Session-ID, X-User-ID from ChannelSessionMiddleware. 
+ Returns SSE stream for chat, or JSON for non-streaming. + """ + body = ( + await request.json() + if request.headers.get("content-type", "").startswith( + "application/json" + ) + else {} + ) + if not isinstance(body, dict): + body = {} + + channel = getattr(request.state, "channel", None) or "desktop" + session_id = getattr(request.state, "session_id", None) + user_id = getattr(request.state, "user_id", None) + + mr = get_message_router() + resolved_session_id = await mr.resolve_session( + channel, session_id, user_id + ) + + headers_dict = {} + for k, v in request.headers.items(): + headers_dict[k] = v + + msg = InboundMessage( + session_id=resolved_session_id, + channel=channel, + user_id=user_id, + payload=body, + headers=headers_dict, + ) + + result = mr.route_in(msg, request=request) + + if not inspect.isasyncgen(result): + message_logger.error( + "message_router.route_in returned non-stream result: %r", + type(result), + ) + return JSONResponse( + { + "code": -1, + "text": "Internal router contract error", + "data": {}, + }, + status_code=500, + headers={"X-Session-ID": resolved_session_id}, + ) + + async def stream(): + async for out in result: + raw = out.payload.get("raw") + if raw: + yield raw + elif not out.stream: + # Non-streaming error: yield as SSE event + yield f"data: {json.dumps(out.payload, ensure_ascii=False)}\n\n" + + return StreamingResponse( + stream(), + media_type="text/event-stream", + headers={"X-Session-ID": resolved_session_id}, + ) diff --git a/backend/app/controller/skill_controller.py b/backend/app/controller/skill_controller.py new file mode 100644 index 000000000..c6ddbd925 --- /dev/null +++ b/backend/app/controller/skill_controller.py @@ -0,0 +1,199 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= + +import logging +from typing import Annotated + +from fastapi import APIRouter, File, Form, HTTPException, Query, UploadFile + +from app.service.skill_config_service import ( + skill_config_delete, + skill_config_init, + skill_config_load, + skill_config_toggle, + skill_config_update, +) +from app.service.skill_service import ( + skill_delete, + skill_get_path_by_name, + skill_import_zip, + skill_list_files, + skill_read, + skill_write, + skills_scan, +) + +router = APIRouter() +skill_logger = logging.getLogger("skill_controller") + + +# --- Skill config (must be before /skills/{skill_dir_name} to avoid path conflict) --- + + +@router.get("/skills/config") +def skill_config_get(user_id: str = Query(..., description="User ID")) -> dict: + """Load skills config for user.""" + config = skill_config_load(user_id) + return {"success": True, "config": config} + + +@router.post("/skills/config/init") +def skill_config_init_endpoint(body: dict) -> dict: + """Initialize skills config for user (merge default if present).""" + user_id = body.get("user_id") + if not user_id: + raise HTTPException(status_code=400, detail="user_id is required") + config = skill_config_init(user_id) + return {"success": True, "config": config} + + +@router.put("/skills/config/{skill_name}") +def skill_config_update_endpoint(skill_name: str, body: dict) -> dict: + """Update config for a skill.""" + user_id = body.get("user_id") + if not user_id: + raise HTTPException(status_code=400, detail="user_id is required") 
+ skill_config = {k: v for k, v in body.items() if k != "user_id"} + skill_config_update(user_id, skill_name, skill_config) + return {"success": True} + + +@router.delete("/skills/config/{skill_name}") +def skill_config_delete_endpoint( + skill_name: str, user_id: str = Query(..., description="User ID") +) -> dict: + """Remove skill from config.""" + skill_config_delete(user_id, skill_name) + return {"success": True} + + +@router.post("/skills/config/{skill_name}/toggle") +def skill_config_toggle_endpoint(skill_name: str, body: dict) -> dict: + """Toggle skill enabled state.""" + user_id = body.get("user_id") + enabled = body.get("enabled") + if not user_id: + raise HTTPException(status_code=400, detail="user_id is required") + if enabled is None: + raise HTTPException(status_code=400, detail="enabled is required") + result = skill_config_toggle(user_id, skill_name, bool(enabled)) + return {"success": True, "config": result} + + +# --- Skills CRUD --- + + +@router.post("/skills/import") +async def skill_import_endpoint( + file: Annotated[ + UploadFile, File(description="Zip file containing SKILL.md") + ], + replacements: Annotated[ + str | None, Form(description="Comma-separated folder names to replace") + ] = None, +) -> dict: + """Import skills from a zip archive. 
Returns {success, error?, conflicts?}.""" + if not file.filename or not file.filename.lower().endswith(".zip"): + raise HTTPException( + status_code=400, detail="File must be a .zip archive" + ) + try: + zip_bytes = await file.read() + except Exception: + raise HTTPException( + status_code=400, detail="Failed to read uploaded file" + ) + repl_list = ( + [s for s in (s.strip() for s in replacements.split(",")) if s] + if replacements + else None + ) + result = skill_import_zip(zip_bytes, repl_list) + if not result.get("success") and "conflicts" not in result: + raise HTTPException( + status_code=400, + detail=result.get("error", "Import failed"), + ) + return result + + +@router.get("/skills/path") +def skill_get_path( + name: str = Query(..., description="Skill display name"), +) -> dict: + """Get absolute directory path for a skill by name. For reveal-in-folder.""" + path_val = skill_get_path_by_name(name) + if path_val is None: + raise HTTPException(status_code=404, detail=f"Skill not found: {name}") + return {"path": path_val} + + +@router.get("/skills") +def skills_list() -> dict: + """Scan and list all skills.""" + skills = skills_scan() + return {"success": True, "skills": skills} + + +@router.post("/skills/{skill_dir_name}") +def skill_create(skill_dir_name: str, body: dict) -> dict: + """Create or overwrite skill. 
Body: { content }.""" + content = body.get("content", "") + try: + skill_write(skill_dir_name, content) + skill_logger.info("Skill written: %s", skill_dir_name) + return {"success": True} + except ValueError as e: + raise HTTPException(status_code=400, detail=str(e)) + except PermissionError as e: + raise HTTPException(status_code=403, detail=str(e)) + + +@router.get("/skills/{skill_dir_name}") +def skill_get(skill_dir_name: str) -> dict: + """Read skill content.""" + try: + content = skill_read(skill_dir_name) + return {"success": True, "content": content} + except ValueError as e: + raise HTTPException(status_code=400, detail=str(e)) + except PermissionError as e: + raise HTTPException(status_code=403, detail=str(e)) + except FileNotFoundError: + raise HTTPException(status_code=404, detail="Skill not found") + + +@router.delete("/skills/{skill_dir_name}") +def skill_remove(skill_dir_name: str) -> dict: + """Delete skill.""" + try: + skill_delete(skill_dir_name) + skill_logger.info("Skill deleted: %s", skill_dir_name) + return {"success": True} + except ValueError as e: + raise HTTPException(status_code=400, detail=str(e)) + except PermissionError as e: + raise HTTPException(status_code=403, detail=str(e)) + + +@router.get("/skills/{skill_dir_name}/files") +def skill_files(skill_dir_name: str) -> dict: + """List files in skill directory.""" + try: + files = skill_list_files(skill_dir_name) + return {"success": True, "files": files} + except ValueError as e: + raise HTTPException(status_code=400, detail=str(e)) + except PermissionError as e: + raise HTTPException(status_code=403, detail=str(e)) diff --git a/backend/app/controller/task_controller.py b/backend/app/controller/task_controller.py index 8924218cd..0b15df35e 100644 --- a/backend/app/controller/task_controller.py +++ b/backend/app/controller/task_controller.py @@ -30,6 +30,7 @@ ActionTakeControl, ActionUpdateTaskData, get_task_lock, + get_task_lock_if_exists, task_locks, ) @@ -38,6 +39,17 @@ router = 
APIRouter() +@router.post("/v1/tasks", name="dispatch task placeholder") +def create_dispatch_task(): + return Response(status_code=501, content="Not implemented yet") + + +@router.get("/v1/tasks/{task_id}", name="dispatch task status placeholder") +def get_dispatch_task(task_id: str): + _ = task_id + return Response(status_code=501, content="Not implemented yet") + + @router.post("/task/{id}/start", name="start task") def start(id: str): task_lock = get_task_lock(id) @@ -68,16 +80,27 @@ def put(id: str, data: UpdateData): class TakeControl(BaseModel): - action: Literal[Action.pause, Action.resume] + action: Literal[Action.pause, Action.resume, Action.stop] -@router.put("/task/{id}/take-control", name="take control pause or resume") +@router.put( + "/task/{id}/take-control", name="take control pause, resume or stop" +) def take_control(id: str, data: TakeControl): logger.info( "Task control action", extra={"task_id": id, "action": data.action} ) - task_lock = get_task_lock(id) - asyncio.run(task_lock.put_queue(ActionTakeControl(action=data.action))) + task_lock = get_task_lock_if_exists(id) + if task_lock is None: + logger.warning( + "Task lock not found for take-control, may already be stopped", + extra={"task_id": id}, + ) + return Response(status_code=204) + if data.action == Action.stop: + asyncio.run(task_lock.put_queue(ActionStopData(action=Action.stop))) + else: + asyncio.run(task_lock.put_queue(ActionTakeControl(action=data.action))) logger.info( "Task control action completed", extra={"task_id": id, "action": data.action}, diff --git a/backend/app/controller/tool_controller.py b/backend/app/controller/tool_controller.py index 89fbac87b..76a217883 100644 --- a/backend/app/controller/tool_controller.py +++ b/backend/app/controller/tool_controller.py @@ -12,18 +12,28 @@ # limitations under the License. # ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. 
========= +import asyncio import logging import os import shutil import threading import time +import uuid -from fastapi import APIRouter, HTTPException +from fastapi import APIRouter, HTTPException, Request from pydantic import BaseModel from app.agent.toolkit.google_calendar_toolkit import GoogleCalendarToolkit from app.agent.toolkit.linkedin_toolkit import LinkedInToolkit from app.agent.toolkit.notion_mcp_toolkit import NotionMCPToolkit +from app.utils.browser_launcher import ( + DEFAULT_CDP_PORT, + _is_cdp_available, + _is_port_in_use, + ensure_cdp_browser_available, + is_local_cdp_host, + normalize_cdp_url, +) from app.utils.cookie_manager import CookieManager from app.utils.oauth_state_manager import oauth_state_manager @@ -39,6 +49,305 @@ class LinkedInTokenRequest(BaseModel): logger = logging.getLogger("tool_controller") router = APIRouter() +_web_cdp_browser_meta: dict | None = None + + +class CdpBrowserConnectRequest(BaseModel): + port: int + name: str | None = None + + +def _build_web_cdp_browser( + endpoint: str, + *, + is_external: bool, + name: str | None = None, + added_at: int | None = None, + resource_session_id: str | None = None, + managed_by: str = "local", +) -> dict: + normalized_endpoint, host, port = normalize_cdp_url(endpoint) + default_location = ( + str(port) if is_local_cdp_host(host) else f"{host}:{port}" + ) + browser_name = name or ( + f"External Browser ({default_location})" + if is_external + else f"Managed Browser ({default_location})" + ) + browser_id = resource_session_id or ( + f"web-cdp-{port}" + if is_local_cdp_host(host) + else f"web-cdp-{host.replace('.', '-')}-{port}" + ) + return { + "id": browser_id, + "port": port, + "endpoint": normalized_endpoint, + "host": host, + "isExternal": is_external, + "name": browser_name, + "addedAt": added_at or int(time.time() * 1000), + "resourceSessionId": resource_session_id, + "managedBy": managed_by, + } + + +def _get_connected_cdp_endpoint() -> str | None: + cdp_url = 
os.environ.get("EIGENT_CDP_URL") + if cdp_url: + return cdp_url + if _web_cdp_browser_meta: + return _web_cdp_browser_meta.get("endpoint") + return None + + +def _get_connected_cdp_port() -> int | None: + cdp_url = _get_connected_cdp_endpoint() + if not cdp_url: + return None + try: + _, _, port = normalize_cdp_url(cdp_url) + return port + except Exception: + logger.warning("Invalid EIGENT_CDP_URL: %s", cdp_url) + return None + + +def _set_connected_cdp_browser( + endpoint: str, + *, + is_external: bool, + name: str | None = None, + resource_session_id: str | None = None, + managed_by: str = "local", +) -> dict: + global _web_cdp_browser_meta + normalized_endpoint, _, _ = normalize_cdp_url(endpoint) + os.environ["EIGENT_CDP_URL"] = normalized_endpoint + _web_cdp_browser_meta = _build_web_cdp_browser( + normalized_endpoint, + is_external=is_external, + name=name, + resource_session_id=resource_session_id, + managed_by=managed_by, + ) + return _web_cdp_browser_meta + + +def _clear_connected_cdp_browser() -> None: + global _web_cdp_browser_meta + os.environ.pop("EIGENT_CDP_URL", None) + _web_cdp_browser_meta = None + + +def _list_connected_cdp_browsers() -> list[dict]: + global _web_cdp_browser_meta + endpoint = _get_connected_cdp_endpoint() + if endpoint is None: + return [] + + if not _is_cdp_endpoint_available(endpoint): + _clear_connected_cdp_browser() + return [] + + if ( + _web_cdp_browser_meta + and _web_cdp_browser_meta.get("endpoint") == endpoint + ): + return [_web_cdp_browser_meta] + + inferred_browser = _build_web_cdp_browser(endpoint, is_external=True) + _web_cdp_browser_meta = inferred_browser + return [inferred_browser] + + +def _is_cdp_endpoint_available(endpoint: str) -> bool: + normalized_endpoint, host, port = normalize_cdp_url(endpoint) + if is_local_cdp_host(host): + return _is_cdp_available(port) + + try: + import httpx + + response = httpx.get( + f"{normalized_endpoint}/json/version", + timeout=2.0, + ) + return response.status_code == 200 + 
except Exception: + return False + + +def _is_remote_browser_hands(hands) -> bool: + if hands is None: + return False + try: + manifest = hands.get_capability_manifest() + except Exception: + return False + return manifest.get("deployment") == "remote_cluster" + + +async def _release_remote_browser_if_needed(request: Request | None) -> None: + meta = _web_cdp_browser_meta or {} + resource_session_id = meta.get("resourceSessionId") + if meta.get("managedBy") != "remote" or not resource_session_id: + return + + hands = getattr(getattr(request, "state", None), "hands", None) + if hands is None: + return + + try: + await asyncio.to_thread( + hands.release_resource, + "browser", + resource_session_id, + ) + except Exception as exc: + logger.warning( + "Failed to release remote browser session %s: %s", + resource_session_id, + exc, + ) + + +@router.get("/browser/cdp/list", name="list cdp browsers") +async def list_cdp_browsers(): + """List the currently connected CDP browser in web mode.""" + return _list_connected_cdp_browsers() + + +@router.post("/browser/cdp/launch", name="launch cdp browser") +async def launch_cdp_browser(request: Request): + """ + Launch or reuse a managed CDP browser for web mode. + + Returns: + Connection information for the managed browser. 
+ """ + existing_browsers = _list_connected_cdp_browsers() + if existing_browsers: + browser = existing_browsers[0] + return { + "success": True, + "port": browser["port"], + "browser": browser, + "endpoint": browser.get("endpoint"), + "reused": True, + } + + hands = getattr(request.state, "hands", None) + if _is_remote_browser_hands(hands): + session_id = f"browser_ui_{uuid.uuid4().hex[:12]}" + try: + endpoint = await asyncio.to_thread( + hands.acquire_resource, + "browser", + session_id, + port=DEFAULT_CDP_PORT, + ) + except Exception: + logger.exception( + "Failed to acquire remote browser resource for session %s", + session_id, + ) + return { + "success": False, + "error": "Failed to acquire remote browser", + } + + browser = _set_connected_cdp_browser( + endpoint, + is_external=False, + resource_session_id=session_id, + managed_by="remote", + ) + return { + "success": True, + "port": browser["port"], + "browser": browser, + "endpoint": browser.get("endpoint"), + } + + port = DEFAULT_CDP_PORT + launched = ensure_cdp_browser_available(port) + if not launched: + if _is_port_in_use(port): + return { + "success": False, + "error": f"Port {port} is already in use and is not exposing CDP.", + } + return { + "success": False, + "error": "Failed to launch browser. Ensure Chrome/Chromium is installed or run playwright install chromium.", + } + + browser = _set_connected_cdp_browser( + f"http://127.0.0.1:{port}", + is_external=False, + ) + return { + "success": True, + "port": browser["port"], + "browser": browser, + "endpoint": browser.get("endpoint"), + } + + +@router.post("/browser/cdp/connect", name="connect cdp browser") +async def connect_cdp_browser(data: CdpBrowserConnectRequest): + """ + Connect an already-running browser that exposes CDP. + + Args: + data.port: CDP port exposed by the browser. + data.name: Optional custom display name. 
+ """ + if data.port < 1 or data.port > 65535: + return {"success": False, "error": "Invalid port number."} + + if not _is_cdp_available(data.port): + return { + "success": False, + "error": f"No CDP browser found on port {data.port}.", + } + + browser = _set_connected_cdp_browser( + f"http://127.0.0.1:{data.port}", + is_external=True, + name=data.name, + ) + return { + "success": True, + "port": data.port, + "browser": browser, + } + + +@router.delete("/browser/cdp/{port}", name="disconnect cdp browser") +async def disconnect_cdp_browser(port: int, request: Request): + """ + Disconnect the current web-mode CDP browser reference. + + Note: + This does not terminate the browser process; it only clears + the backend's active CDP target. + """ + current_port = _get_connected_cdp_port() + if current_port is None: + return {"success": False, "error": "No connected browser to remove."} + + if current_port != port: + return { + "success": False, + "error": f"Browser on port {port} is not the active CDP connection.", + } + + await _release_remote_browser_if_needed(request) + _clear_connected_cdp_browser() + return {"success": True} @router.post("/install/tool/{tool}", name="install tool") diff --git a/backend/app/file_access/__init__.py b/backend/app/file_access/__init__.py new file mode 100644 index 000000000..1f21f1989 --- /dev/null +++ b/backend/app/file_access/__init__.py @@ -0,0 +1,19 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= + +from app.file_access.interface import IFileAccess +from app.file_access.local_file_access import LocalFileAccess +from app.file_access.upload_file_access import UploadFileAccess + +__all__ = ["IFileAccess", "LocalFileAccess", "UploadFileAccess"] diff --git a/backend/app/file_access/interface.py b/backend/app/file_access/interface.py new file mode 100644 index 000000000..ab860af15 --- /dev/null +++ b/backend/app/file_access/interface.py @@ -0,0 +1,54 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. 
========= + +from abc import ABC, abstractmethod + + +class IFileAccess(ABC): + """File access abstraction, implementation selected by client type""" + + @abstractmethod + def read_file(self, path: str) -> str: + """Read file content, return text""" + pass + + @abstractmethod + def read_file_binary(self, path: str) -> bytes: + """Read file as binary""" + pass + + @abstractmethod + def write_file(self, path: str, content: str | bytes) -> None: + """Write file""" + pass + + @abstractmethod + def exists(self, path: str) -> bool: + """Check if path exists""" + pass + + @abstractmethod + def list_dir(self, path: str) -> list[str]: + """List directory contents""" + pass + + @abstractmethod + def get_working_directory(self, session_id: str) -> str: + """Return session working directory absolute path""" + pass + + @abstractmethod + def resolve_path(self, path_or_id: str, session_id: str) -> str: + """Resolve path or file_id to actual path""" + pass diff --git a/backend/app/file_access/local_file_access.py b/backend/app/file_access/local_file_access.py new file mode 100644 index 000000000..2a63e4d89 --- /dev/null +++ b/backend/app/file_access/local_file_access.py @@ -0,0 +1,57 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. 
========= + +from pathlib import Path + +from app.file_access.interface import IFileAccess + + +class LocalFileAccess(IFileAccess): + """Direct local filesystem access (Desktop/CLI)""" + + def __init__(self, workspace_root: str = "~/.eigent/workspace") -> None: + self.workspace_root = Path(workspace_root).expanduser() + + def read_file(self, path: str) -> str: + return Path(path).read_text(encoding="utf-8") + + def read_file_binary(self, path: str) -> bytes: + return Path(path).read_bytes() + + def write_file(self, path: str, content: str | bytes) -> None: + p = Path(path) + p.parent.mkdir(parents=True, exist_ok=True) + if isinstance(content, str): + p.write_text(content, encoding="utf-8") + else: + p.write_bytes(content) + + def exists(self, path: str) -> bool: + return Path(path).exists() + + def list_dir(self, path: str) -> list[str]: + return [p.name for p in Path(path).iterdir()] + + def get_working_directory(self, session_id: str) -> str: + return str(self.workspace_root / session_id) + + def resolve_path(self, path_or_id: str, session_id: str) -> str: + if path_or_id.startswith("upload://"): + return self._resolve_upload_id(path_or_id, session_id) + return path_or_id + + def _resolve_upload_id(self, path_or_id: str, session_id: str) -> str: + """Resolve upload://xxx to workspace/{session_id}/uploads/xxx""" + file_id = path_or_id.removeprefix("upload://") + return str(self.workspace_root / session_id / "uploads" / file_id) diff --git a/backend/app/file_access/upload_file_access.py b/backend/app/file_access/upload_file_access.py new file mode 100644 index 000000000..b571e3726 --- /dev/null +++ b/backend/app/file_access/upload_file_access.py @@ -0,0 +1,72 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= + +from pathlib import Path + +from app.file_access.interface import IFileAccess + + +class UploadFileAccess(IFileAccess): + """Only handle uploaded files, path restricted to workspace/{session_id}/ (Web/Channel)""" + + def __init__(self, workspace_root: str = "~/.eigent/workspace") -> None: + self.workspace_root = Path(workspace_root).expanduser() + + def read_file(self, path: str) -> str: + resolved = self._ensure_in_workspace(path) + return resolved.read_text(encoding="utf-8") + + def read_file_binary(self, path: str) -> bytes: + resolved = self._ensure_in_workspace(path) + return resolved.read_bytes() + + def write_file(self, path: str, content: str | bytes) -> None: + resolved = self._ensure_in_workspace(path) + resolved.parent.mkdir(parents=True, exist_ok=True) + if isinstance(content, str): + resolved.write_text(content, encoding="utf-8") + else: + resolved.write_bytes(content) + + def exists(self, path: str) -> bool: + try: + resolved = self._ensure_in_workspace(path) + return resolved.exists() + except PermissionError: + return False + + def list_dir(self, path: str) -> list[str]: + resolved = self._ensure_in_workspace(path) + return [p.name for p in resolved.iterdir()] + + def get_working_directory(self, session_id: str) -> str: + return str(self.workspace_root / session_id) + + def resolve_path(self, path_or_id: str, session_id: str) -> str: + if not path_or_id.startswith("upload://"): + raise PermissionError("Only uploaded files are accessible") + return 
self._resolve_upload_id(path_or_id, session_id) + + def _resolve_upload_id(self, path_or_id: str, session_id: str) -> str: + file_id = path_or_id.removeprefix("upload://") + return str(self.workspace_root / session_id / "uploads" / file_id) + + def _ensure_in_workspace(self, path: str) -> Path: + p = Path(path).resolve() + root = self.workspace_root.resolve() + try: + p.relative_to(root) + except ValueError: + raise PermissionError("Path outside workspace") + return p diff --git a/backend/app/hands/__init__.py b/backend/app/hands/__init__.py new file mode 100644 index 000000000..a48442a51 --- /dev/null +++ b/backend/app/hands/__init__.py @@ -0,0 +1,46 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. 
========= + +from app.hands.capabilities import BrainCapabilities, detect_capabilities +from app.hands.cluster_config import ( + ClusterEndpointConfig, + HandsClusterConfigError, + HandsClusterRoutingConfig, + load_hands_cluster_config, +) +from app.hands.cluster_interface import IHandsCluster +from app.hands.environment_hands import EnvironmentHands +from app.hands.full_hands import FullHands +from app.hands.http_hands_cluster import HttpHandsCluster +from app.hands.interface import IHands +from app.hands.remote_hands import RemoteHands +from app.hands.routed_hands_cluster import RoutedHandsCluster +from app.hands.sandbox_hands import SandboxHands + +__all__ = [ + "BrainCapabilities", + "ClusterEndpointConfig", + "EnvironmentHands", + "FullHands", + "HandsClusterConfigError", + "HandsClusterRoutingConfig", + "HttpHandsCluster", + "IHandsCluster", + "IHands", + "RemoteHands", + "RoutedHandsCluster", + "SandboxHands", + "detect_capabilities", + "load_hands_cluster_config", +] diff --git a/backend/app/hands/capabilities.py b/backend/app/hands/capabilities.py new file mode 100644 index 000000000..fc63ee4cc --- /dev/null +++ b/backend/app/hands/capabilities.py @@ -0,0 +1,236 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= + +""" +BrainCapabilities — Brain capability set, determined by deployment env. 
Everything revolves around what the Brain can operate. + +Hand Types (capability dimensions — what the Brain can reach and control): +- filesystem: operate local files (scope: full | workspace_only | none) +- terminal: execute shell commands +- browser: control browser (CDP) +- mcp: use MCP tool protocol (all | allowlist) + +Design principles: +- Brain on local/cloud VM -> full capabilities (extensible: smart home, router, car, etc.) +- Brain in sandbox/Docker -> limited capabilities +- Channel only affects message display format (Markdown/plain/Block Kit), does not determine Brain capabilities +""" + +import logging +import os +import shutil +from dataclasses import dataclass, field +from pathlib import Path + +from app.component.environment import env + +logger = logging.getLogger("hands.capabilities") + +# Deployment determines Brain capabilities +DEPLOYMENT_FULL = ("local", "cloud_vm", "") # full capabilities +DEPLOYMENT_SANDBOX = ("sandbox", "docker", "container") # limited capabilities + + +def _is_running_in_docker() -> bool: + """Detect if Brain runs inside Docker/container.""" + if Path("/.dockerenv").exists(): + return True + try: + cgroup = Path("/proc/1/cgroup").read_text() + return ( + "docker" in cgroup + or "containerd" in cgroup + or "kubepods" in cgroup + ) + except (OSError, FileNotFoundError): + return False + + +def _probe_cdp_browser() -> bool: + """Check if CDP browser is configured/available.""" + if os.environ.get("EIGENT_CDP_URL"): + return True + cdp_json = Path.home() / ".eigent" / "cdp.json" + if cdp_json.exists(): + return True + # Electron persists CDP pool here; if present, browser capability is likely available. 
+ cdp_pool = Path.home() / ".eigent" / "cdp-browsers.json" + return cdp_pool.exists() + + +def _is_electron_runtime() -> bool: + """Detect whether Brain is launched by Electron desktop host.""" + return env("EIGENT_RUNTIME", "").lower().strip() == "electron" + + +def _can_launch_local_cdp_browser() -> bool: + """Check if local runtime can provision a CDP browser on demand.""" + if os.environ.get("EIGENT_BRAIN_LAUNCH_BROWSER", "true").lower() in ( + "false", + "0", + "no", + ): + return False + try: + from app.utils.browser_launcher import _find_chrome_executable + + return _find_chrome_executable() is not None + except Exception as e: + logger.debug(f"Could not probe local browser executable: {e}") + return False + + +@dataclass +class BrainCapabilities: + """ + Brain capability set (detected + config), determined at startup, global singleton. + + Each field maps to a Hand Type: what the Brain can operate. + """ + + has_terminal: bool = True + """terminal hand: can execute shell""" + + has_browser: bool = False + """browser hand: can control CDP browser""" + + filesystem_scope: str = "full" + """filesystem hand: full | workspace_only | none""" + + mcp_mode: str = "all" + """mcp hand: all | allowlist""" + + mcp_allowlist: list[str] = field(default_factory=list) + """used when mcp_mode=allowlist""" + + workspace_root: str = "~/.eigent/workspace" + """workspace root path""" + + deployment_type: str = "local" + """deployment type (for logging): local | cloud_vm | sandbox | docker""" + + @property + def mode(self) -> str: + """capability tier: full | sandbox — for IHands.mode compatibility""" + return "full" if self._is_full else "sandbox" + + @property + def _is_full(self) -> bool: + return self.filesystem_scope == "full" and self.has_terminal + + +def detect_capabilities(config: dict | None = None) -> BrainCapabilities: + """ + Detect Brain capabilities, two-layer decision: + 1. Deployment env: EIGENT_DEPLOYMENT_TYPE / Docker auto-detect + 2. 
Env var overrides: EIGENT_HANDS_* + """ + cfg = config or {} + + # 1. Deployment env determines base capabilities + deployment = env("EIGENT_DEPLOYMENT_TYPE") or "" + deployment = deployment.lower().strip() + + if deployment in DEPLOYMENT_FULL: + # local/cloud VM -> full capabilities + in_docker = _is_running_in_docker() + if in_docker: + logger.info("Brain running in Docker, using limited capabilities") + deployment = "docker" + caps = BrainCapabilities( + has_terminal=shutil.which("bash") is not None, + has_browser=False, + filesystem_scope="workspace_only", + mcp_mode="all", # MCP available in all deployment modes + workspace_root=env("EIGENT_WORKSPACE", "~/.eigent/workspace"), + deployment_type="docker", + ) + else: + # local/desktop: browser hand when CDP is configured/reachable, + # Electron host is present, or local browser can be provisioned. + has_browser = _probe_cdp_browser() + if not has_browser and _is_electron_runtime(): + has_browser = True + if not has_browser: + has_browser = _can_launch_local_cdp_browser() + if not has_browser: + logger.warning( + "Browser capability disabled: no CDP config, " + "not running under Electron host, and no launchable browser found." + ) + caps = BrainCapabilities( + has_terminal=shutil.which("bash") is not None, + has_browser=has_browser, + filesystem_scope="full", + mcp_mode="all", + workspace_root=env("EIGENT_WORKSPACE", "~/.eigent/workspace"), + deployment_type="cloud_vm" + if deployment == "cloud_vm" + else "local", + ) + else: + # sandbox / docker / container -> limited capabilities + caps = BrainCapabilities( + has_terminal=shutil.which("bash") is not None, + has_browser=False, + filesystem_scope="workspace_only", + mcp_mode="all", # MCP available in all deployment modes + workspace_root=env("EIGENT_WORKSPACE", "~/.eigent/workspace"), + deployment_type=deployment or "sandbox", + ) + + # 2. 
Env var overrides + if env("EIGENT_HANDS_TERMINAL") is not None: + caps.has_terminal = env("EIGENT_HANDS_TERMINAL", "true").lower() in ( + "1", + "true", + "yes", + ) + if env("EIGENT_HANDS_BROWSER") is not None: + caps.has_browser = env("EIGENT_HANDS_BROWSER", "false").lower() in ( + "1", + "true", + "yes", + ) + if env("EIGENT_HANDS_FILESYSTEM") is not None: + caps.filesystem_scope = env("EIGENT_HANDS_FILESYSTEM", "full") + if env("EIGENT_HANDS_MCP") is not None: + caps.mcp_mode = env("EIGENT_HANDS_MCP", "all") + if env("EIGENT_CDP_URL"): + caps.has_browser = True + + # 3. Config file overrides + if "terminal" in cfg: + caps.has_terminal = bool(cfg["terminal"]) + if "browser" in cfg: + caps.has_browser = bool(cfg["browser"]) + if "filesystem" in cfg: + caps.filesystem_scope = str(cfg["filesystem"]) + if "mcp" in cfg: + caps.mcp_mode = str(cfg["mcp"]) + if "mcp_allowlist" in cfg: + caps.mcp_allowlist = list(cfg["mcp_allowlist"]) + + logger.info( + "BrainCapabilities detected", + extra={ + "deployment": caps.deployment_type, + "mode": caps.mode, + "terminal": caps.has_terminal, + "browser": caps.has_browser, + "filesystem": caps.filesystem_scope, + "mcp": caps.mcp_mode, + }, + ) + return caps diff --git a/backend/app/hands/cluster_config.py b/backend/app/hands/cluster_config.py new file mode 100644 index 000000000..d1dc2505b --- /dev/null +++ b/backend/app/hands/cluster_config.py @@ -0,0 +1,321 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= + +from __future__ import annotations + +import tomllib +from collections.abc import Callable +from dataclasses import dataclass +from pathlib import Path +from typing import Any + + +class HandsClusterConfigError(ValueError): + """Raised when cluster config file is invalid.""" + + +@dataclass(frozen=True, slots=True) +class ClusterEndpointConfig: + name: str + base_url: str + timeout_seconds: float + verify_tls: bool + acquire_path: str + release_path: str + health_path: str + auth_token: str | None + + +@dataclass(frozen=True, slots=True) +class HandsClusterRoutingConfig: + source_path: str + route_to_cluster: dict[str, ClusterEndpointConfig] + + +def load_hands_cluster_config( + config_file: str, + read_env: Callable[[str], str | None] | None = None, +) -> HandsClusterRoutingConfig: + env_reader = read_env or _default_read_env + source_path = _resolve_config_path(config_file) + data = _read_toml(source_path) + + defaults = _read_defaults(data.get("defaults")) + clusters = _read_clusters(data.get("clusters"), defaults, env_reader) + routes = _read_routes(data.get("routes"), clusters) + + return HandsClusterRoutingConfig( + source_path=str(source_path), + route_to_cluster=routes, + ) + + +def _default_read_env(name: str) -> str | None: + from app.component.environment import env + + return env(name) + + +def _resolve_config_path(config_file: str) -> Path: + raw = (config_file or "").strip() + if not raw: + raise HandsClusterConfigError("config file path is empty") + path = Path(raw).expanduser() + if not path.is_absolute(): + path = Path.cwd() / path + if not path.exists(): + raise HandsClusterConfigError( + f"cluster config file does not exist: {path}" + ) + if not path.is_file(): + raise HandsClusterConfigError( + f"cluster config path is not a file: {path}" + ) + return path + + +def 
_read_toml(path: Path) -> dict[str, Any]: + try: + with path.open("rb") as handle: + parsed = tomllib.load(handle) + except tomllib.TOMLDecodeError as exc: + raise HandsClusterConfigError( + f"invalid TOML in cluster config: {path}: {exc}" + ) from exc + except OSError as exc: + raise HandsClusterConfigError( + f"unable to read cluster config file: {path}: {exc}" + ) from exc + if not isinstance(parsed, dict): + raise HandsClusterConfigError( + f"cluster config root must be an object: {path}" + ) + return parsed + + +def _read_defaults(raw: Any) -> dict[str, Any]: + if raw is None: + raw = {} + if not isinstance(raw, dict): + raise HandsClusterConfigError("[defaults] must be a TOML table/object") + return { + "timeout_seconds": _as_float( + raw.get("timeout_seconds"), "defaults.timeout_seconds", 10.0 + ), + "verify_tls": _as_bool( + raw.get("verify_tls"), "defaults.verify_tls", True + ), + "acquire_path": _as_path_segment( + raw.get("acquire_path"), "defaults.acquire_path", "/acquire" + ), + "release_path": _as_path_segment( + raw.get("release_path"), "defaults.release_path", "/release" + ), + "health_path": _as_path_segment( + raw.get("health_path"), "defaults.health_path", "/health" + ), + "auth_token": _as_optional_str(raw.get("auth_token")), + "auth_token_env": _as_optional_str(raw.get("auth_token_env")), + } + + +def _read_clusters( + raw: Any, + defaults: dict[str, Any], + read_env: Callable[[str], str | None], +) -> dict[str, ClusterEndpointConfig]: + if not isinstance(raw, dict) or not raw: + raise HandsClusterConfigError( + "[clusters] must be a non-empty TOML table/object" + ) + + clusters: dict[str, ClusterEndpointConfig] = {} + for raw_name, item in raw.items(): + if not isinstance(raw_name, str) or not raw_name.strip(): + raise HandsClusterConfigError( + "cluster name must be a non-empty string" + ) + name = raw_name.strip().lower() + if not isinstance(item, dict): + raise HandsClusterConfigError( + f"[clusters.{raw_name}] must be a TOML table/object" 
+ ) + + base_url = _as_optional_str(item.get("base_url")) or _as_optional_str( + item.get("api") + ) + if not base_url: + raise HandsClusterConfigError( + f"[clusters.{raw_name}] requires base_url (or api)" + ) + + timeout_seconds = _as_float( + item.get("timeout_seconds"), + f"clusters.{raw_name}.timeout_seconds", + defaults["timeout_seconds"], + ) + verify_tls = _as_bool( + item.get("verify_tls"), + f"clusters.{raw_name}.verify_tls", + defaults["verify_tls"], + ) + acquire_path = _as_path_segment( + item.get("acquire_path"), + f"clusters.{raw_name}.acquire_path", + defaults["acquire_path"], + ) + release_path = _as_path_segment( + item.get("release_path"), + f"clusters.{raw_name}.release_path", + defaults["release_path"], + ) + health_path = _as_path_segment( + item.get("health_path"), + f"clusters.{raw_name}.health_path", + defaults["health_path"], + ) + auth_token = _resolve_auth_token( + item=item, + defaults=defaults, + read_env=read_env, + ) + + clusters[name] = ClusterEndpointConfig( + name=name, + base_url=base_url.strip(), + timeout_seconds=timeout_seconds, + verify_tls=verify_tls, + acquire_path=acquire_path, + release_path=release_path, + health_path=health_path, + auth_token=auth_token, + ) + return clusters + + +def _read_routes( + raw: Any, + clusters: dict[str, ClusterEndpointConfig], +) -> dict[str, ClusterEndpointConfig]: + if raw is None: + if len(clusters) == 1: + only = next(iter(clusters.values())) + return {"default": only} + return dict(clusters) + + if not isinstance(raw, dict): + raise HandsClusterConfigError("[routes] must be a TOML table/object") + + route_to_cluster: dict[str, ClusterEndpointConfig] = {} + for raw_route, raw_cluster in raw.items(): + if not isinstance(raw_route, str) or not raw_route.strip(): + raise HandsClusterConfigError( + "route key must be a non-empty string" + ) + if not isinstance(raw_cluster, str) or not raw_cluster.strip(): + raise HandsClusterConfigError( + f"route '{raw_route}' target must be a non-empty 
string" + ) + + route_key = _normalize_route_key(raw_route) + cluster_name = raw_cluster.strip().lower() + cluster = clusters.get(cluster_name) + if cluster is None: + raise HandsClusterConfigError( + f"route '{raw_route}' references unknown cluster '{raw_cluster}'" + ) + route_to_cluster[route_key] = cluster + + if not route_to_cluster: + raise HandsClusterConfigError("[routes] must not be empty") + return route_to_cluster + + +def _resolve_auth_token( + item: dict[str, Any], + defaults: dict[str, Any], + read_env: Callable[[str], str | None], +) -> str | None: + direct = _as_optional_str(item.get("auth_token")) + if direct: + return direct + + env_name = _as_optional_str(item.get("auth_token_env")) + if env_name: + return _as_optional_str(read_env(env_name)) + + default_direct = _as_optional_str(defaults.get("auth_token")) + if default_direct: + return default_direct + + default_env_name = _as_optional_str(defaults.get("auth_token_env")) + if default_env_name: + return _as_optional_str(read_env(default_env_name)) + + return None + + +def _normalize_route_key(raw: str) -> str: + key = raw.strip().lower() + if key in ("*", "fallback"): + return "default" + return key + + +def _as_optional_str(value: Any) -> str | None: + if value is None: + return None + if isinstance(value, str): + s = value.strip() + return s if s else None + return str(value).strip() or None + + +def _as_float(value: Any, field: str, default: float) -> float: + if value is None: + return default + try: + parsed = float(value) + except (TypeError, ValueError) as exc: + raise HandsClusterConfigError( + f"{field} must be a number, got {value!r}" + ) from exc + if parsed <= 0: + raise HandsClusterConfigError(f"{field} must be > 0, got {parsed!r}") + return parsed + + +def _as_bool(value: Any, field: str, default: bool) -> bool: + if value is None: + return default + if isinstance(value, bool): + return value + if isinstance(value, str): + lowered = value.strip().lower() + if lowered in ("1", "true", 
"yes", "on"): + return True + if lowered in ("0", "false", "no", "off"): + return False + raise HandsClusterConfigError(f"{field} must be a boolean, got {value!r}") + + +def _as_path_segment(value: Any, field: str, default: str) -> str: + if value is None: + return default + if not isinstance(value, str): + raise HandsClusterConfigError(f"{field} must be a string path") + s = value.strip() + if not s: + raise HandsClusterConfigError(f"{field} must not be empty") + return s if s.startswith("/") else f"/{s}" diff --git a/backend/app/hands/cluster_interface.py b/backend/app/hands/cluster_interface.py new file mode 100644 index 000000000..de53d45e7 --- /dev/null +++ b/backend/app/hands/cluster_interface.py @@ -0,0 +1,41 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= + +from abc import ABC, abstractmethod +from typing import Any + + +class IHandsCluster(ABC): + """Remote Hands worker cluster interface placeholder.""" + + @abstractmethod + async def acquire( + self, + resource_type: str, + session_id: str, + tenant_id: str = "default", + **kwargs, + ) -> dict[str, Any]: + """Acquire a worker resource and return its endpoint metadata.""" + ... + + @abstractmethod + async def release(self, session_id: str) -> None: + """Release the worker resource bound to the session.""" + ... 
"""
EnvironmentHands — IHands implementation driven by BrainCapabilities.

Brain deployment env determines capability set; all Channels share one instance.
Channel only handles message display format adaptation.
"""

from pathlib import Path

from app.hands.capabilities import BrainCapabilities
from app.hands.interface import IHands


def _is_within(path: str, root: Path) -> bool:
    """Return True when *path* resolves to *root* or a location inside it.

    Resolution failures (OS errors, symlink loops) count as "outside".
    """
    try:
        resolved = Path(path).expanduser().resolve()
        resolved.relative_to(root.resolve())
        return True
    except ValueError:
        # Resolved fine, but not under root.
        return False
    except (OSError, RuntimeError):
        return False


class EnvironmentHands(IHands):
    """
    IHands implementation based on BrainCapabilities.
    Initialized at Brain startup from deployment env; globally reused.
    """

    def __init__(self, caps: BrainCapabilities) -> None:
        self._caps = caps
        # Expand "~" once so all later path checks use an absolute root.
        self.workspace_root = Path(caps.workspace_root).expanduser()

    @property
    def mode(self) -> str:
        """Capability tier: full | sandbox"""
        return self._caps.mode

    def can_execute_terminal(self) -> bool:
        """terminal hand: can execute shell"""
        return self._caps.has_terminal

    def can_access_filesystem(self, path: str) -> bool:
        """filesystem hand: whether *path* falls inside the allowed scope.

        - "full": anywhere under the user's home directory or the workspace
        - "workspace_only": only under the workspace root
        - any other scope (e.g. "none"): no filesystem access
        """
        if self._caps.filesystem_scope == "full":
            return _is_within(path, Path.home()) or _is_within(
                path, self.workspace_root
            )
        if self._caps.filesystem_scope == "workspace_only":
            return _is_within(path, self.workspace_root)
        return False

    def can_use_mcp(self, mcp_name: str) -> bool:
        """mcp hand: all MCPs in "all" mode, otherwise only allowlisted ones."""
        if self._caps.mcp_mode == "all":
            return True
        return mcp_name in self._caps.mcp_allowlist

    def can_use_browser(self) -> bool:
        """browser hand: can control CDP browser"""
        return self._caps.has_browser

    def get_working_directory(
        self, session_id: str, tenant_id: str = "default"
    ) -> str:
        """Per-session working directory under the workspace root."""
        return str(self.workspace_root / session_id)

    def get_capability_manifest(self) -> dict[str, str | bool | list[str]]:
        """Serializable capability manifest for clients/UI."""
        return {
            "mode": self.mode,
            "terminal": self._caps.has_terminal,
            "browser": self._caps.has_browser,
            "filesystem": self._caps.filesystem_scope,
            "mcp": self._caps.mcp_mode,
            "mcp_allowlist": list(self._caps.mcp_allowlist),
            "deployment": self._caps.deployment_type,
            "workspace_root": str(self.workspace_root),
        }

    def acquire_resource(
        self, resource_type: str, session_id: str, **kwargs
    ) -> str:
        """Local resource acquisition; only "browser" is supported.

        Returns a local CDP endpoint URL on the requested port (default 9222).

        Raises:
            ValueError: For any other resource type.
        """
        if resource_type == "browser":
            port = kwargs.get("port", 9222)
            return f"http://localhost:{port}"
        raise ValueError(f"Unknown resource type: {resource_type}")

    def release_resource(self, resource_type: str, session_id: str) -> None:
        """Nothing to release for locally shared resources."""
        return None


# --- backend/app/hands/full_hands.py ---
from pathlib import Path

from app.hands.interface import IHands


class FullHands(IHands):
    """Full capabilities: terminal, filesystem, browser, MCP all available"""

    def __init__(self, workspace_root: str = "~/.eigent/workspace") -> None:
        # Expand "~" once; all containment checks use the absolute root.
        self.workspace_root = Path(workspace_root).expanduser()

    @property
    def mode(self) -> str:
        """Capability tier: always "full"."""
        return "full"

    def can_execute_terminal(self) -> bool:
        """terminal hand: always available."""
        return True

    @staticmethod
    def _is_within(path: str, root: Path) -> bool:
        """True when *path* resolves inside *root*; resolution errors count as outside."""
        try:
            resolved = Path(path).expanduser().resolve()
            resolved.relative_to(root.resolve())
            return True
        except ValueError:
            return False
        except (OSError, RuntimeError):
            return False

    def can_access_filesystem(self, path: str) -> bool:
        """filesystem hand: allow anything under ~/ or the workspace root."""
        return self._is_within(path, Path.home()) or self._is_within(
            path, self.workspace_root
        )

    def can_use_mcp(self, mcp_name: str) -> bool:
        """mcp hand: every MCP is allowed."""
        return True

    def can_use_browser(self) -> bool:
        """browser hand: always available."""
        return True

    def get_working_directory(
        self, session_id: str, tenant_id: str = "default"
    ) -> str:
        """Per-session working directory under the workspace root."""
        return str(self.workspace_root / session_id)

    def get_capability_manifest(self) -> dict[str, str | bool | list[str]]:
        """Serializable capability manifest for clients/UI."""
        return {
            "mode": self.mode,
            "terminal": True,
            "browser": True,
            "filesystem": "full",
            "mcp": "all",
            "mcp_allowlist": [],
            "deployment": "override_full",
            "workspace_root": str(self.workspace_root),
        }

    def acquire_resource(
        self, resource_type: str, session_id: str, **kwargs
    ) -> str:
        """Only "browser" is supported; returns a local CDP endpoint URL.

        Raises:
            ValueError: For any other resource type.
        """
        if resource_type == "browser":
            port = kwargs.get("port", 9222)
            return f"http://localhost:{port}"
        raise ValueError(f"Unknown resource type: {resource_type}")

    def release_resource(self, resource_type: str, session_id: str) -> None:
        """Nothing to release for local resources."""
        return None


# --- backend/app/hands/http_hands_cluster.py ---
# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========

"""HTTP client implementation of IHandsCluster for remote worker clusters."""

import logging
from typing import Any

import httpx

from app.hands.cluster_interface import IHandsCluster

logger = logging.getLogger("hands.cluster.http")


class HttpHandsCluster(IHandsCluster):
    """HTTP-backed Hands cluster client.

    Talks to a cluster manager over three endpoints (acquire/release/health),
    optionally authenticated with a bearer token.
    """

    def __init__(
        self,
        base_url: str,
        timeout_seconds: float = 10.0,
        auth_token: str | None = None,
        acquire_path: str = "/acquire",
        release_path: str = "/release",
        health_path: str = "/health",
        verify_tls: bool = True,
        transport: httpx.AsyncBaseTransport | None = None,
    ) -> None:
        # Trailing slash is stripped so _build_url can join "base + /path".
        normalized = base_url.strip().rstrip("/")
        if not normalized:
            raise ValueError("base_url must not be empty")
        self._base_url = normalized
        self._timeout_seconds = timeout_seconds
        self._auth_token = auth_token
        self._acquire_path = acquire_path
        self._release_path = release_path
        self._health_path = health_path
        self._verify_tls = verify_tls
        # NOTE(review): transport injection looks intended for tests
        # (e.g. httpx.MockTransport) — confirm with callers.
        self._transport = transport

    async def acquire(
        self,
        resource_type: str,
        session_id: str,
        tenant_id: str = "default",
        **kwargs,
    ) -> dict[str, Any]:
        """POST to the acquire endpoint and return the resource metadata.

        Raises:
            RuntimeError: If the response carries no usable endpoint field.
            httpx.HTTPStatusError: On a non-2xx response.
        """
        # Both "type" and "resource_type" are sent — NOTE(review): presumably
        # for compatibility with differing worker API versions; confirm the
        # cluster contract.
        payload: dict[str, Any] = {
            "type": resource_type,
            "resource_type": resource_type,
            "session_id": session_id,
            "tenant_id": tenant_id,
        }
        payload.update(kwargs)
        body = await self._request_json(
            method="POST",
            url=self._build_url(self._acquire_path),
            payload=payload,
        )
        data = self._unwrap_response(body)
        # Accept several endpoint field spellings from different workers.
        endpoint = (
            data.get("endpoint") or data.get("cdp_url") or data.get("url")
        )
        if not endpoint:
            raise RuntimeError(
                "Hands cluster acquire response missing endpoint"
            )
        data["endpoint"] = str(endpoint)
        return data

    async def release(self, session_id: str) -> None:
        """POST to the release endpoint; a 404 counts as already released."""
        payload = {"session_id": session_id}
        try:
            await self._request_json(
                method="POST",
                url=self._build_url(self._release_path),
                payload=payload,
            )
        except httpx.HTTPStatusError as exc:
            if exc.response.status_code == 404:
                # Already gone: log and swallow instead of failing cleanup.
                logger.warning(
                    "Hands cluster release returned 404 for session_id=%s",
                    session_id,
                )
                return
            raise

    async def health(self) -> dict[str, Any]:
        """GET the health endpoint and return its unwrapped payload."""
        body = await self._request_json(
            method="GET",
            url=self._build_url(self._health_path),
            payload=None,
        )
        return self._unwrap_response(body)

    def _build_url(self, path: str) -> str:
        """Join *path* onto the base URL; absolute URLs pass through untouched."""
        p = path.strip()
        if p.startswith("http://") or p.startswith("https://"):
            return p
        if not p.startswith("/"):
            p = f"/{p}"
        return f"{self._base_url}{p}"

    def _headers(self) -> dict[str, str]:
        """Common request headers, with a bearer token when configured."""
        headers = {"Accept": "application/json"}
        if self._auth_token:
            headers["Authorization"] = f"Bearer {self._auth_token}"
        return headers

    async def _request_json(
        self,
        method: str,
        url: str,
        payload: dict[str, Any] | None,
    ) -> dict[str, Any]:
        """Issue one request and return the JSON body as a dict.

        Empty or non-JSON bodies return {}; non-dict JSON is wrapped under
        "result". A fresh AsyncClient is created per call — NOTE(review):
        fine for low call volume; consider client reuse if this gets hot.
        """
        request_kwargs: dict[str, Any] = {"headers": self._headers()}
        if payload is not None:
            request_kwargs["json"] = payload

        async with httpx.AsyncClient(
            timeout=self._timeout_seconds,
            verify=self._verify_tls,
            transport=self._transport,
        ) as client:
            response = await client.request(method, url, **request_kwargs)
            response.raise_for_status()

        # Non-streaming httpx responses are fully read before the client
        # closes, so the body stays accessible here.
        if not response.content:
            return {}

        try:
            body = response.json()
        except ValueError:
            return {}
        if isinstance(body, dict):
            return body
        return {"result": body}

    def _unwrap_response(self, body: dict[str, Any]) -> dict[str, Any]:
        """Unwrap common envelope shapes ("data"/"result") to the inner dict."""
        if isinstance(body.get("data"), dict):
            return dict(body["data"])
        if isinstance(body.get("result"), dict):
            return dict(body["result"])
        return dict(body)


# --- backend/app/hands/interface.py ---
from abc import ABC, abstractmethod
from typing import Any


class IHands(ABC):
    """
    Brain capability interface — what the Brain can operate (Hand Types).

    mode: full | sandbox (capability tier)
    can_*: whether each Hand Type is available
    """

    @property
    @abstractmethod
    def mode(self) -> str:
        """Capability tier: full | sandbox"""
        pass

    @abstractmethod
    def can_execute_terminal(self) -> bool:
        """terminal hand: can execute shell"""
        pass

    @abstractmethod
    def can_access_filesystem(self, path: str) -> bool:
        """filesystem hand: whether path is within accessible scope"""
        pass

    @abstractmethod
    def can_use_mcp(self, mcp_name: str) -> bool:
        """mcp hand: whether this MCP is available"""
        pass

    @abstractmethod
    def can_use_browser(self) -> bool:
        """browser hand: can control CDP browser"""
        pass

    @abstractmethod
    def get_working_directory(
        self, session_id: str, tenant_id: str = "default"
    ) -> str:
        """Return session working directory"""
        pass

    @abstractmethod
    def get_capability_manifest(self) -> dict[str, Any]:
        """Return a serializable capability manifest for clients."""
        pass

    @abstractmethod
    def acquire_resource(
        self, resource_type: str, session_id: str, **kwargs
    ) -> str:
        """Acquire a resource endpoint for the requested hand type."""
        pass

    @abstractmethod
    def release_resource(self, resource_type: str, session_id: str) -> None:
        """Release a previously acquired resource."""
        pass


# --- backend/app/hands/remote_hands.py ---
# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========

import asyncio
from pathlib import Path
from typing import Any

from app.hands.cluster_interface import IHandsCluster
from app.hands.interface import IHands


class RemoteHands(IHands):
    """
    Remote/cluster-backed Hands placeholder.

    This class is intentionally minimal for this refactor stage:
    - exposes a concrete IHands implementation for remote cluster mode
    - supports browser resource acquire/release via IHandsCluster when provided
    - keeps safe local fallback endpoint when cluster is not wired yet
    """

    def __init__(
        self,
        cluster: IHandsCluster | None = None,
        workspace_root: str = "~/.eigent/workspace",
    ) -> None:
        self._cluster = cluster
        # Expand "~" once; filesystem checks compare against this root.
        self.workspace_root = Path(workspace_root).expanduser()
        # session_id -> acquire() metadata, kept for bookkeeping/release.
        self._acquired: dict[str, dict[str, Any]] = {}

    @property
    def mode(self) -> str:
        """Capability tier: remote mode always reports "full"."""
        return "full"

    def can_execute_terminal(self) -> bool:
        """terminal hand: available in remote mode."""
        return True

    def can_access_filesystem(self, path: str) -> bool:
        """filesystem hand: only paths under the workspace root are allowed."""
        try:
            resolved = Path(path).expanduser().resolve()
            workspace = self.workspace_root.resolve()
            resolved.relative_to(workspace)
            return True
        except ValueError:
            # Resolved fine, but outside the workspace.
            return False
        except (OSError, RuntimeError):
            return False

    def can_use_mcp(self, mcp_name: str) -> bool:
        """mcp hand: every MCP is allowed in remote mode."""
        _ = mcp_name
        return True

    def can_use_browser(self) -> bool:
        """browser hand: available (served via the cluster when configured)."""
        return True

    def get_working_directory(
        self, session_id: str, tenant_id: str = "default"
    ) -> str:
        """Per-session working directory under the workspace root."""
        _ = tenant_id
        return str(self.workspace_root / session_id)

    def get_capability_manifest(self) -> dict[str, str | bool | list[str]]:
        """Serializable capability manifest for clients/UI."""
        return {
            "mode": self.mode,
            "terminal": True,
            "browser": True,
            "filesystem": "workspace_only",
            "mcp": "all",
            "mcp_allowlist": [],
            "deployment": "remote_cluster",
            "workspace_root": str(self.workspace_root),
        }

    def acquire_resource(
        self, resource_type: str, session_id: str, **kwargs
    ) -> str:
        """Acquire a resource endpoint, via the cluster when configured.

        Raises:
            ValueError: Unknown resource type in the no-cluster fallback.
            RuntimeError: When called from inside a running event loop, or
                when the cluster response lacks an endpoint.
        """
        if self._cluster is None:
            # Safe local fallback until a cluster client is wired in.
            if resource_type == "browser":
                port = int(kwargs.get("port", 9222))
                return f"http://localhost:{port}"
            raise ValueError(
                f"Unknown resource type without cluster configured: {resource_type}"
            )

        # IHands interface is sync; bridge to async cluster API here.
        # asyncio.run() cannot be nested, so refuse inside a running loop.
        try:
            _ = asyncio.get_running_loop()
        except RuntimeError:
            pass
        else:
            raise RuntimeError(
                "Cannot synchronously acquire remote resource while event loop is running"
            )

        acquired = asyncio.run(
            self._cluster.acquire(
                resource_type=resource_type,
                session_id=session_id,
                **kwargs,
            )
        )
        self._acquired[session_id] = acquired
        endpoint = acquired.get("endpoint")
        if not endpoint:
            raise RuntimeError(
                "Remote cluster acquire() did not return endpoint"
            )
        return str(endpoint)

    def release_resource(self, resource_type: str, session_id: str) -> None:
        """Release the session's remote resource (best effort).

        Local bookkeeping is dropped first; cluster release failures caused
        by a running event loop are retried as a fire-and-forget task.
        """
        _ = resource_type
        self._acquired.pop(session_id, None)
        if self._cluster is None:
            return

        # Best-effort release for sync interface.
        try:
            asyncio.run(self._cluster.release(session_id))
        except RuntimeError:
            # If called from a running loop, schedule best-effort release.
            # NOTE(review): the task is not awaited, so release failures are
            # silently dropped here.
            try:
                loop = asyncio.get_running_loop()
                loop.create_task(self._cluster.release(session_id))
            except RuntimeError:
                return


# --- backend/app/hands/routed_hands_cluster.py ---
========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= + +import logging +from typing import Any + +from app.hands.cluster_interface import IHandsCluster + +logger = logging.getLogger("hands.cluster.routed") + + +class RoutedHandsCluster(IHandsCluster): + """ + Route resource requests to different cluster clients by resource type. + + Example keys: + - "browser" + - "terminal" + - "model" + - "default" + """ + + def __init__( + self, + clusters: dict[str, IHandsCluster], + default_key: str = "default", + ) -> None: + normalized = { + k.strip().lower(): v + for k, v in clusters.items() + if k and isinstance(k, str) + } + if not normalized: + raise ValueError("clusters must not be empty") + self._clusters = normalized + self._default_key = ( + default_key if default_key in self._clusters else None + ) + self._session_cluster_key: dict[str, str] = {} + + async def acquire( + self, + resource_type: str, + session_id: str, + tenant_id: str = "default", + **kwargs, + ) -> dict[str, Any]: + key = self._select_cluster_key(resource_type) + cluster = self._clusters[key] + acquired = await cluster.acquire( + resource_type=resource_type, + session_id=session_id, + tenant_id=tenant_id, + **kwargs, + ) + self._session_cluster_key[session_id] = key + if "cluster_key" not in acquired: + acquired["cluster_key"] = key + return acquired + + async def release(self, session_id: str) -> None: + key = 
self._session_cluster_key.pop(session_id, None) + if key is not None and key in self._clusters: + await self._clusters[key].release(session_id) + return + + if self._default_key is not None: + await self._clusters[self._default_key].release(session_id) + return + + last_error: Exception | None = None + for cluster_key, cluster in self._clusters.items(): + try: + await cluster.release(session_id) + return + except Exception as exc: # pragma: no cover - best effort log path + last_error = exc + logger.warning( + "Release attempt failed on cluster key %s for session_id=%s: %s", + cluster_key, + session_id, + exc, + ) + if last_error is not None: + raise last_error + + async def health(self) -> dict[str, Any]: + clusters_health: dict[str, Any] = {} + for key, cluster in self._clusters.items(): + try: + clusters_health[key] = await cluster.health() + except Exception as exc: + clusters_health[key] = {"error": str(exc)} + return { + "mode": "routed", + "default_key": self._default_key, + "clusters": clusters_health, + } + + def _select_cluster_key(self, resource_type: str) -> str: + wanted = (resource_type or "").strip().lower() + if wanted in self._clusters: + return wanted + if self._default_key is not None: + return self._default_key + if len(self._clusters) == 1: + return next(iter(self._clusters.keys())) + raise ValueError( + "No cluster configured for " + f"resource_type={resource_type!r} and no default cluster" + ) diff --git a/backend/app/hands/sandbox_hands.py b/backend/app/hands/sandbox_hands.py new file mode 100644 index 000000000..1e5460815 --- /dev/null +++ b/backend/app/hands/sandbox_hands.py @@ -0,0 +1,94 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= + +from pathlib import Path + +from app.hands.interface import IHands + +# Terminal command allowlist for sandbox +SANDBOX_TERMINAL_ALLOWLIST = frozenset( + {"ls", "cat", "python", "node", "pwd", "echo"} +) + + +class SandboxHands(IHands): + """Limited capabilities: workspace only, MCP allowed, no browser""" + + def __init__( + self, + workspace_root: str = "~/.eigent/workspace", + allowed_mcps: frozenset[str] | None = None, + ) -> None: + self.workspace_root = Path(workspace_root).expanduser() + # None = allow all MCP (default for debug override); frozenset() = allow none + self.allowed_mcps = allowed_mcps + + @property + def mode(self) -> str: + return "sandbox" + + def can_execute_terminal(self) -> bool: + return ( + True # Enabled; toolkit layer validates against command allowlist + ) + + def can_access_filesystem(self, path: str) -> bool: + try: + resolved = Path(path).expanduser().resolve() + workspace = self.workspace_root.resolve() + resolved.relative_to(workspace) + return True + except ValueError: + return False + except (OSError, RuntimeError): + return False + + def can_use_mcp(self, mcp_name: str) -> bool: + if self.allowed_mcps is None: + return True # MCP available in all cases + if not self.allowed_mcps: + return False + return mcp_name in self.allowed_mcps + + def can_use_browser(self) -> bool: + return False # No browser hand in limited mode + + def get_working_directory( + self, session_id: str, tenant_id: str = "default" + ) -> str: + return 
str(self.workspace_root / session_id) + + def get_capability_manifest(self) -> dict[str, str | bool | list[str]]: + return { + "mode": self.mode, + "terminal": self.can_execute_terminal(), + "browser": False, + "filesystem": "workspace_only", + "mcp": "all" if self.allowed_mcps is None else "allowlist", + "mcp_allowlist": [] + if self.allowed_mcps is None + else list(self.allowed_mcps), + "deployment": "override_sandbox", + "workspace_root": str(self.workspace_root), + } + + def acquire_resource( + self, resource_type: str, session_id: str, **kwargs + ) -> str: + raise ValueError( + f"Resource type {resource_type!r} is not available in sandbox mode" + ) + + def release_resource(self, resource_type: str, session_id: str) -> None: + return None diff --git a/backend/app/hardware/__init__.py b/backend/app/hardware/__init__.py new file mode 100644 index 000000000..9bd7e8fef --- /dev/null +++ b/backend/app/hardware/__init__.py @@ -0,0 +1,18 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. 
========= + +from app.hardware.interface import IHardwareBridge +from app.hardware.null_bridge import NullHardwareBridge + +__all__ = ["IHardwareBridge", "NullHardwareBridge"] diff --git a/backend/app/hardware/interface.py b/backend/app/hardware/interface.py new file mode 100644 index 000000000..e51f30219 --- /dev/null +++ b/backend/app/hardware/interface.py @@ -0,0 +1,55 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= + +from abc import ABC, abstractmethod +from typing import Any + + +class IHardwareBridge(ABC): + """Hardware capability bridge. 
Only Desktop has implementation, others use NullBridge""" + + @abstractmethod + def get_cdp_browsers(self) -> list[dict]: + """Get available CDP browser list""" + pass + + @abstractmethod + def add_cdp_browser(self, browser_id: str, **kwargs: Any) -> dict: + """Add CDP browser""" + pass + + @abstractmethod + def remove_cdp_browser(self, browser_id: str) -> bool: + """Remove CDP browser""" + pass + + @abstractmethod + def create_webview(self, id: str, url: str) -> None: + """Create WebView (Electron)""" + pass + + @abstractmethod + def hide_webview(self, id: str) -> None: + """Hide WebView""" + pass + + @abstractmethod + def show_webview(self, id: str) -> None: + """Show WebView""" + pass + + @abstractmethod + def set_webview_size(self, id: str, size: dict) -> None: + """Set WebView size""" + pass diff --git a/backend/app/hardware/null_bridge.py b/backend/app/hardware/null_bridge.py new file mode 100644 index 000000000..df9c232d4 --- /dev/null +++ b/backend/app/hardware/null_bridge.py @@ -0,0 +1,42 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= + +from typing import Any + +from app.hardware.interface import IHardwareBridge + + +class NullHardwareBridge(IHardwareBridge): + """Null implementation when no hardware. 
All methods return empty or no-op""" + + def get_cdp_browsers(self) -> list[dict]: + return [] + + def add_cdp_browser(self, browser_id: str, **kwargs: Any) -> dict: + raise NotImplementedError("CDP not available in this environment") + + def remove_cdp_browser(self, browser_id: str) -> bool: + return False + + def create_webview(self, id: str, url: str) -> None: + raise NotImplementedError("WebView not available in this environment") + + def hide_webview(self, id: str) -> None: + pass + + def show_webview(self, id: str) -> None: + pass + + def set_webview_size(self, id: str, size: dict) -> None: + pass diff --git a/backend/app/model/chat.py b/backend/app/model/chat.py index 8f13a5aa4..a0780f457 100644 --- a/backend/app/model/chat.py +++ b/backend/app/model/chat.py @@ -78,6 +78,9 @@ class Chat(BaseModel): search_config: dict[str, str] | None = None # User identifier for user-specific skill configurations user_id: str | None = None + # Direct server API base URL (for example http://localhost:3001/api/v1) + # used by standalone Brain to sync replay steps without Electron env injection. 
+ server_url: str | None = None @field_validator("model_type") @classmethod diff --git a/backend/app/router.py b/backend/app/router.py index 2909eaae0..fc58f25d6 100644 --- a/backend/app/router.py +++ b/backend/app/router.py @@ -23,8 +23,12 @@ from app.controller import ( chat_controller, + file_controller, health_controller, + mcp_controller, + message_controller, model_controller, + skill_controller, task_controller, tool_controller, ) @@ -51,11 +55,31 @@ def register_routers(app: FastAPI, prefix: str = "") -> None: "tags": ["Health"], "description": "Health check endpoint for service readiness", }, + { + "router": file_controller.router, + "tags": ["Files"], + "description": "File upload for Web/Channel clients", + }, + { + "router": mcp_controller.router, + "tags": ["MCP"], + "description": "MCP config (list, install, remove, update)", + }, + { + "router": skill_controller.router, + "tags": ["Skills"], + "description": "Skills scan, write, read, delete", + }, { "router": chat_controller.router, "tags": ["chat"], "description": "Chat session management, improvements, and human interactions", }, + { + "router": message_controller.router, + "tags": ["Message Router"], + "description": "Phase 2 Message Router - /messages endpoint (prefix-aware)", + }, { "router": model_controller.router, "tags": ["model"], diff --git a/backend/app/router_layer/__init__.py b/backend/app/router_layer/__init__.py new file mode 100644 index 000000000..84962ae14 --- /dev/null +++ b/backend/app/router_layer/__init__.py @@ -0,0 +1,39 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= + +"""Message Router layer: Channel/Session binding, Hands selection.""" + +from app.router_layer.hands_resolver import ( + get_environment_hands, + get_hands_for_channel, + init_environment_hands, +) +from app.router_layer.interface import ( + InboundMessage, + IRouter, + OutboundMessage, +) +from app.router_layer.message_router import DefaultMessageRouter +from app.router_layer.middleware import ChannelSessionMiddleware + +__all__ = [ + "ChannelSessionMiddleware", + "DefaultMessageRouter", + "get_environment_hands", + "get_hands_for_channel", + "InboundMessage", + "init_environment_hands", + "IRouter", + "OutboundMessage", +] diff --git a/backend/app/router_layer/hands_resolver.py b/backend/app/router_layer/hands_resolver.py new file mode 100644 index 000000000..d72f3d739 --- /dev/null +++ b/backend/app/router_layer/hands_resolver.py @@ -0,0 +1,206 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= + +""" +Hands = Brain capabilities, driven by deployment env, not by Channel. + +- Brain on local/cloud VM -> full capabilities (extensible: smart home, router, car, etc.) +- Brain in sandbox/Docker -> limited capabilities +- Channel only affects message display format +- MCP is available in all deployment modes +""" + +import logging + +from app.component.environment import env +from app.hands import ( + FullHands, + HandsClusterConfigError, + HandsClusterRoutingConfig, + HttpHandsCluster, + IHands, + IHandsCluster, + RemoteHands, + RoutedHandsCluster, + SandboxHands, + load_hands_cluster_config, +) +from app.hands.capabilities import detect_capabilities +from app.hands.environment_hands import EnvironmentHands + +logger = logging.getLogger("hands.resolver") + +# Global EnvironmentHands singleton, initialized at startup +_environment_hands: IHands | None = None + + +def _is_truthy(raw: str | None) -> bool: + if raw is None: + return False + return raw.strip().lower() in ("1", "true", "yes", "on") + + +def _new_http_cluster( + cluster_api: str, + timeout_seconds: float, + verify_tls: bool, + auth_token: str | None, + acquire_path: str, + release_path: str, + health_path: str, +) -> HttpHandsCluster: + logger.info( + "Configured HttpHandsCluster", + extra={ + "cluster_api": cluster_api, + "acquire_path": acquire_path, + "release_path": release_path, + "health_path": health_path, + "verify_tls": verify_tls, + "has_auth_token": bool(auth_token), + "timeout_seconds": timeout_seconds, + }, + ) + return HttpHandsCluster( + base_url=cluster_api, + timeout_seconds=timeout_seconds, + verify_tls=verify_tls, + auth_token=auth_token, + acquire_path=acquire_path, + release_path=release_path, + health_path=health_path, + ) + + +def _build_remote_cluster() -> IHandsCluster | None: + config_file = env("EIGENT_HANDS_CLUSTER_CONFIG_FILE", "").strip() + if not config_file: + return None + + try: + routing = 
load_hands_cluster_config(config_file) + except HandsClusterConfigError as exc: + logger.warning( + "Failed to load hands cluster config file %r: %s", + config_file, + exc, + ) + return None + + logger.info( + "Loaded hands cluster config file", + extra={ + "config_file": routing.source_path, + "routes": sorted(routing.route_to_cluster.keys()), + }, + ) + return _build_cluster_from_routing(routing) + + +def _build_cluster_from_routing( + routing: HandsClusterRoutingConfig, +) -> IHandsCluster: + clusters_by_name: dict[str, IHandsCluster] = {} + route_clients: dict[str, IHandsCluster] = {} + + for route_key, endpoint in routing.route_to_cluster.items(): + client = clusters_by_name.get(endpoint.name) + if client is None: + client = _new_http_cluster( + endpoint.base_url, + timeout_seconds=endpoint.timeout_seconds, + verify_tls=endpoint.verify_tls, + auth_token=endpoint.auth_token, + acquire_path=endpoint.acquire_path, + release_path=endpoint.release_path, + health_path=endpoint.health_path, + ) + clusters_by_name[endpoint.name] = client + route_clients[route_key] = client + + if len(route_clients) == 1 and "default" in route_clients: + return route_clients["default"] + return RoutedHandsCluster(clusters=route_clients) + + +def _create_remote_hands(workspace_root: str) -> RemoteHands: + cluster = _build_remote_cluster() + if cluster is None: + logger.warning( + "RemoteHands enabled but EIGENT_HANDS_CLUSTER_CONFIG_FILE is missing/invalid; " + "browser resource acquisition will fallback to localhost endpoint" + ) + return RemoteHands(cluster=cluster, workspace_root=workspace_root) + + +def init_environment_hands(config: dict | None = None) -> IHands: + """Initialize global EnvironmentHands (capability set) at Brain startup""" + global _environment_hands + mode = env("EIGENT_HANDS_MODE", "").strip().lower() + remote_enabled = _is_truthy(env("EIGENT_HANDS_REMOTE", "false")) + + if mode == "remote" or remote_enabled: + workspace_root = env("EIGENT_WORKSPACE", 
"~/.eigent/workspace") + logger.info( + "Initializing RemoteHands from env switch", + extra={"mode": mode, "remote_enabled": remote_enabled}, + ) + _environment_hands = _create_remote_hands(workspace_root) + return _environment_hands + + caps = detect_capabilities(config) + _environment_hands = EnvironmentHands(caps) + return _environment_hands + + +def get_environment_hands() -> IHands: + """Return global EnvironmentHands, shared by all Channels. Auto-detect if not initialized.""" + global _environment_hands + if _environment_hands is None: + init_environment_hands() + return _environment_hands + + +def _reset_environment_hands_for_testing() -> None: + """Testing only: reset global Hands so it can be re-initialized with new env.""" + global _environment_hands + _environment_hands = None + + +def get_hands_for_channel( + _channel: str, + hands_override: str | None = None, + workspace_root: str | None = None, +) -> IHands: + """ + Return Hands (Brain capability) instance. Capabilities driven by deployment env; Channel not involved. 
+ + - _channel: Kept for API compatibility; not used (Hands are env-driven per ADR-0006) + - hands_override: For debugging; force full/sandbox/remote + - workspace_root: Override workspace root (optional) + """ + root = workspace_root or env("EIGENT_WORKSPACE", "~/.eigent/workspace") + + if hands_override: + if hands_override in ("full", "sandbox", "remote"): + if hands_override == "remote": + return _create_remote_hands(root) + cls = {"full": FullHands, "sandbox": SandboxHands}[hands_override] + return cls(workspace_root=root) + logger.warning( + "Ignoring invalid X-Hands-Override: %r, expected full, sandbox or remote", + hands_override, + ) + + return get_environment_hands() diff --git a/backend/app/router_layer/interface.py b/backend/app/router_layer/interface.py new file mode 100644 index 000000000..3eac0c418 --- /dev/null +++ b/backend/app/router_layer/interface.py @@ -0,0 +1,72 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. 
========= + +"""IRouter interface and message types for Phase 2 Message Router.""" + +from abc import ABC, abstractmethod +from collections.abc import AsyncGenerator +from dataclasses import dataclass +from typing import TYPE_CHECKING, Any + +if TYPE_CHECKING: + from fastapi import Request + + +@dataclass +class InboundMessage: + """Standardized inbound message format.""" + + session_id: str + channel: str # desktop | web | cli | whatsapp | telegram | slack | ... + user_id: str | None + payload: dict[str, Any] + headers: dict[str, str] + + +@dataclass +class OutboundMessage: + """Outbound message for routing back to Client.""" + + session_id: str + payload: dict[str, Any] + stream: bool = False + + +class IRouter(ABC): + """Message Router interface.""" + + @abstractmethod + def route_in( + self, + msg: InboundMessage, + *, + request: "Request | None" = None, + ) -> AsyncGenerator[OutboundMessage, None]: + """Inbound: dispatch to Core and return streaming outbound messages.""" + pass + + @abstractmethod + async def route_out(self, session_id: str, msg: OutboundMessage) -> None: + """Outbound: route back to Client (for WebSocket push).""" + pass + + @abstractmethod + async def resolve_session( + self, + channel: str, + session_id: str | None, + user_id: str | None, + ) -> str: + """Resolve or create Session, return session_id.""" + pass diff --git a/backend/app/router_layer/message_router.py b/backend/app/router_layer/message_router.py new file mode 100644 index 000000000..8875c92cd --- /dev/null +++ b/backend/app/router_layer/message_router.py @@ -0,0 +1,190 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= + +"""Default Message Router implementation for Phase 2.""" + +import logging +import os +import time +import uuid +from collections.abc import AsyncGenerator +from typing import TYPE_CHECKING + +from app.router_layer.interface import ( + InboundMessage, + IRouter, + OutboundMessage, +) +from app.router_layer.session_store import ISessionStore, MemorySessionStore + +if TYPE_CHECKING: + from fastapi import Request + +logger = logging.getLogger("router_layer") + +# Session TTL: 24 hours per docs/design/06-protocol.md §1.2 +SESSION_TTL_SECONDS = 86400 + + +def _now_ts() -> float: + return time.time() + + +class DefaultMessageRouter(IRouter): + """ + Default Message Router with in-memory session store. + Implements resolve_session per docs/design/06-protocol.md §1.5. + """ + + def __init__( + self, + session_ttl: int = SESSION_TTL_SECONDS, + session_store: ISessionStore | None = None, + ): + self._session_ttl = session_ttl + self._session_store: ISessionStore = ( + session_store or MemorySessionStore() + ) + + async def resolve_session( + self, + channel: str, + session_id: str | None, + user_id: str | None, + ) -> str: + """ + Resolve or create Session per docs/design/06-protocol.md §1.5. + Uses channel isolation: same user_id on different channels get different sessions. + """ + # 1. 
If session_id provided, check store + if session_id: + entry = await self._session_store.get(session_id) + if ( + isinstance(entry, dict) + and entry.get("channel") == channel + and not self._is_expired(entry) + ): + entry["last_activity"] = _now_ts() + await self._session_store.set( + session_id, entry, ttl=self._session_ttl + ) + return session_id + # Expired or not found → treat as new, fall through + await self._session_store.delete(session_id) + + # 2. Generate new session_id + new_id = f"sess_{uuid.uuid4().hex[:16]}" + + # 3. Channel isolation: always create new session (per §1.3 recommendation) + now = _now_ts() + entry = { + "session_id": new_id, + "channel": channel, + "user_id": user_id, + "created_at": now, + "last_activity": now, + } + await self._session_store.set(new_id, entry, ttl=self._session_ttl) + + return new_id + + def _is_expired(self, entry: dict) -> bool: + last_activity = entry.get("last_activity") + if not isinstance(last_activity, (int, float)): + return True + return (_now_ts() - float(last_activity)) > self._session_ttl + + async def route_in( + self, + msg: InboundMessage, + *, + request: "Request | None" = None, + ) -> AsyncGenerator[OutboundMessage, None]: + """ + Inbound: dispatch to Core. + For chat payload (content present), forwards to step_solve. + request is required for chat dispatch (disconnect detection, hands). 
+ """ + payload = msg.payload + content = payload.get("content") if isinstance(payload, dict) else None + + if content is not None and request is not None: + # Chat message: build Chat and stream from step_solve + async for out in self._route_chat(msg, request): + yield out + else: + # Unknown or unsupported payload + yield OutboundMessage( + session_id=msg.session_id, + payload={ + "code": -1, + "text": "Unsupported message type or missing content", + "data": {}, + }, + stream=False, + ) + + async def _route_chat( + self, + msg: InboundMessage, + request: "Request", + ) -> AsyncGenerator[OutboundMessage, None]: + """Dispatch chat payload to step_solve.""" + from app.controller.chat_controller import start_chat_stream + from app.model.chat import Chat + + payload = msg.payload or {} + project_id = payload.get("project_id") or msg.session_id + task_id = payload.get("task_id") or str(uuid.uuid4()) + content = payload.get("content", "") + attachments = payload.get("attachments") or [] + # Map design doc attachments [{type, file_id}] -> attaches (paths/ids) + attaches = [] + for a in attachments: + if isinstance(a, dict) and "file_id" in a: + attaches.append(a["file_id"]) + elif isinstance(a, str): + attaches.append(a) + + user_id = msg.user_id or "user" + email = f"{user_id}@local" if "@" not in user_id else user_id + + # Build Chat with defaults + chat = Chat( + task_id=task_id, + project_id=project_id, + question=content, + email=email, + attaches=attaches, + model_platform=payload.get("model_platform") or "openai", + model_type=payload.get("model_type") or "gpt-4o", + # TODO(multi-tenant): falling back to os.environ inherits whatever + # the last /chat request wrote – unsafe under concurrent sessions. 
+ api_key=payload.get("api_key") + or os.environ.get("OPENAI_API_KEY", ""), + api_url=payload.get("api_url"), + user_id=msg.user_id, + ) + + stream = await start_chat_stream(chat, request) + async for sse_chunk in stream: + yield OutboundMessage( + session_id=msg.session_id, + payload={"raw": sse_chunk}, + stream=True, + ) + + async def route_out(self, session_id: str, msg: OutboundMessage) -> None: + """Outbound: route back to Client. Empty for now (WebSocket push later).""" + pass diff --git a/backend/app/router_layer/middleware.py b/backend/app/router_layer/middleware.py new file mode 100644 index 000000000..39ac2580f --- /dev/null +++ b/backend/app/router_layer/middleware.py @@ -0,0 +1,117 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. 
========= + +"""Channel/Session header middleware for Phase 2 Message Router.""" + +import logging +import uuid + +from app.component.environment import env +from app.router_layer.hands_resolver import get_hands_for_channel + +logger = logging.getLogger("router_layer") + +DEFAULT_CHANNEL = "desktop" +CHANNELS = frozenset( + { + "desktop", + "web", + "cli", + "whatsapp", + "telegram", + "slack", + "discord", + "lark", + "browser_extension", + } +) + + +def _is_truthy(raw: str | None) -> bool: + if raw is None: + return False + return raw.strip().lower() in ("1", "true", "yes", "on") + + +def _get_header( + scope: dict, name: str, default: str | None = None +) -> str | None: + name_lower = name.lower().encode() + for k, v in scope.get("headers", []): + if k.lower() == name_lower: + return v.decode() if v else default + return default + + +class ChannelSessionMiddleware: + """ + Parse X-Channel, X-Session-ID, X-User-ID headers and store in request.state. + Add X-Session-ID to response for clients. + Uses plain ASGI for reliable response header injection. 
+ """ + + def __init__(self, app): + self.app = app + + async def __call__(self, scope, receive, send): + if scope["type"] != "http": + await self.app(scope, receive, send) + return + + channel = ( + _get_header(scope, "X-Channel", DEFAULT_CHANNEL) or DEFAULT_CHANNEL + ) + if channel not in CHANNELS: + logger.warning( + "Invalid X-Channel header %r, falling back to %r", + channel, + DEFAULT_CHANNEL, + ) + channel = DEFAULT_CHANNEL + + session_id = _get_header(scope, "X-Session-ID") + user_id = _get_header(scope, "X-User-ID") + hands_override = _get_header(scope, "X-Hands-Override") + debug_override_enabled = _is_truthy(env("EIGENT_DEBUG", "false")) + + if hands_override and not debug_override_enabled: + logger.warning( + "Ignoring X-Hands-Override because EIGENT_DEBUG is disabled" + ) + hands_override = None + + if not session_id: + session_id = f"sess_{uuid.uuid4().hex[:16]}" + + hands = get_hands_for_channel(channel, hands_override) + + if "state" not in scope: + scope["state"] = {} + scope["state"]["channel"] = channel + scope["state"]["session_id"] = session_id + scope["state"]["user_id"] = user_id + scope["state"]["hands_override"] = hands_override + scope["state"]["hands"] = hands + + session_id_bytes = session_id.encode() + + async def send_wrapper(message): + if message["type"] == "http.response.start" and session_id: + headers = list(message.get("headers", [])) + if not any(h[0].lower() == b"x-session-id" for h in headers): + headers.append((b"x-session-id", session_id_bytes)) + message["headers"] = headers + await send(message) + + await self.app(scope, receive, send_wrapper) diff --git a/backend/app/router_layer/session_store.py b/backend/app/router_layer/session_store.py new file mode 100644 index 000000000..c8ac4ba87 --- /dev/null +++ b/backend/app/router_layer/session_store.py @@ -0,0 +1,87 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. 
========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= + +import time +from abc import ABC, abstractmethod +from typing import Any + + +class ISessionStore(ABC): + @abstractmethod + async def get(self, session_id: str) -> dict[str, Any] | None: ... + + @abstractmethod + async def set( + self, session_id: str, entry: dict[str, Any], ttl: int = 86400 + ) -> None: ... + + @abstractmethod + async def delete(self, session_id: str) -> None: ... 
+ + +class MemorySessionStore(ISessionStore): + _CLEANUP_INTERVAL_SET_CALLS = 128 + _CLEANUP_BATCH_SIZE = 512 + + def __init__(self) -> None: + self._sessions: dict[str, dict[str, Any]] = {} + self._expires_at: dict[str, float] = {} + self._set_calls = 0 + + def _is_expired(self, session_id: str) -> bool: + expires_at = self._expires_at.get(session_id) + if expires_at is None: + return False + return time.time() > expires_at + + def _cleanup_if_expired(self, session_id: str) -> None: + if self._is_expired(session_id): + self._sessions.pop(session_id, None) + self._expires_at.pop(session_id, None) + + def _cleanup_expired_entries(self, max_entries: int) -> None: + if not self._expires_at: + return + now = time.time() + cleaned = 0 + for session_id, expires_at in list(self._expires_at.items()): + if expires_at > now: + continue + self._sessions.pop(session_id, None) + self._expires_at.pop(session_id, None) + cleaned += 1 + if cleaned >= max_entries: + break + + async def get(self, session_id: str) -> dict[str, Any] | None: + self._cleanup_if_expired(session_id) + return self._sessions.get(session_id) + + async def set( + self, session_id: str, entry: dict[str, Any], ttl: int = 86400 + ) -> None: + self._set_calls += 1 + if ( + self._set_calls % self._CLEANUP_INTERVAL_SET_CALLS == 0 + ): # lazy GC for never-read sessions + self._cleanup_expired_entries(self._CLEANUP_BATCH_SIZE) + self._sessions[session_id] = entry + if ttl > 0: + self._expires_at[session_id] = time.time() + ttl + else: + self._expires_at.pop(session_id, None) + + async def delete(self, session_id: str) -> None: + self._sessions.pop(session_id, None) + self._expires_at.pop(session_id, None) diff --git a/backend/app/service/chat_service.py b/backend/app/service/chat_service.py index b0ea4e606..5ca228847 100644 --- a/backend/app/service/chat_service.py +++ b/backend/app/service/chat_service.py @@ -16,6 +16,7 @@ import datetime import logging import platform +from functools import partial from pathlib 
import Path from typing import Any @@ -43,6 +44,7 @@ from app.agent.toolkit.skill_toolkit import SkillToolkit from app.agent.toolkit.terminal_toolkit import TerminalToolkit from app.agent.tools import get_mcp_tools, get_toolkits +from app.hands.interface import IHands from app.model.chat import Chat, NewAgent, Status, TaskContent, sse_json from app.service.task import ( Action, @@ -335,6 +337,9 @@ async def step_solve(options: Chat, request: Request, task_lock: TaskLock): event_loop = asyncio.get_running_loop() sub_tasks: list[Task] = [] + # Phase 4: hands from ChannelSessionMiddleware (desktop=full, web=sandbox, etc.) + hands = getattr(request.state, "hands", None) + logger.info("=" * 80) logger.info( "🚀 [LIFECYCLE] step_solve STARTED", @@ -533,10 +538,13 @@ async def step_solve(options: Chat, request: Request, task_lock: TaskLock): ) try: - simple_resp = question_agent.step(simple_answer_prompt) - if simple_resp and simple_resp.msgs: - answer_content = simple_resp.msgs[0].content - else: + simple_resp = await question_agent.astep( + simple_answer_prompt + ) + answer_content = _extract_agent_response_content( + simple_resp + ) + if not answer_content: answer_content = ( "I understand your " "question, but I'm " @@ -633,11 +641,15 @@ async def step_solve(options: Chat, request: Request, task_lock: TaskLock): logger.info( "[NEW-QUESTION] Creating NEW workforce instance" ) - (workforce, mcp) = await construct_workforce(options) + (workforce, mcp) = await construct_workforce( + options, hands=hands + ) for new_agent in options.new_agents: workforce.add_single_agent_worker( format_agent_description(new_agent), - await new_agent_model(new_agent, options), + await new_agent_model( + new_agent, options, hands=hands + ), ) task_lock.status = Status.confirmed @@ -1224,14 +1236,15 @@ async def run_decomposition(): ) try: - simple_resp = question_agent.step( + simple_resp = await question_agent.astep( simple_answer_prompt ) - if simple_resp and simple_resp.msgs: - 
answer_content = simple_resp.msgs[ - 0 - ].content - else: + answer_content = ( + _extract_agent_response_content( + simple_resp + ) + ) + if not answer_content: answer_content = ( "I understand your " "question, but I'm " @@ -1563,7 +1576,7 @@ def on_stream_text(chunk): workforce.pause() workforce.add_single_agent_worker( format_agent_description(item), - await new_agent_model(item, options), + await new_agent_model(item, options, hands=hands), ) workforce.resume() elif item.action == Action.timeout: @@ -1958,18 +1971,12 @@ async def question_confirm( Is this a complex task? (yes/no):""" try: - resp = agent.step(full_prompt) - - if not resp or not resp.msgs or len(resp.msgs) == 0: - logger.warning( - "No response from agent, defaulting to complex task" - ) - return True + resp = await _run_agent_step(agent, full_prompt) - content = resp.msgs[0].content + content = _extract_agent_response_content(resp) if not content: logger.warning( - "Empty content from agent, defaulting to complex task" + "No response from agent, defaulting to complex task" ) return True @@ -2004,8 +2011,8 @@ async def summary_task(agent: ListenChatAgent, task: Task) -> str: """ logger.debug("Generating task summary", extra={"task_id": task.id}) try: - res = agent.step(prompt) - summary = res.msgs[0].content + res = await _run_agent_step(agent, prompt) + summary = _extract_agent_response_content(res) or "" logger.info("Task summary generated", extra={"summary": summary}) return summary except Exception as e: @@ -2056,8 +2063,8 @@ async def summary_subtasks_result(agent: ListenChatAgent, task: Task) -> str: Summary: """ - res = agent.step(prompt) - summary = res.msgs[0].content + res = await _run_agent_step(agent, prompt) + summary = _extract_agent_response_content(res) or "" logger.info( "Generated subtasks summary for " @@ -2068,6 +2075,47 @@ async def summary_subtasks_result(agent: ListenChatAgent, task: Task) -> str: return summary +def _extract_agent_response_content(resp) -> str | None: + 
if resp is None: + return None + + msg = getattr(resp, "msg", None) + if msg is not None: + content = getattr(msg, "content", None) + if content: + return content + + msgs = getattr(resp, "msgs", None) + if msgs: + first = msgs[0] + content = getattr(first, "content", None) + if content: + return content + + return None + + +async def _run_agent_step(agent: ListenChatAgent, prompt: str): + """Run one model step with backward-compatible priority. + + Some call sites and tests still stub synchronous ``step`` while newer paths + provide ``astep``. Prefer ``step`` when available to preserve existing + behavior, and fall back to ``astep``. + """ + step_fn = getattr(agent, "step", None) + if callable(step_fn): + result = step_fn(prompt) + if asyncio.iscoroutine(result): + return await result + return result + + astep_fn = getattr(agent, "astep", None) + if callable(astep_fn): + return await astep_fn(prompt) + + raise AttributeError("Agent has neither step nor astep") + + async def get_task_result_with_optional_summary( task: Task, options: Chat ) -> str: @@ -2083,6 +2131,24 @@ async def get_task_result_with_optional_summary( """ result = str(task.result or "") + def _is_failed_state(state) -> bool: + if state is None: + return False + state_name = getattr(state, "name", str(state)) + return "fail" in str(state_name).lower() + + has_failed_subtask = any( + _is_failed_state(getattr(subtask, "state", None)) + for subtask in (task.subtasks or []) + ) + + if has_failed_subtask: + logger.info( + "Task %s has failed subtasks, skipping LLM summary to finish quickly", + task.id, + ) + return result + if task.subtasks and len(task.subtasks) > 1: logger.info( f"Task {task.id} has " @@ -2110,12 +2176,16 @@ async def get_task_result_with_optional_summary( async def construct_workforce( options: Chat, + hands: IHands | None = None, ) -> tuple[Workforce, ListenChatAgent]: """Construct a workforce with all required agents. 
This function creates all agents in PARALLEL to minimize startup time. Sync functions are run in thread pool, async functions are awaited concurrently. + + When hands is passed, base agents add tools based on Brain capabilities: + hands.can_execute_terminal(), hands.can_use_browser(), etc. determine whether terminal/browser hands are enabled. """ logger.debug( "construct_workforce started", @@ -2226,10 +2296,12 @@ def _create_new_worker_agent() -> ListenChatAgent: results = await asyncio.gather( asyncio.to_thread(_create_coordinator_and_task_agents), asyncio.to_thread(_create_new_worker_agent), - asyncio.to_thread(browser_agent, options), - developer_agent(options), - document_agent(options), - asyncio.to_thread(multi_modal_agent, options), + asyncio.to_thread(partial(browser_agent, options, hands=hands)), + developer_agent(options, hands=hands), + document_agent(options, hands=hands), + asyncio.to_thread( + partial(multi_modal_agent, options, hands=hands) + ), mcp_agent(options), ) except Exception as e: @@ -2237,13 +2309,6 @@ def _create_new_worker_agent() -> ListenChatAgent: f"Failed to create agents in parallel: {e}", exc_info=True ) raise - finally: - # Always clear event loop reference after - # parallel agent creation completes. 
- # This prevents stale references and - # potential cross-request interference - set_main_event_loop(None) - # Unpack results ( coord_task_agents, @@ -2347,7 +2412,11 @@ def format_agent_description(agent_data: NewAgent | ActionNewAgent) -> str: return " ".join(description_parts) -async def new_agent_model(data: NewAgent | ActionNewAgent, options: Chat): +async def new_agent_model( + data: NewAgent | ActionNewAgent, + options: Chat, + hands: IHands | None = None, +): logger.info( "Creating new agent", extra={ @@ -2361,21 +2430,26 @@ async def new_agent_model(data: NewAgent | ActionNewAgent, options: Chat): ) working_directory = get_working_directory(options) tool_names = [] - tools = [*await get_toolkits(data.tools, data.name, options.project_id)] + tools = [ + *await get_toolkits( + data.tools, data.name, options.project_id, hands=hands + ) + ] for item in data.tools: tool_names.append(titleize(item)) - # Always include terminal_toolkit with proper working directory - terminal_toolkit = TerminalToolkit( - options.project_id, - agent_name=data.name, - working_directory=working_directory, - safe_mode=True, - clone_current_env=True, - ) - tools.extend(terminal_toolkit.get_tools()) - tool_names.append(titleize("terminal_toolkit")) + # Add terminal_toolkit when terminal hand is available + if hands is None or hands.can_execute_terminal(): + terminal_toolkit = TerminalToolkit( + options.project_id, + agent_name=data.name, + working_directory=working_directory, + safe_mode=True, + clone_current_env=True, + ) + tools.extend(terminal_toolkit.get_tools()) + tool_names.append(titleize("terminal_toolkit")) if data.mcp_tools is not None: - tools = [*tools, *await get_mcp_tools(data.mcp_tools)] + tools = [*tools, *await get_mcp_tools(data.mcp_tools, hands=hands)] for item in data.mcp_tools["mcpServers"].keys(): tool_names.append(titleize(item)) for item in tools: diff --git a/backend/app/service/mcp_config.py b/backend/app/service/mcp_config.py new file mode 100644 index 
000000000..8658dae30 --- /dev/null +++ b/backend/app/service/mcp_config.py @@ -0,0 +1,105 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= + +import json +import logging +from pathlib import Path + +logger = logging.getLogger("mcp_config") + +MCP_CONFIG_DIR = Path.home() / ".eigent" +MCP_CONFIG_PATH = MCP_CONFIG_DIR / "mcp.json" + + +def _normalize_args(args) -> list[str]: + """Normalize args to list of strings.""" + if args is None: + return [] + if isinstance(args, str): + try: + parsed = json.loads(args) + return ( + [str(x) for x in parsed] + if isinstance(parsed, list) + else [args] + ) + except json.JSONDecodeError: + return [x.strip() for x in args.split(",") if x.strip()] + if isinstance(args, list): + return [str(x) for x in args] + return [] + + +def _normalize_mcp(mcp: dict) -> dict: + """Normalize MCP server config.""" + out = dict(mcp) + if "args" in out: + out["args"] = _normalize_args(out["args"]) + return out + + +def get_mcp_config_path() -> Path: + return MCP_CONFIG_PATH + + +def read_mcp_config() -> dict: + """Read MCP config from ~/.eigent/mcp.json.""" + if not MCP_CONFIG_PATH.exists(): + default = {"mcpServers": {}} + write_mcp_config(default) + return default + try: + data = MCP_CONFIG_PATH.read_text(encoding="utf-8") + parsed = json.loads(data) + if not isinstance(parsed.get("mcpServers"), dict): + 
return {"mcpServers": {}} + for name, server in parsed["mcpServers"].items(): + if isinstance(server, dict): + parsed["mcpServers"][name] = _normalize_mcp(server) + return parsed + except (json.JSONDecodeError, OSError) as e: + logger.warning("Failed to read MCP config: %s, using default", e) + return {"mcpServers": {}} + + +def write_mcp_config(config: dict) -> None: + """Write MCP config to ~/.eigent/mcp.json.""" + MCP_CONFIG_DIR.mkdir(parents=True, exist_ok=True) + MCP_CONFIG_PATH.write_text( + json.dumps(config, indent=2, ensure_ascii=False), + encoding="utf-8", + ) + + +def add_mcp(name: str, mcp: dict) -> None: + """Add MCP server to config.""" + config = read_mcp_config() + if name not in config["mcpServers"]: + config["mcpServers"][name] = _normalize_mcp(mcp) + write_mcp_config(config) + + +def remove_mcp(name: str) -> None: + """Remove MCP server from config.""" + config = read_mcp_config() + if name in config["mcpServers"]: + del config["mcpServers"][name] + write_mcp_config(config) + + +def update_mcp(name: str, mcp: dict) -> None: + """Update MCP server in config.""" + config = read_mcp_config() + config["mcpServers"][name] = _normalize_mcp(mcp) + write_mcp_config(config) diff --git a/backend/app/service/skill_config_service.py b/backend/app/service/skill_config_service.py new file mode 100644 index 000000000..933256856 --- /dev/null +++ b/backend/app/service/skill_config_service.py @@ -0,0 +1,131 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========
+
+"""Skills config: ~/.eigent/{user_id}/skills-config.json."""
+
+import json
+import logging
+import time
+from pathlib import Path
+
+logger = logging.getLogger("skill_config")
+
+EIGENT_ROOT = Path.home() / ".eigent"
+
+
+def _config_path(user_id: str) -> Path:
+    return EIGENT_ROOT / str(user_id) / "skills-config.json"
+
+
+def _load_config(user_id: str) -> dict:
+    path = _config_path(user_id)
+    if not path.exists():
+        path.parent.mkdir(parents=True, exist_ok=True)
+        default = {"version": 1, "skills": {}}
+        path.write_text(json.dumps(default, indent=2, ensure_ascii=False))
+        return default
+    try:
+        return json.loads(path.read_text(encoding="utf-8"))
+    except (json.JSONDecodeError, OSError) as e:
+        logger.warning("Failed to load skill config: %s", e)
+        return {"version": 1, "skills": {}}
+
+
+def _save_config(user_id: str, config: dict) -> None:
+    path = _config_path(user_id)
+    path.parent.mkdir(parents=True, exist_ok=True)
+    path.write_text(json.dumps(config, indent=2, ensure_ascii=False))
+
+
+def _ensure_skills_key(config: dict) -> dict:
+    if "skills" not in config:
+        config["skills"] = {}
+    return config
+
+
+def skill_config_load(user_id: str) -> dict:
+    """Load skills config for user."""
+    config = _load_config(user_id)
+    return _ensure_skills_key(config)
+
+
+def skill_config_init(user_id: str) -> dict:
+    """Load or create config, merge default from example-skills if present."""
+    config = _load_config(user_id)
+    config = _ensure_skills_key(config)
+
+    # Try to merge default-config.json from example-skills (same as Electron)
+    try:
+        backend_root = Path(__file__).resolve().parent.parent.parent
+        default_path = (
+            backend_root.parent
+            / "resources"
+            / "example-skills"
+            / "default-config.json"
+        )
+        if default_path.exists():
+            default = 
json.loads(default_path.read_text(encoding="utf-8")) + if default.get("skills"): + for skill_name, skill_cfg in default["skills"].items(): + if skill_name not in config["skills"]: + config["skills"][skill_name] = { + **skill_cfg, + "addedAt": int(time.time() * 1000), + } + logger.info( + "Initialized config for example skill: %s", + skill_name, + ) + _save_config(user_id, config) + except Exception as e: + logger.warning("Failed to merge default config: %s", e) + + return config + + +def skill_config_update( + user_id: str, skill_name: str, skill_config: dict +) -> None: + """Update config for a skill (merge with existing, don't replace entirely).""" + config = _load_config(user_id) + config = _ensure_skills_key(config) + existing = config["skills"].get(skill_name, {}) + config["skills"][skill_name] = {**existing, **skill_config} + _save_config(user_id, config) + + +def skill_config_delete(user_id: str, skill_name: str) -> None: + """Remove skill from config.""" + config = _load_config(user_id) + config = _ensure_skills_key(config) + if skill_name in config["skills"]: + del config["skills"][skill_name] + _save_config(user_id, config) + + +def skill_config_toggle(user_id: str, skill_name: str, enabled: bool) -> dict: + """Toggle skill enabled state.""" + config = _load_config(user_id) + config = _ensure_skills_key(config) + if skill_name not in config["skills"]: + config["skills"][skill_name] = { + "enabled": enabled, + "scope": {"isGlobal": True, "selectedAgents": []}, + "addedAt": int(time.time() * 1000), + "isExample": False, + } + else: + config["skills"][skill_name]["enabled"] = enabled + _save_config(user_id, config) + return config["skills"][skill_name] diff --git a/backend/app/service/skill_service.py b/backend/app/service/skill_service.py new file mode 100644 index 000000000..f65f2f593 --- /dev/null +++ b/backend/app/service/skill_service.py @@ -0,0 +1,300 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. 
========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= + +import logging +import re +import shutil +import tempfile +import zipfile +from pathlib import Path + +SKILLS_ROOT = Path.home() / ".eigent" / "skills" +SKILL_FILE = "SKILL.md" +logger = logging.getLogger("skill_service") + + +def _parse_skill_frontmatter(content: str) -> dict | None: + """Parse name and description from SKILL.md frontmatter.""" + if not content.startswith("---"): + return None + end = content.find("\n---", 3) + block = content[4:end] if end > 0 else content[4:] + name_match = re.search(r"^\s*name\s*:\s*(.+)$", block, re.MULTILINE) + desc_match = re.search(r"^\s*description\s*:\s*(.+)$", block, re.MULTILINE) + name = name_match.group(1).strip().strip("'\"") if name_match else None + desc = desc_match.group(1).strip().strip("'\"") if desc_match else None + if name and desc: + return {"name": name, "description": desc} + return None + + +def _assert_under_skills_root(target: Path) -> Path: + """Ensure path is under SKILLS_ROOT (security).""" + root = SKILLS_ROOT.resolve() + resolved = target.resolve() + try: + resolved.relative_to(root) + except ValueError: + raise PermissionError("Path is outside skills directory") + return resolved + + +def skills_scan() -> list[dict]: + """Scan skills directory and return list of skills with metadata.""" + if not SKILLS_ROOT.exists(): + return [] + skills = [] + for entry in 
SKILLS_ROOT.iterdir(): + if not entry.is_dir() or entry.name.startswith("."): + continue + skill_path = entry / SKILL_FILE + try: + raw = skill_path.read_text(encoding="utf-8") + meta = _parse_skill_frontmatter(raw) + if meta: + skills.append( + { + "name": meta["name"], + "description": meta["description"], + "path": str(skill_path), + "scope": "user", + "skillDirName": entry.name, + } + ) + except (OSError, UnicodeDecodeError): + pass + return skills + + +def skill_get_path_by_name(skill_name: str) -> str | None: + """Return the absolute directory path for a skill by its display name, or None if not found.""" + if not SKILLS_ROOT.exists(): + return None + name_lower = (skill_name or "").strip().lower() + if not name_lower: + return None + for entry in SKILLS_ROOT.iterdir(): + if not entry.is_dir() or entry.name.startswith("."): + continue + skill_path = entry / SKILL_FILE + if not skill_path.exists(): + continue + try: + meta = _parse_skill_frontmatter( + skill_path.read_text(encoding="utf-8") + ) + if meta and meta.get("name", "").lower().strip() == name_lower: + return str(entry.resolve()) + except (OSError, UnicodeDecodeError): + pass + return None + + +def skill_write(skill_dir_name: str, content: str) -> None: + """Write SKILL.md for a skill.""" + name = (skill_dir_name or "").strip() + if not name: + raise ValueError("Skill folder name is required") + dir_path = _assert_under_skills_root(SKILLS_ROOT / name) + dir_path.mkdir(parents=True, exist_ok=True) + (dir_path / SKILL_FILE).write_text(content, encoding="utf-8") + + +def skill_read(skill_dir_name: str) -> str: + """Read SKILL.md content.""" + name = (skill_dir_name or "").strip() + if not name: + raise ValueError("Skill folder name is required") + skill_path = _assert_under_skills_root(SKILLS_ROOT / name / SKILL_FILE) + return skill_path.read_text(encoding="utf-8") + + +def skill_delete(skill_dir_name: str) -> None: + """Delete skill directory.""" + name = (skill_dir_name or "").strip() + if not name: + 
raise ValueError("Skill folder name is required") + dir_path = _assert_under_skills_root(SKILLS_ROOT / name) + if dir_path.exists(): + import shutil + + shutil.rmtree(dir_path) + + +def skill_list_files(skill_dir_name: str) -> list[str]: + """List files in skill directory.""" + name = (skill_dir_name or "").strip() + if not name: + raise ValueError("Skill folder name is required") + dir_path = _assert_under_skills_root(SKILLS_ROOT / name) + if not dir_path.exists(): + return [] + return [e.name for e in dir_path.iterdir()] + + +def _get_skill_name_from_file(skill_file_path: Path) -> str: + """Extract skill name from SKILL.md frontmatter.""" + try: + raw = skill_file_path.read_text(encoding="utf-8") + name_match = re.search(r"^\s*name\s*:\s*(.+)$", raw, re.MULTILINE) + parsed = ( + name_match.group(1).strip().strip("'\"") if name_match else None + ) + return parsed or skill_file_path.parent.name + except (OSError, UnicodeDecodeError): + return skill_file_path.parent.name + + +def _folder_name_from_skill_name(skill_name: str, fallback: str) -> str: + """Derive safe folder name from skill display name.""" + cleaned = ( + (skill_name or "") + .replace("\\", "-") + .replace("/", "-") + .replace("*", "-") + .replace("?", "-") + .replace(":", "-") + .replace('"', "-") + .replace("<", "-") + .replace(">", "-") + .replace("|", "-") + .replace(" ", "-") + ) + cleaned = re.sub(r"-+", "-", cleaned).strip("-") + return cleaned or fallback + + +def skill_import_zip( + zip_bytes: bytes, + replacements: list[str] | None = None, +) -> dict: + """ + Import skills from a zip archive. + Returns {success, error?, conflicts?} matching Electron IPC contract. 
+ """ + replacements_set = set(replacements or []) + temp_dir = Path(tempfile.mkdtemp(prefix="eigent-skill-extract-")) + try: + SKILLS_ROOT.mkdir(parents=True, exist_ok=True) + + # Step 1: Extract zip into temp directory + with zipfile.ZipFile(__import__("io").BytesIO(zip_bytes), "r") as zf: + for info in zf.infolist(): + if info.is_dir(): + continue + name = info.filename.replace("\\", "/").lstrip("/") + if ".." in name or name.startswith("/"): + return { + "success": False, + "error": "Zip archive contains unsafe paths", + } + dest = temp_dir / name + dest.parent.mkdir(parents=True, exist_ok=True) + dest.write_bytes(zf.read(info)) + + # Step 2: Find all SKILL.md files + skill_files: list[Path] = [] + + def find_skill_md(d: Path) -> None: + for entry in d.iterdir(): + if entry.name.startswith("."): + continue + if entry.is_dir(): + find_skill_md(entry) + elif entry.name == SKILL_FILE: + skill_files.append(entry) + + find_skill_md(temp_dir) + + if not skill_files: + return { + "success": False, + "error": "No SKILL.md files found in zip archive", + } + + # Step 3: Build existing skill names map + existing_names: dict[str, str] = {} + if SKILLS_ROOT.exists(): + for entry in SKILLS_ROOT.iterdir(): + if not entry.is_dir() or entry.name.startswith("."): + continue + skill_file = entry / SKILL_FILE + if not skill_file.exists(): + continue + try: + meta = _parse_skill_frontmatter( + skill_file.read_text(encoding="utf-8") + ) + if meta and meta.get("name"): + existing_names[meta["name"].lower()] = entry.name + except (OSError, UnicodeDecodeError): + pass + + conflicts: list[dict] = [] + + for skill_file in skill_files: + skill_dir = skill_file.parent + incoming_name = _get_skill_name_from_file(skill_file) + incoming_lower = incoming_name.lower() + + fallback = ( + "imported-skill" if skill_dir == temp_dir else skill_dir.name + ) + dest_folder = _folder_name_from_skill_name(incoming_name, fallback) + dest_path = SKILLS_ROOT / dest_folder + + existing_folder = 
existing_names.get(incoming_lower) + if existing_folder: + if replacements is None: + conflicts.append( + { + "folderName": existing_folder, + "skillName": incoming_name, + } + ) + continue + if existing_folder in replacements_set: + shutil.rmtree( + SKILLS_ROOT / existing_folder, ignore_errors=True + ) + else: + continue + + dest_path.mkdir(parents=True, exist_ok=True) + if skill_dir == temp_dir: + for item in temp_dir.iterdir(): + dest_item = dest_path / item.name + if item.is_dir(): + shutil.copytree(item, dest_item, dirs_exist_ok=True) + else: + shutil.copy2(item, dest_item) + else: + for item in skill_dir.iterdir(): + dest_item = dest_path / item.name + if item.is_dir(): + shutil.copytree(item, dest_item, dirs_exist_ok=True) + else: + shutil.copy2(item, dest_item) + + if conflicts and replacements is None: + return {"success": False, "conflicts": conflicts} + + return {"success": True} + except zipfile.BadZipFile: + return {"success": False, "error": "Invalid zip file"} + except Exception: + logger.exception("Failed to import skills from zip archive") + return {"success": False, "error": "Failed to import skills"} + finally: + shutil.rmtree(temp_dir, ignore_errors=True) diff --git a/backend/app/service/task.py b/backend/app/service/task.py index 604fbc717..1696535f3 100644 --- a/backend/app/service/task.py +++ b/backend/app/service/task.py @@ -36,6 +36,8 @@ logger = logging.getLogger("task_service") +TASK_LOCK_CLEANUP_SENTINEL = "__task_lock_cleanup__" + class Action(str, Enum): improve = "improve" # user -> backend @@ -444,6 +446,16 @@ async def cleanup(self): pass self.background_tasks.clear() + # Unblock agents waiting on human input so shutdown can proceed. 
+ for agent, queue in self.human_input.items(): + try: + queue.put_nowait(TASK_LOCK_CLEANUP_SENTINEL) + except asyncio.QueueFull: + logger.debug( + "Human input queue already full during cleanup", + extra={"task_id": self.id, "agent": agent}, + ) + # Clean up registered toolkits (e.g., remove TerminalToolkit venvs) for toolkit in self.registered_toolkits: try: diff --git a/backend/app/utils/browser_launcher.py b/backend/app/utils/browser_launcher.py new file mode 100644 index 000000000..ec16a26b6 --- /dev/null +++ b/backend/app/utils/browser_launcher.py @@ -0,0 +1,254 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= + +""" +Browser launcher for Web mode: ensures a CDP-capable browser is running +when Electron is not available (e.g. web + brain mode). + +Previously, Electron provided the CDP browser via remote-debugging-port. +In web mode, Brain launches Chrome/Chromium directly. 
+""" + +import logging +import os +import platform +import socket +import subprocess +from pathlib import Path +from urllib.parse import urlparse + +logger = logging.getLogger("browser_launcher") + +# Default CDP port (must match browser_port in Chat model) +DEFAULT_CDP_PORT = 9222 +LOCAL_CDP_HOSTS = frozenset({"127.0.0.1", "localhost", "::1"}) + + +def is_local_cdp_host(host: str | None) -> bool: + """Return whether the CDP endpoint host points at the local machine.""" + if not host: + return True + return host.lower() in LOCAL_CDP_HOSTS + + +def normalize_cdp_url( + cdp_url: str, + *, + default_host: str = "127.0.0.1", + default_port: int = DEFAULT_CDP_PORT, +) -> tuple[str, str, int]: + """Normalize a CDP endpoint into ``scheme://host:port`` form.""" + parsed = urlparse(cdp_url) + scheme = parsed.scheme or "http" + host = parsed.hostname or default_host + port = parsed.port or default_port + return f"{scheme}://{host}:{port}", host, port + + +def is_cdp_url_available(cdp_url: str) -> bool: + """Check whether a CDP endpoint is reachable.""" + normalized, host, port = normalize_cdp_url(cdp_url) + if is_local_cdp_host(host): + return _is_cdp_available(port) + + try: + import httpx + + r = httpx.get(f"{normalized}/json/version", timeout=2.0) + return r.status_code == 200 + except Exception: + return False + + +def _is_port_in_use(port: int) -> bool: + """Check if a port is in use.""" + with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: + return s.connect_ex(("127.0.0.1", port)) == 0 + + +def _is_cdp_available(port: int) -> bool: + """Check if a CDP-capable browser is listening on the port.""" + try: + import httpx + + r = httpx.get(f"http://127.0.0.1:{port}/json/version", timeout=2.0) + return r.status_code == 200 + except Exception: + return False + + +def _find_chrome_executable() -> str | None: + """Find Chrome or Chromium executable for launching with CDP.""" + system = platform.system() + + # 1. 
Try Playwright's Chromium (most reliable, cross-platform) + try: + from playwright.sync_api import sync_playwright + + with sync_playwright() as p: + path = p.chromium.executable_path + if path and Path(path).exists(): + logger.debug(f"Using Playwright Chromium: {path}") + return path + except Exception as e: + logger.debug(f"Playwright Chromium not available: {e}") + + # 2. Platform-specific paths + if system == "Darwin": + candidates = [ + "/Applications/Google Chrome.app/Contents/MacOS/Google Chrome", + "/Applications/Chromium.app/Contents/MacOS/Chromium", + "/Applications/Google Chrome for Testing.app/Contents/MacOS/Google Chrome for Testing", + ] + elif system == "Linux": + candidates = [ + "/usr/bin/google-chrome", + "/usr/bin/google-chrome-stable", + "/usr/bin/chromium", + "/usr/bin/chromium-browser", + ] + elif system == "Windows": + candidates = [ + os.path.expandvars( + r"%ProgramFiles%\Google\Chrome\Application\chrome.exe" + ), + os.path.expandvars( + r"%ProgramFiles(x86)%\Google\Chrome\Application\chrome.exe" + ), + ] + else: + candidates = [] + + for path in candidates: + if path and Path(path).exists(): + logger.debug(f"Using system Chrome: {path}") + return path + + # 3. Try executable from PATH + for name in ("google-chrome", "chromium", "chromium-browser", "chrome"): + exe = _which(name) + if exe: + return exe + + return None + + +def _which(name: str) -> str | None: + """Find executable in PATH.""" + for path in os.environ.get("PATH", "").split(os.pathsep): + exe = Path(path) / name + if exe.exists(): + return str(exe) + return None + + +def _launch_browser( + executable: str, port: int, user_data_dir: str +) -> subprocess.Popen | None: + """Launch browser with CDP enabled. 
Returns process or None on failure.""" + profile_dir = Path(user_data_dir).expanduser() + profile_dir.mkdir(parents=True, exist_ok=True) + + args = [ + executable, + f"--remote-debugging-port={port}", + f"--user-data-dir={profile_dir}", + "--no-first-run", + "--no-default-browser-check", + "--disable-blink-features=AutomationControlled", + "about:blank", + ] + + try: + proc = subprocess.Popen( + args, + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + start_new_session=True, + ) + logger.info( + f"[BROWSER LAUNCHER] Launched browser on port {port} (PID={proc.pid})" + ) + return proc + except Exception as e: + logger.error( + f"[BROWSER LAUNCHER] Failed to launch: {e}", exc_info=True + ) + return None + + +def ensure_cdp_browser_available(port: int = DEFAULT_CDP_PORT) -> bool: + """ + Ensure a CDP-capable browser is running on the given port. + + If no browser is listening, attempts to launch Chrome/Chromium. + Used in web mode when Electron is not available to provide CDP. + + Returns: + True if CDP is available (either already running or newly launched), + False otherwise. + """ + # Check if auto-launch is disabled + if os.environ.get("EIGENT_BRAIN_LAUNCH_BROWSER", "true").lower() in ( + "false", + "0", + "no", + ): + logger.debug("[BROWSER LAUNCHER] Auto-launch disabled by env") + return _is_cdp_available(port) + + # Already available + if _is_cdp_available(port): + logger.debug( + f"[BROWSER LAUNCHER] CDP already available on port {port}" + ) + return True + + # Port in use but not CDP (e.g. another service) + if _is_port_in_use(port): + logger.warning( + f"[BROWSER LAUNCHER] Port {port} in use but not CDP. " + "Another service may be using it." + ) + return False + + # Launch browser + executable = _find_chrome_executable() + if not executable: + logger.error( + "[BROWSER LAUNCHER] No Chrome/Chromium found. 
" + "Run: playwright install chromium" + ) + return False + + user_data_dir = os.path.expanduser( + f"~/.eigent/browser_profiles/cdp_brain_{port}" + ) + proc = _launch_browser(executable, port, user_data_dir) + if not proc: + return False + + # Poll for readiness (max 10s) + import time + + for _ in range(20): + time.sleep(0.5) + if _is_cdp_available(port): + logger.info(f"[BROWSER LAUNCHER] CDP ready on port {port}") + return True + + logger.warning( + "[BROWSER LAUNCHER] Browser launched but CDP not ready after 10s" + ) + return False diff --git a/backend/app/utils/event_loop_utils.py b/backend/app/utils/event_loop_utils.py index 13207054e..8d4a97c99 100644 --- a/backend/app/utils/event_loop_utils.py +++ b/backend/app/utils/event_loop_utils.py @@ -51,7 +51,7 @@ def _schedule_async_task(coro): try: # Try to get the running loop (works in main event loop thread) loop = asyncio.get_running_loop() - loop.create_task(coro) + return loop.create_task(coro) except RuntimeError: # No running loop in this thread (we're in a worker thread) # First try contextvars, then fallback to global reference @@ -60,10 +60,14 @@ def _schedule_async_task(coro): with _GLOBAL_MAIN_LOOP_LOCK: main_loop = _GLOBAL_MAIN_LOOP if main_loop is not None and main_loop.is_running(): - asyncio.run_coroutine_threadsafe(coro, main_loop) + return asyncio.run_coroutine_threadsafe(coro, main_loop) else: # This should not happen in normal operation - log error and skip + close = getattr(coro, "close", None) + if callable(close): + close() logging.error( "No event loop available for async task scheduling, task skipped. " "Ensure set_main_event_loop() is called before parallel agent creation." 
) + return None diff --git a/backend/app/utils/listen/toolkit_listen.py b/backend/app/utils/listen/toolkit_listen.py index 1c2c9bbc6..f9db611d4 100644 --- a/backend/app/utils/listen/toolkit_listen.py +++ b/backend/app/utils/listen/toolkit_listen.py @@ -231,7 +231,7 @@ def run_in_thread(): logger.error(f"[SAFE_PUT_QUEUE] Thread failed: {e}") result_queue.put(("error", e)) - thread = threading.Thread(target=run_in_thread, daemon=False) + thread = threading.Thread(target=run_in_thread, daemon=True) thread.start() # Wait briefly for completion diff --git a/backend/app/utils/server/sync_step.py b/backend/app/utils/server/sync_step.py index ec2d8f514..aec4a6d98 100644 --- a/backend/app/utils/server/sync_step.py +++ b/backend/app/utils/server/sync_step.py @@ -18,14 +18,13 @@ High-frequency events (decompose_text) are batched to reduce API calls. Config (~/.eigent/.env): - SERVER_URL=https://dev.eigent.ai/api + SERVER_URL=https://dev.eigent.ai/api/v1 """ import asyncio import json import logging import time -from functools import lru_cache import httpx @@ -39,27 +38,54 @@ # Buffer storage: task_id -> accumulated text _text_buffers: dict[str, str] = {} +_warned_missing_auth_projects: set[str] = set() +_warned_missing_server_url_projects: set[str] = set() +_logged_sync_targets: set[str] = set() +_logged_first_sync_tasks: set[str] = set() -@lru_cache(maxsize=1) -def _get_config(): - server_url = env("SERVER_URL", "") +def _normalize_server_url(server_url: str | None) -> str: + if not server_url: + return "" + + trimmed = server_url.rstrip("/") + if trimmed.endswith("/api/v1"): + return trimmed + return f"{trimmed}/api/v1" + + +def _get_config(args): + server_url = ( + getattr(args[0], "server_url", None) + if args and hasattr(args[0], "server_url") + else None + ) + + if not server_url: + server_url = env("SERVER_URL", "") + + server_url = _normalize_server_url(server_url) if not server_url: return None - return f"{server_url.rstrip('/')}/chat/steps" + return 
f"{server_url}/chat/steps" def sync_step(func): async def wrapper(*args, **kwargs): - config = _get_config() + config = _get_config(args) if not config: + _warn_missing_server_url(args) async for value in func(*args, **kwargs): yield value return + if config not in _logged_sync_targets: + _logged_sync_targets.add(config) + logger.info("Cloud step sync enabled: %s", config) + async for value in func(*args, **kwargs): _try_sync(args, value, config) yield value @@ -76,18 +102,23 @@ def _try_sync(args, value, sync_url): if not task_id: return + headers = _get_auth_headers(args) + if headers is None: + _warn_missing_auth(args) + return + step = data.get("step") # Batch decompose_text events to reduce API calls if step == "decompose_text": _buffer_text(task_id, data["data"].get("content", "")) if _should_flush(task_id): - _flush_buffer(task_id, sync_url) + _flush_buffer(task_id, sync_url, headers) return # Flush any buffered text before sending other events (preserves order) if task_id in _text_buffers: - _flush_buffer(task_id, sync_url) + _flush_buffer(task_id, sync_url, headers) payload = { "task_id": task_id, @@ -96,7 +127,16 @@ def _try_sync(args, value, sync_url): "timestamp": time.time_ns() / 1_000_000_000, } - asyncio.create_task(_send(sync_url, payload)) + if task_id not in _logged_first_sync_tasks: + _logged_first_sync_tasks.add(task_id) + logger.info( + "Scheduling first cloud step sync: task_id=%s, step=%s, url=%s", + task_id, + step, + sync_url, + ) + + asyncio.create_task(_send(sync_url, payload, headers)) def _buffer_text(task_id: str, content: str): @@ -113,7 +153,11 @@ def _should_flush(task_id: str) -> bool: return word_count >= BATCH_WORD_THRESHOLD -def _flush_buffer(task_id: str, sync_url: str): +def _flush_buffer( + task_id: str, + sync_url: str, + headers: dict[str, str], +): """Send buffered text and clear buffer.""" text = _text_buffers.pop(task_id, "") if not text: @@ -126,7 +170,7 @@ def _flush_buffer(task_id: str, sync_url: str): "timestamp": 
time.time_ns() / 1_000_000_000, } - asyncio.create_task(_send(sync_url, payload)) + asyncio.create_task(_send(sync_url, payload, headers)) def _parse_value(value): @@ -162,9 +206,58 @@ def _get_task_id(args): return chat.task_id -async def _send(url, data): +def _get_auth_headers(args) -> dict[str, str] | None: + if len(args) < 2: + return None + + request = args[1] + headers = getattr(request, "headers", None) + if not headers: + return None + + auth_header = headers.get("authorization") + if not auth_header: + return None + + return {"Authorization": auth_header} + + +def _warn_missing_auth(args) -> None: + project_id = getattr(args[0], "project_id", None) if args else None + if not project_id or project_id in _warned_missing_auth_projects: + return + + _warned_missing_auth_projects.add(project_id) + logger.info( + "Skipping cloud step sync because Authorization header is missing " + "for project_id=%s. Replay will be unavailable for this run.", + project_id, + ) + + +def _warn_missing_server_url(args) -> None: + project_id = getattr(args[0], "project_id", None) if args else None + if not project_id or project_id in _warned_missing_server_url_projects: + return + + _warned_missing_server_url_projects.add(project_id) + logger.info( + "Skipping cloud step sync because SERVER_URL is empty for " + "project_id=%s. 
Replay will be unavailable for this run.", + project_id, + ) + + +async def _send(url, data, headers: dict[str, str]): try: async with httpx.AsyncClient(timeout=5.0) as client: - await client.post(url, json=data) + response = await client.post(url, json=data, headers=headers) + if response.is_error: + logger.error( + "Failed to sync step to %s: HTTP %s: %s", + url, + response.status_code, + response.text[:500], + ) except Exception as e: logger.error(f"Failed to sync step to {url}: {type(e).__name__}: {e}") diff --git a/backend/app/utils/telemetry/workforce_metrics.py b/backend/app/utils/telemetry/workforce_metrics.py index 550357789..6944e6acf 100644 --- a/backend/app/utils/telemetry/workforce_metrics.py +++ b/backend/app/utils/telemetry/workforce_metrics.py @@ -183,6 +183,25 @@ def get_tracer_provider() -> TracerProvider | None: return _GLOBAL_TRACER_PROVIDER +def shutdown_tracer_provider() -> None: + """Shutdown the global TracerProvider and release background threads. + + Call during FastAPI shutdown to ensure BatchSpanProcessor worker threads + are properly released, preventing process hang on exit. + """ + global _GLOBAL_TRACER_PROVIDER + if _GLOBAL_TRACER_PROVIDER is None: + return + try: + _GLOBAL_TRACER_PROVIDER.force_flush(timeout_millis=2000) + _GLOBAL_TRACER_PROVIDER.shutdown() + logger.info("TracerProvider shutdown completed") + except Exception as e: + logger.warning(f"TracerProvider shutdown failed: {e}") + finally: + _GLOBAL_TRACER_PROVIDER = None + + def _create_langfuse_endpoint(base_url: str) -> str: """Create Langfuse OTLP endpoint URL. 
diff --git a/backend/app/utils/workforce.py b/backend/app/utils/workforce.py index e8d749b51..31efa9cb4 100644 --- a/backend/app/utils/workforce.py +++ b/backend/app/utils/workforce.py @@ -52,6 +52,7 @@ get_camel_task, get_task_lock, ) +from app.utils.event_loop_utils import _schedule_async_task from app.utils.single_agent_worker import SingleAgentWorker from app.utils.telemetry.workforce_metrics import WorkforceMetricsCallback @@ -790,6 +791,10 @@ async def _handle_failed_task(self, task: Task) -> bool: if task.failure_count < max_retries: return result + # Keep parent.subtasks in sync for terminal failures as well so + # downstream result summarization can see that a subtask failed. + self._sync_subtask_to_parent(task) + error_message = "" # Use proper CAMEL pattern for metrics logging metrics_callbacks = [ @@ -907,8 +912,11 @@ def stop(self) -> None: f"new state: {self._state.name}" ) task_lock = get_task_lock(self.api_task_id) - task = asyncio.create_task(task_lock.put_queue(ActionEndData())) - task_lock.add_background_task(task) + # Use thread-safe scheduling: stop() may be called from worker thread + # when task fails (e.g. 
max retries exceeded) + task = _schedule_async_task(task_lock.put_queue(ActionEndData())) + if task is not None: + task_lock.add_background_task(task) logger.info("[WF-LIFECYCLE] ✅ ActionEndData queued") def stop_gracefully(self) -> None: diff --git a/backend/config/hands_clusters.example.toml b/backend/config/hands_clusters.example.toml new file mode 100644 index 000000000..a93388b01 --- /dev/null +++ b/backend/config/hands_clusters.example.toml @@ -0,0 +1,26 @@ +[defaults] +timeout_seconds = 10 +verify_tls = true +acquire_path = "/acquire" +release_path = "/release" +health_path = "/health" +# auth_token_env = "EIGENT_HANDS_CLUSTER_AUTH_TOKEN" + +[routes] +browser = "browser_pool" +terminal = "terminal_pool" +model = "model_pool" +default = "gateway" + +[clusters.gateway] +base_url = "http://hands-gateway:8080" + +[clusters.browser_pool] +base_url = "http://browser-cluster:8080" +# auth_token_env = "EIGENT_BROWSER_CLUSTER_TOKEN" + +[clusters.terminal_pool] +base_url = "http://terminal-cluster:8080" + +[clusters.model_pool] +base_url = "http://model-cluster:8080" diff --git a/backend/main.py b/backend/main.py index 92843bf1e..e2d9dc3ab 100644 --- a/backend/main.py +++ b/backend/main.py @@ -18,6 +18,7 @@ import pathlib import signal import sys +import threading # Add project root to Python path to import shared utils _project_root = pathlib.Path(__file__).parent.parent @@ -41,6 +42,7 @@ from app import api from app.component.environment import env from app.router import register_routers +from app.utils.event_loop_utils import set_main_event_loop os.environ["PYTHONIOENCODING"] = "utf-8" @@ -53,6 +55,9 @@ prefix = env("url_prefix", "") app_logger.info(f"Loading routers with prefix: '{prefix}'") +app_logger.info( + f"MCP will be at: {prefix}/mcp/list, health at: {prefix}/health" +) register_routers(api, prefix) app_logger.info("All routers loaded successfully") @@ -99,9 +104,16 @@ async def write_pid_file(): @api.on_event("startup") async def startup_event(): global 
pid_task + set_main_event_loop(asyncio.get_running_loop()) pid_task = asyncio.create_task(write_pid_file()) app_logger.info("PID write task created") + # Initialize EnvironmentHands from Brain deployment (full on local/cloud_vm, sandbox in Docker) + from app.router_layer.hands_resolver import init_environment_hands + + hands = init_environment_hands() + app_logger.info(f"EnvironmentHands initialized: mode={hands.mode}") + # Initialize telemetry tracer provider from app.utils.telemetry.workforce_metrics import ( initialize_tracer_provider, @@ -111,8 +123,10 @@ async def startup_event(): app_logger.info("Telemetry tracer provider initialized") -# Graceful shutdown handler -shutdown_event = asyncio.Event() +@api.on_event("shutdown") +async def shutdown_event_handler(): + r"""Run cleanup when uvicorn receives SIGINT/SIGTERM and shuts down.""" + await cleanup_resources() async def cleanup_resources(): @@ -141,18 +155,43 @@ async def cleanup_resources(): if pid_file.exists(): pid_file.unlink() - app_logger.info("All resources cleaned up successfully") + # Shutdown OpenTelemetry tracer (releases BatchSpanProcessor worker threads) + try: + from app.utils.telemetry.workforce_metrics import ( + shutdown_tracer_provider, + ) + shutdown_tracer_provider() + except Exception as e: + app_logger.warning(f"Telemetry shutdown failed: {e}") -def signal_handler(signum, frame): - r"""Handle shutdown signals""" - app_logger.warning(f"Received shutdown signal: {signum}") - asyncio.create_task(cleanup_resources()) - shutdown_event.set() + # Shutdown TerminalToolkit thread pool (prevents non-daemon threads blocking exit) + try: + from app.agent.toolkit.terminal_toolkit import TerminalToolkit + if TerminalToolkit._thread_pool is not None: + TerminalToolkit._thread_pool.shutdown(wait=False) + TerminalToolkit._thread_pool = None + except Exception as e: + app_logger.warning(f"TerminalToolkit shutdown failed: {e}") -signal.signal(signal.SIGTERM, signal_handler) -signal.signal(signal.SIGINT, 
signal_handler) + # Best-effort close Browser toolkit WebSocket/Node connections. + # Use a timeout so shutdown stays responsive even if a wrapper is stuck. + try: + from app.agent.toolkit.hybrid_browser_toolkit import ( + websocket_connection_pool, + ) + + await asyncio.wait_for( + websocket_connection_pool.close_all(), timeout=3.0 + ) + except TimeoutError: + app_logger.warning("Browser WebSocket pool shutdown timed out") + except Exception as e: + app_logger.warning(f"Browser WebSocket pool shutdown failed: {e}") + + set_main_event_loop(None) + app_logger.info("All resources cleaned up successfully") # Register cleanup on exit with safe synchronous wrapper @@ -172,3 +211,86 @@ def sync_cleanup(): # Log successful initialization app_logger.info("Application initialization completed successfully") + + +def run_standalone(): + """Run Brain in standalone mode (no Electron dependency).""" + import uvicorn + + port = int(env("EIGENT_BRAIN_PORT", "5001")) + host = env("EIGENT_BRAIN_HOST", "0.0.0.0") # nosec B104 - bind all for Docker/dev + reload = os.environ.get("EIGENT_DEBUG", "").lower() in ("1", "true", "yes") + + app_logger.info( + f"Starting Brain in standalone mode: {host}:{port} (reload={reload})" + ) + if reload: + uvicorn.run( + "main:api", + host=host, + port=port, + reload=reload, + timeout_graceful_shutdown=5, + ) + return + + config = uvicorn.Config( + "main:api", + host=host, + port=port, + reload=False, + timeout_graceful_shutdown=5, + ) + server = uvicorn.Server(config) + server.install_signal_handlers = lambda: None + + force_exit_timer = None + signal_count = {"count": 0} + old_sigint = signal.getsignal(signal.SIGINT) + old_sigterm = signal.getsignal(signal.SIGTERM) + + def _force_exit(signum: int): + signame = signal.Signals(signum).name + app_logger.error( + "Force exiting Brain after %s because graceful shutdown did not finish", + signame, + ) + os._exit(128 + signum) + + def _handle_signal(signum, _frame): + nonlocal force_exit_timer + signame = 
signal.Signals(signum).name + signal_count["count"] += 1 + + if signal_count["count"] == 1: + app_logger.warning( + "%s received, requesting graceful shutdown. Press Ctrl+C again to force exit.", + signame, + ) + server.should_exit = True + if force_exit_timer is None: + force_exit_timer = threading.Timer( + 5.0, _force_exit, args=(signum,) + ) + force_exit_timer.daemon = True + force_exit_timer.start() + return + + app_logger.error( + "%s received again, force exiting Brain immediately", signame + ) + _force_exit(signum) + + signal.signal(signal.SIGINT, _handle_signal) + signal.signal(signal.SIGTERM, _handle_signal) + try: + server.run() + finally: + if force_exit_timer is not None: + force_exit_timer.cancel() + signal.signal(signal.SIGINT, old_sigint) + signal.signal(signal.SIGTERM, old_sigterm) + + +if __name__ == "__main__": + run_standalone() diff --git a/backend/tests/app/agent/factory/test_browser.py b/backend/tests/app/agent/factory/test_browser.py index aab7aaadf..0c26a90c5 100644 --- a/backend/tests/app/agent/factory/test_browser.py +++ b/backend/tests/app/agent/factory/test_browser.py @@ -12,6 +12,7 @@ # limitations under the License. # ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. 
========= +import os from unittest.mock import MagicMock, patch import pytest @@ -91,3 +92,55 @@ def test_browser_agent_creation(sample_chat_data): assert ( "search" in str(system_message).lower() ) # system_prompt contains search + + +def test_browser_agent_prefers_preconnected_cdp_url(sample_chat_data): + """A browser connected from the Browser page should be reused by the agent.""" + options = Chat(**sample_chat_data) + + from app.service.task import task_locks + + mock_task_lock = MagicMock() + task_locks[options.task_id] = mock_task_lock + + _mod = "app.agent.factory.browser" + with ( + patch(f"{_mod}.agent_model") as mock_agent_model, + patch( + f"{_mod}.get_working_directory", return_value="/tmp/test_workdir" + ), + patch("asyncio.create_task"), + patch(f"{_mod}.HumanToolkit") as mock_human_toolkit, + patch(f"{_mod}.HybridBrowserToolkit") as mock_browser_toolkit, + patch(f"{_mod}.TerminalToolkit") as mock_terminal_toolkit, + patch(f"{_mod}.NoteTakingToolkit") as mock_note_toolkit, + patch(f"{_mod}.ScreenshotToolkit") as mock_screenshot_toolkit, + patch(f"{_mod}.SearchToolkit") as mock_search_toolkit, + patch(f"{_mod}.ToolkitMessageIntegration"), + patch("uuid.uuid4") as mock_uuid, + patch.dict( + os.environ, + {"EIGENT_CDP_URL": "http://worker-17:9222"}, + clear=True, + ), + ): + mock_human_toolkit.get_can_use_tools.return_value = [] + mock_browser_toolkit.return_value.get_tools.return_value = [] + mock_terminal_instance = MagicMock() + mock_terminal_instance.shell_exec = MagicMock() + mock_terminal_toolkit.return_value = mock_terminal_instance + mock_note_toolkit.return_value.get_tools.return_value = [] + mock_screenshot_toolkit.return_value.get_tools.return_value = [] + mock_search_instance = MagicMock() + mock_search_instance.search_google = MagicMock() + mock_search_toolkit.return_value = mock_search_instance + mock_uuid.return_value.__getitem__ = lambda self, key: "test_session" + + mock_agent = MagicMock() + mock_agent_model.return_value = mock_agent + 
+ browser_agent(options) + + assert mock_browser_toolkit.call_args.kwargs["cdp_url"] == ( + "http://worker-17:9222" + ) diff --git a/backend/tests/app/controller/test_chat_controller.py b/backend/tests/app/controller/test_chat_controller.py index 5366b6dc3..2356fa7d8 100644 --- a/backend/tests/app/controller/test_chat_controller.py +++ b/backend/tests/app/controller/test_chat_controller.py @@ -13,6 +13,7 @@ # ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= import os +from types import SimpleNamespace from unittest.mock import AsyncMock, MagicMock, patch import pytest @@ -111,7 +112,137 @@ async def mock_generator(): assert os.environ.get("CAMEL_MODEL_LOG_ENABLED") == "true" assert os.environ.get("browser_port") == "8080" - def test_improve_chat_success(self, mock_task_lock): + @pytest.mark.asyncio + async def test_post_chat_sets_cdp_url_when_browser_ready( + self, sample_chat_data, mock_request, mock_task_lock + ): + """Web mode should set EIGENT_CDP_URL after successful browser ensure.""" + chat_data = Chat(**sample_chat_data) + mock_request.state = SimpleNamespace() + + with ( + patch( + "app.controller.chat_controller.get_or_create_task_lock", + return_value=mock_task_lock, + ), + patch( + "app.controller.chat_controller.step_solve" + ) as mock_step_solve, + patch( + "app.controller.chat_controller.is_cdp_url_available", + return_value=False, + ), + patch( + "app.controller.chat_controller.ensure_cdp_browser_available", + return_value=True, + ), + patch("app.controller.chat_controller.load_dotenv"), + patch("app.controller.chat_controller.set_current_task_id"), + patch("pathlib.Path.mkdir"), + patch("pathlib.Path.home", return_value=MagicMock()), + patch.dict(os.environ, {}, clear=True), + ): + + async def mock_generator(): + yield "data: test_response\n\n" + + mock_step_solve.return_value = mock_generator() + + await post(chat_data, mock_request) + + assert os.environ.get("EIGENT_CDP_URL") == "http://127.0.0.1:8080" + assert 
mock_request.state.browser_available is True + + @pytest.mark.asyncio + async def test_post_chat_clears_cdp_url_when_browser_unavailable( + self, sample_chat_data, mock_request, mock_task_lock + ): + """Web mode should mark browser unavailable and clear EIGENT_CDP_URL.""" + chat_data = Chat(**sample_chat_data) + mock_request.state = SimpleNamespace() + + with ( + patch( + "app.controller.chat_controller.get_or_create_task_lock", + return_value=mock_task_lock, + ), + patch( + "app.controller.chat_controller.step_solve" + ) as mock_step_solve, + patch( + "app.controller.chat_controller.is_cdp_url_available", + return_value=False, + ), + patch( + "app.controller.chat_controller.ensure_cdp_browser_available", + return_value=False, + ), + patch("app.controller.chat_controller.load_dotenv"), + patch("app.controller.chat_controller.set_current_task_id"), + patch("pathlib.Path.mkdir"), + patch("pathlib.Path.home", return_value=MagicMock()), + patch.dict( + os.environ, + {"EIGENT_CDP_URL": "http://127.0.0.1:9222"}, + clear=True, + ), + ): + + async def mock_generator(): + yield "data: test_response\n\n" + + mock_step_solve.return_value = mock_generator() + + await post(chat_data, mock_request) + + assert "EIGENT_CDP_URL" not in os.environ + assert mock_request.state.browser_available is False + + @pytest.mark.asyncio + async def test_post_chat_preserves_existing_cdp_url( + self, sample_chat_data, mock_request, mock_task_lock + ): + chat_data = Chat(**sample_chat_data) + mock_request.state = SimpleNamespace() + + with ( + patch( + "app.controller.chat_controller.get_or_create_task_lock", + return_value=mock_task_lock, + ), + patch( + "app.controller.chat_controller.step_solve" + ) as mock_step_solve, + patch( + "app.controller.chat_controller.is_cdp_url_available", + return_value=True, + ), + patch( + "app.controller.chat_controller.ensure_cdp_browser_available", + ) as mock_ensure_browser, + patch("app.controller.chat_controller.load_dotenv"), + 
patch("app.controller.chat_controller.set_current_task_id"), + patch("pathlib.Path.mkdir"), + patch("pathlib.Path.home", return_value=MagicMock()), + patch.dict( + os.environ, + {"EIGENT_CDP_URL": "http://worker-17:9222"}, + clear=True, + ), + ): + + async def mock_generator(): + yield "data: test_response\n\n" + + mock_step_solve.return_value = mock_generator() + + await post(chat_data, mock_request) + + assert os.environ.get("EIGENT_CDP_URL") == "http://worker-17:9222" + assert mock_request.state.browser_available is True + mock_ensure_browser.assert_not_called() + + def test_improve_chat_success(self, mock_task_lock, mock_request): """Test successful chat improvement.""" task_id = "test_task_123" supplement_data = SupplementChat(question="Improve this code") @@ -124,15 +255,18 @@ def test_improve_chat_success(self, mock_task_lock): ), patch("asyncio.run") as mock_run, ): - response = improve(task_id, supplement_data) + mock_run.side_effect = lambda coro: coro.close() + response = improve(task_id, supplement_data, mock_request) assert isinstance(response, Response) assert response.status_code == 201 - mock_run.assert_called_once() + assert mock_run.call_count == 2 # put_queue is invoked when creating the coroutine passed to asyncio.run mock_task_lock.put_queue.assert_called_once() - def test_improve_chat_task_done_resets_to_confirming(self, mock_task_lock): + def test_improve_chat_task_done_resets_to_confirming( + self, mock_task_lock, mock_request + ): """Test improvement when task is done resets status to confirming.""" task_id = "test_task_123" supplement_data = SupplementChat(question="Improve this code") @@ -145,7 +279,8 @@ def test_improve_chat_task_done_resets_to_confirming(self, mock_task_lock): ), patch("asyncio.run") as mock_run, ): - response = improve(task_id, supplement_data) + mock_run.side_effect = lambda coro: coro.close() + response = improve(task_id, supplement_data, mock_request) assert mock_task_lock.status == Status.confirming assert 
isinstance(response, Response) @@ -189,11 +324,12 @@ def test_stop_chat_success(self, mock_task_lock): with ( patch( - "app.controller.chat_controller.get_task_lock", + "app.controller.chat_controller.get_task_lock_if_exists", return_value=mock_task_lock, ), patch("asyncio.run") as mock_run, ): + mock_run.side_effect = lambda coro: coro.close() response = stop(task_id) assert isinstance(response, Response) @@ -428,13 +564,14 @@ def test_improve_with_nonexistent_task(self): """Test improve endpoint with nonexistent task.""" task_id = "nonexistent_task" supplement_data = SupplementChat(question="Improve this code") + request = SimpleNamespace() with patch( "app.controller.chat_controller.get_task_lock", side_effect=KeyError("Task not found"), ): with pytest.raises(KeyError): - improve(task_id, supplement_data) + improve(task_id, supplement_data, request) def test_supplement_with_empty_question(self, mock_task_lock): """Test supplement endpoint with empty question.""" diff --git a/backend/tests/app/controller/test_message_controller.py b/backend/tests/app/controller/test_message_controller.py new file mode 100644 index 000000000..451381253 --- /dev/null +++ b/backend/tests/app/controller/test_message_controller.py @@ -0,0 +1,63 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. 
========= + +import pytest +from fastapi import FastAPI +from fastapi.testclient import TestClient + +from app.router import register_routers +from app.router_layer import ChannelSessionMiddleware + + +def _build_app(prefix: str) -> FastAPI: + app = FastAPI() + register_routers(app, prefix=prefix) + app.add_middleware(ChannelSessionMiddleware) + return app + + +@pytest.mark.unit +def test_message_endpoint_is_prefix_aware(): + app = _build_app("/api/v1") + paths = {route.path for route in app.routes if hasattr(route, "path")} + assert "/api/v1/messages" in paths + assert "/api/v1/v1/messages" not in paths + + +@pytest.mark.unit +def test_message_endpoint_streams_via_start_chat_stream(monkeypatch): + async def fake_start_chat_stream(_chat, _request): + async def _stream(): + yield 'data: {"text":"ok"}\n\n' + + return _stream() + + monkeypatch.setattr( + "app.controller.chat_controller.start_chat_stream", + fake_start_chat_stream, + ) + + app = _build_app("/api/v1") + with TestClient(app) as client: + with client.stream( + "POST", + "/api/v1/messages", + json={"content": "hello"}, + headers={"X-Channel": "web"}, + ) as response: + assert response.status_code == 200 + body = "".join(response.iter_text()) + assert '"text":"ok"' in body + sid = response.headers.get("x-session-id") + assert sid and sid.startswith("sess_") diff --git a/backend/tests/app/controller/test_tool_controller.py b/backend/tests/app/controller/test_tool_controller.py index 22b78e8e8..c6652c4aa 100644 --- a/backend/tests/app/controller/test_tool_controller.py +++ b/backend/tests/app/controller/test_tool_controller.py @@ -12,12 +12,15 @@ # limitations under the License. # ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. 
========= +import os +from types import SimpleNamespace from unittest.mock import AsyncMock, MagicMock, patch import pytest from fastapi import HTTPException from fastapi.testclient import TestClient +from app.controller import tool_controller from app.controller.tool_controller import install_tool @@ -139,6 +142,36 @@ async def test_install_notion_tool_with_complex_tools(self): mock_toolkit.connect.assert_called_once() mock_toolkit.disconnect.assert_called_once() + @pytest.mark.asyncio + async def test_launch_cdp_browser_uses_remote_hands_endpoint( + self, monkeypatch: pytest.MonkeyPatch + ): + class _FakeRemoteHands: + def get_capability_manifest(self): + return {"deployment": "remote_cluster"} + + def acquire_resource( + self, resource_type: str, session_id: str, **kwargs + ): + _ = (resource_type, session_id, kwargs) + return "http://worker-17:9222" + + monkeypatch.delenv("EIGENT_CDP_URL", raising=False) + tool_controller._web_cdp_browser_meta = None + request = SimpleNamespace( + state=SimpleNamespace(hands=_FakeRemoteHands()) + ) + + response = await tool_controller.launch_cdp_browser(request) + + assert response["success"] is True + assert response["endpoint"] == "http://worker-17:9222" + assert response["browser"]["managedBy"] == "remote" + assert response["browser"]["host"] == "worker-17" + assert os.environ["EIGENT_CDP_URL"] == "http://worker-17:9222" + + tool_controller._clear_connected_cdp_browser() + @pytest.mark.integration class TestToolControllerIntegration: @@ -198,6 +231,74 @@ def test_install_notion_tool_endpoint_with_connection_error( assert data["tools"] == [] assert "warning" in data + def test_launch_cdp_browser_endpoint_integration( + self, client: TestClient, monkeypatch: pytest.MonkeyPatch + ): + monkeypatch.delenv("EIGENT_CDP_URL", raising=False) + tool_controller._web_cdp_browser_meta = None + + with patch( + "app.controller.tool_controller.ensure_cdp_browser_available", + return_value=True, + ): + response = 
client.post("/browser/cdp/launch") + + assert response.status_code == 200 + data = response.json() + assert data["success"] is True + assert data["port"] == 9222 + assert data["browser"]["id"] == "web-cdp-9222" + assert tool_controller._get_connected_cdp_port() == 9222 + + tool_controller._clear_connected_cdp_browser() + + def test_connect_list_and_disconnect_cdp_browser_endpoints( + self, client: TestClient, monkeypatch: pytest.MonkeyPatch + ): + monkeypatch.delenv("EIGENT_CDP_URL", raising=False) + tool_controller._web_cdp_browser_meta = None + + with patch( + "app.controller.tool_controller._is_cdp_available", + return_value=True, + ): + connect_response = client.post( + "/browser/cdp/connect", + json={"port": 9333, "name": "External Browser (9333)"}, + ) + + assert connect_response.status_code == 200 + connect_data = connect_response.json() + assert connect_data["success"] is True + assert connect_data["browser"]["port"] == 9333 + assert connect_data["browser"]["isExternal"] is True + + list_response = client.get("/browser/cdp/list") + assert list_response.status_code == 200 + assert list_response.json() == [connect_data["browser"]] + + disconnect_response = client.delete("/browser/cdp/9333") + assert disconnect_response.status_code == 200 + assert disconnect_response.json()["success"] is True + assert tool_controller._get_connected_cdp_port() is None + + def test_connect_cdp_browser_endpoint_returns_error_when_unreachable( + self, client: TestClient, monkeypatch: pytest.MonkeyPatch + ): + monkeypatch.delenv("EIGENT_CDP_URL", raising=False) + tool_controller._web_cdp_browser_meta = None + + with patch( + "app.controller.tool_controller._is_cdp_available", + return_value=False, + ): + response = client.post("/browser/cdp/connect", json={"port": 9555}) + + assert response.status_code == 200 + data = response.json() + assert data["success"] is False + assert "9555" in data["error"] + @pytest.mark.model_backend class TestToolControllerWithRealMCP: diff --git 
a/backend/tests/app/hands/test_cluster_config.py b/backend/tests/app/hands/test_cluster_config.py new file mode 100644 index 000000000..d84b16604 --- /dev/null +++ b/backend/tests/app/hands/test_cluster_config.py @@ -0,0 +1,133 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= + +from pathlib import Path + +import pytest + +from app.hands.cluster_config import ( + HandsClusterConfigError, + load_hands_cluster_config, +) + + +@pytest.mark.unit +def test_load_cluster_config_with_routes_and_env_token(tmp_path: Path): + config = tmp_path / "hands_clusters.toml" + config.write_text( + """ +[defaults] +timeout_seconds = 12 +verify_tls = true +acquire_path = "/acquire" +release_path = "/release" +health_path = "/health" + +[routes] +browser = "browser_pool" +terminal = "terminal_pool" +default = "gateway" + +[clusters.gateway] +base_url = "http://hands-gateway.local" + +[clusters.browser_pool] +base_url = "http://browser-cluster.local" +auth_token_env = "BROWSER_CLUSTER_TOKEN" + +[clusters.terminal_pool] +api = "http://terminal-cluster.local" +verify_tls = false +""".strip(), + encoding="utf-8", + ) + routing = load_hands_cluster_config( + str(config), + read_env=lambda name: "token_browser" + if name == "BROWSER_CLUSTER_TOKEN" + else None, + ) + + assert set(routing.route_to_cluster.keys()) == { + "browser", + "terminal", + 
"default", + } + assert ( + routing.route_to_cluster["browser"].base_url + == "http://browser-cluster.local" + ) + assert routing.route_to_cluster["browser"].auth_token == "token_browser" + assert routing.route_to_cluster["terminal"].verify_tls is False + assert routing.route_to_cluster["default"].timeout_seconds == 12 + + +@pytest.mark.unit +def test_load_cluster_config_without_routes_single_cluster_defaults_to_default( + tmp_path: Path, +): + config = tmp_path / "hands_clusters.toml" + config.write_text( + """ +[clusters.default] +base_url = "http://hands-gateway.local" +""".strip(), + encoding="utf-8", + ) + routing = load_hands_cluster_config(str(config)) + assert set(routing.route_to_cluster.keys()) == {"default"} + assert ( + routing.route_to_cluster["default"].base_url + == "http://hands-gateway.local" + ) + + +@pytest.mark.unit +def test_load_cluster_config_normalizes_fallback_route(tmp_path: Path): + config = tmp_path / "hands_clusters.toml" + config.write_text( + """ +[routes] +fallback = "gateway" + +[clusters.gateway] +base_url = "http://hands-gateway.local" +""".strip(), + encoding="utf-8", + ) + routing = load_hands_cluster_config(str(config)) + assert set(routing.route_to_cluster.keys()) == {"default"} + + +@pytest.mark.unit +def test_load_cluster_config_invalid_route_target_raises(tmp_path: Path): + config = tmp_path / "hands_clusters.toml" + config.write_text( + """ +[routes] +browser = "missing_cluster" + +[clusters.gateway] +base_url = "http://hands-gateway.local" +""".strip(), + encoding="utf-8", + ) + with pytest.raises(HandsClusterConfigError): + load_hands_cluster_config(str(config)) + + +@pytest.mark.unit +def test_load_cluster_config_missing_file_raises(): + with pytest.raises(HandsClusterConfigError): + load_hands_cluster_config("/tmp/non-existent-hands-cluster.toml") diff --git a/backend/tests/app/hands/test_http_hands_cluster.py b/backend/tests/app/hands/test_http_hands_cluster.py new file mode 100644 index 000000000..8bbb3a403 --- 
/dev/null +++ b/backend/tests/app/hands/test_http_hands_cluster.py @@ -0,0 +1,103 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= + +import asyncio +import json + +import httpx +import pytest + +from app.hands.http_hands_cluster import HttpHandsCluster + + +@pytest.mark.unit +def test_http_hands_cluster_acquire_with_wrapped_data(): + def _handler(request: httpx.Request) -> httpx.Response: + assert request.method == "POST" + assert str(request.url) == "http://hands-cluster.local/acquire" + assert request.headers.get("authorization") == "Bearer token_abc" + payload = json.loads(request.content.decode("utf-8")) + assert payload["type"] == "browser" + assert payload["resource_type"] == "browser" + assert payload["session_id"] == "sess_11" + assert payload["tenant_id"] == "tenant_1" + assert payload["port"] == 9444 + return httpx.Response( + 200, + json={ + "data": { + "endpoint": "http://worker-11:9222", + "container_id": "abc123", + } + }, + ) + + cluster = HttpHandsCluster( + base_url="http://hands-cluster.local", + auth_token="token_abc", + transport=httpx.MockTransport(_handler), + ) + acquired = asyncio.run( + cluster.acquire( + resource_type="browser", + session_id="sess_11", + tenant_id="tenant_1", + port=9444, + ) + ) + assert acquired["endpoint"] == "http://worker-11:9222" + assert acquired["container_id"] == 
"abc123" + + +@pytest.mark.unit +def test_http_hands_cluster_release_404_is_ignored(): + def _handler(request: httpx.Request) -> httpx.Response: + assert request.method == "POST" + assert str(request.url) == "http://hands-cluster.local/release" + payload = json.loads(request.content.decode("utf-8")) + assert payload["session_id"] == "sess_missing" + return httpx.Response(404, json={"detail": "not found"}) + + cluster = HttpHandsCluster( + base_url="http://hands-cluster.local", + transport=httpx.MockTransport(_handler), + ) + asyncio.run(cluster.release("sess_missing")) + + +@pytest.mark.unit +def test_http_hands_cluster_health_with_custom_path(): + def _handler(request: httpx.Request) -> httpx.Response: + assert request.method == "GET" + assert str(request.url) == "http://hands-cluster.local/api/v1/healthz" + return httpx.Response( + 200, + json={ + "browser_workers": {"total": 3, "available": 2, "in_use": 1} + }, + ) + + cluster = HttpHandsCluster( + base_url="http://hands-cluster.local", + health_path="/api/v1/healthz", + transport=httpx.MockTransport(_handler), + ) + health = asyncio.run(cluster.health()) + assert health["browser_workers"]["available"] == 2 + + +@pytest.mark.unit +def test_http_hands_cluster_requires_base_url(): + with pytest.raises(ValueError): + HttpHandsCluster(base_url=" ") diff --git a/backend/tests/app/hands/test_path_scope_checks.py b/backend/tests/app/hands/test_path_scope_checks.py new file mode 100644 index 000000000..60de91cee --- /dev/null +++ b/backend/tests/app/hands/test_path_scope_checks.py @@ -0,0 +1,62 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= + +import pytest + +from app.file_access.upload_file_access import UploadFileAccess +from app.hands.full_hands import FullHands +from app.hands.sandbox_hands import SandboxHands + + +@pytest.mark.unit +def test_sandbox_hands_blocks_workspace_prefix_bypass(tmp_path): + workspace = tmp_path / "workspace" + workspace.mkdir() + sibling = tmp_path / "workspace_evil" + sibling.mkdir() + inside = workspace / "safe.txt" + inside.write_text("safe", encoding="utf-8") + outside = sibling / "evil.txt" + outside.write_text("evil", encoding="utf-8") + + hands = SandboxHands(workspace_root=str(workspace)) + assert hands.can_access_filesystem(str(inside)) is True + assert hands.can_access_filesystem(str(outside)) is False + + +@pytest.mark.unit +def test_full_hands_blocks_workspace_prefix_bypass(tmp_path): + workspace = tmp_path / "workspace" + workspace.mkdir() + sibling = tmp_path / "workspace_evil" + sibling.mkdir() + outside = sibling / "evil.txt" + outside.write_text("evil", encoding="utf-8") + + hands = FullHands(workspace_root=str(workspace)) + assert hands.can_access_filesystem(str(outside)) is False + + +@pytest.mark.unit +def test_upload_file_access_blocks_workspace_prefix_bypass(tmp_path): + workspace = tmp_path / "workspace" + workspace.mkdir() + sibling = tmp_path / "workspace_evil" + sibling.mkdir() + outside = sibling / "evil.txt" + outside.write_text("evil", encoding="utf-8") + + file_access = UploadFileAccess(workspace_root=str(workspace)) + with pytest.raises(PermissionError): + 
file_access.read_file(str(outside)) diff --git a/backend/tests/app/hands/test_remote_hands.py b/backend/tests/app/hands/test_remote_hands.py new file mode 100644 index 000000000..6432617ba --- /dev/null +++ b/backend/tests/app/hands/test_remote_hands.py @@ -0,0 +1,93 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= + +import pytest + +from app.hands.remote_hands import RemoteHands + + +class _FakeCluster: + def __init__(self) -> None: + self.released: list[str] = [] + self.acquired: list[tuple[str, str]] = [] + + async def acquire( + self, + resource_type: str, + session_id: str, + tenant_id: str = "default", + **kwargs, + ) -> dict: + _ = (tenant_id, kwargs) + self.acquired.append((resource_type, session_id)) + return {"endpoint": "http://worker-17:9222", "container_id": "abc123"} + + async def release(self, session_id: str) -> None: + self.released.append(session_id) + + async def health(self) -> dict: + return {"browser_workers": {"total": 1, "available": 1, "in_use": 0}} + + +@pytest.mark.unit +def test_remote_hands_browser_fallback_endpoint(): + hands = RemoteHands(cluster=None) + endpoint = hands.acquire_resource("browser", "sess_1", port=9444) + assert endpoint == "http://localhost:9444" + + +@pytest.mark.unit +def test_remote_hands_cluster_acquire_and_release(): + cluster = _FakeCluster() + hands = 
RemoteHands(cluster=cluster) + + endpoint = hands.acquire_resource("browser", "sess_2") + assert endpoint == "http://worker-17:9222" + assert ("browser", "sess_2") in cluster.acquired + + hands.release_resource("browser", "sess_2") + assert "sess_2" in cluster.released + + +@pytest.mark.unit +def test_remote_hands_cluster_allows_non_browser_resource(): + cluster = _FakeCluster() + hands = RemoteHands(cluster=cluster) + + endpoint = hands.acquire_resource("terminal", "sess_terminal") + assert endpoint == "http://worker-17:9222" + assert ("terminal", "sess_terminal") in cluster.acquired + + +@pytest.mark.unit +def test_remote_hands_unknown_resource_raises(): + hands = RemoteHands(cluster=None) + with pytest.raises(ValueError): + hands.acquire_resource("terminal", "sess_3") + + +@pytest.mark.unit +def test_remote_hands_workspace_prefix_bypass_blocked(tmp_path): + workspace = tmp_path / "workspace" + workspace.mkdir() + sibling = tmp_path / "workspace_evil" + sibling.mkdir() + inside = workspace / "ok.txt" + inside.write_text("ok", encoding="utf-8") + outside = sibling / "evil.txt" + outside.write_text("evil", encoding="utf-8") + + hands = RemoteHands(cluster=None, workspace_root=str(workspace)) + assert hands.can_access_filesystem(str(inside)) is True + assert hands.can_access_filesystem(str(outside)) is False diff --git a/backend/tests/app/hands/test_routed_hands_cluster.py b/backend/tests/app/hands/test_routed_hands_cluster.py new file mode 100644 index 000000000..caaffc2b3 --- /dev/null +++ b/backend/tests/app/hands/test_routed_hands_cluster.py @@ -0,0 +1,88 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= + +import asyncio + +import pytest + +from app.hands.routed_hands_cluster import RoutedHandsCluster + + +class _FakeCluster: + def __init__(self, endpoint: str) -> None: + self.endpoint = endpoint + self.acquired: list[tuple[str, str]] = [] + self.released: list[str] = [] + + async def acquire( + self, + resource_type: str, + session_id: str, + tenant_id: str = "default", + **kwargs, + ) -> dict: + _ = (tenant_id, kwargs) + self.acquired.append((resource_type, session_id)) + return {"endpoint": self.endpoint} + + async def release(self, session_id: str) -> None: + self.released.append(session_id) + + async def health(self) -> dict: + return {"endpoint": self.endpoint} + + +@pytest.mark.unit +def test_routed_cluster_routes_by_resource_type(): + browser = _FakeCluster("http://browser-cluster:9222") + terminal = _FakeCluster("http://terminal-cluster:7001") + routed = RoutedHandsCluster({"browser": browser, "terminal": terminal}) + + acquired = asyncio.run( + routed.acquire("terminal", "sess_terminal", tenant_id="default") + ) + assert acquired["endpoint"] == "http://terminal-cluster:7001" + assert acquired["cluster_key"] == "terminal" + assert ("terminal", "sess_terminal") in terminal.acquired + assert browser.acquired == [] + + +@pytest.mark.unit +def test_routed_cluster_release_uses_same_cluster_as_acquire(): + browser = _FakeCluster("http://browser-cluster:9222") + terminal = _FakeCluster("http://terminal-cluster:7001") + routed = RoutedHandsCluster({"browser": browser, "terminal": 
terminal}) + + asyncio.run(routed.acquire("browser", "sess_browser")) + asyncio.run(routed.release("sess_browser")) + assert "sess_browser" in browser.released + assert terminal.released == [] + + +@pytest.mark.unit +def test_routed_cluster_uses_default_when_resource_missing(): + default_cluster = _FakeCluster("http://default-cluster:8100") + routed = RoutedHandsCluster( + {"default": default_cluster, "browser": _FakeCluster("http://b:1")} + ) + + acquired = asyncio.run(routed.acquire("model", "sess_model")) + assert acquired["endpoint"] == "http://default-cluster:8100" + assert acquired["cluster_key"] == "default" + + +@pytest.mark.unit +def test_routed_cluster_requires_non_empty_mapping(): + with pytest.raises(ValueError): + RoutedHandsCluster({}) diff --git a/backend/tests/app/router_layer/test_message_router.py b/backend/tests/app/router_layer/test_message_router.py new file mode 100644 index 000000000..32e7925a0 --- /dev/null +++ b/backend/tests/app/router_layer/test_message_router.py @@ -0,0 +1,109 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. 
========= + +import pytest + +from app.router_layer.message_router import DefaultMessageRouter +from app.router_layer.session_store import MemorySessionStore + + +@pytest.mark.unit +@pytest.mark.asyncio +async def test_resolve_session_reuses_valid_session(monkeypatch): + current = {"now": 1000.0} + monkeypatch.setattr( + "app.router_layer.message_router._now_ts", lambda: current["now"] + ) + + router = DefaultMessageRouter(session_ttl=60) + session_id = await router.resolve_session("web", None, "alice") + + current["now"] = 1020.0 + reused = await router.resolve_session("web", session_id, "alice") + + assert reused == session_id + + +@pytest.mark.unit +@pytest.mark.asyncio +async def test_resolve_session_channel_mismatch_creates_new(monkeypatch): + current = {"now": 1000.0} + monkeypatch.setattr( + "app.router_layer.message_router._now_ts", lambda: current["now"] + ) + + router = DefaultMessageRouter(session_ttl=60) + session_id = await router.resolve_session("web", None, "alice") + + current["now"] = 1010.0 + new_session = await router.resolve_session("desktop", session_id, "alice") + + assert new_session != session_id + + +@pytest.mark.unit +@pytest.mark.asyncio +async def test_resolve_session_expired_creates_new(monkeypatch): + current = {"now": 1000.0} + monkeypatch.setattr( + "app.router_layer.message_router._now_ts", lambda: current["now"] + ) + + router = DefaultMessageRouter(session_ttl=10) + session_id = await router.resolve_session("web", None, "alice") + + current["now"] = 1015.0 + new_session = await router.resolve_session("web", session_id, "alice") + + assert new_session != session_id + + +@pytest.mark.unit +@pytest.mark.asyncio +async def test_memory_session_store_respects_ttl(monkeypatch): + current = {"now": 2000.0} + monkeypatch.setattr( + "app.router_layer.session_store.time.time", + lambda: current["now"], + ) + + store = MemorySessionStore() + await store.set("sess_1", {"session_id": "sess_1"}, ttl=10) + assert await store.get("sess_1") == 
{"session_id": "sess_1"} + + current["now"] = 2011.0 + assert await store.get("sess_1") is None + + +@pytest.mark.unit +@pytest.mark.asyncio +async def test_memory_session_store_lazy_cleanup_on_set(monkeypatch): + current = {"now": 1000.0} + monkeypatch.setattr( + "app.router_layer.session_store.time.time", + lambda: current["now"], + ) + + store = MemorySessionStore() + for i in range(127): + await store.set( + f"sess_old_{i}", {"session_id": f"sess_old_{i}"}, ttl=1 + ) + + current["now"] = 2000.0 + await store.set("sess_live", {"session_id": "sess_live"}, ttl=60) + + # GC should run on the 128th set call and purge expired entries even if never read. + assert len(store._sessions) == 1 + assert await store.get("sess_live") == {"session_id": "sess_live"} diff --git a/backend/tests/conftest.py b/backend/tests/conftest.py index cc27f27ed..ef58488a0 100644 --- a/backend/tests/conftest.py +++ b/backend/tests/conftest.py @@ -17,6 +17,7 @@ import tempfile from collections.abc import AsyncGenerator, Generator from pathlib import Path +from types import SimpleNamespace from unittest.mock import AsyncMock, MagicMock, patch import pytest @@ -193,6 +194,7 @@ def mock_request(): """Mock FastAPI Request object.""" request = AsyncMock() request.is_disconnected = AsyncMock(return_value=False) + request.state = SimpleNamespace() return request diff --git a/docs/core/brain-architecture.md b/docs/core/brain-architecture.md new file mode 100644 index 000000000..2f6139033 --- /dev/null +++ b/docs/core/brain-architecture.md @@ -0,0 +1,146 @@ +--- +title: Brain Architecture +description: Understand how Eigent separates clients, Brain, and runtime capabilities across desktop and web deployments. +icon: brain +--- + +## Overview + +Eigent is evolving toward a Brain-centered architecture. + +Instead of treating the desktop app as the system boundary, Eigent treats the Brain as the primary runtime. 
Clients such as Desktop and Web connect to the same Brain over HTTP and SSE, while runtime capabilities are determined by where the Brain is deployed. + +This shift makes it easier to: + +- support both Desktop and Web as first-class clients +- run Brain independently from Electron +- keep capability boundaries consistent across local and remote deployments +- prepare for future CLI, channel, and remote resource integrations + +## Core Building Blocks + +### Brain + +The Brain is the central runtime for Eigent. It is responsible for: + +- task and chat orchestration +- agent and workforce coordination +- file, tool, MCP, and skill APIs +- session-aware request handling +- runtime capability resolution + +In practice, the Brain is the part of the system that reasons, routes work, and executes actions through the available runtime capabilities. + +### Clients + +Eigent supports multiple client shapes around the same Brain: + +- Desktop +- Web +- future CLI and channel-based clients + +Clients are responsible for presentation and interaction. They do not define what the system is allowed to do. They only define how users connect to and interact with the Brain. + +### Hands + +Hands represent what the Brain can actually operate in its current environment. + +Examples include: + +- terminal execution +- browser control +- filesystem access +- MCP usage + +This is an important architectural choice: Hands are determined by the Brain deployment environment, not by the client type. + +That means a Web client connected to a full local or VM-hosted Brain can still access browser and terminal capabilities, while a client connected to a restricted Brain will see a reduced capability set. + +### Host + +For Desktop, Electron acts as a host layer. 
It provides native integrations such as: + +- window controls +- file picking +- CDP and webview-related integrations +- backend lifecycle support + +The host is intentionally kept separate from Brain logic so shared frontend code can work across Desktop and Web. + +## High-Level Architecture + +```text +Clients + ├─ Desktop + ├─ Web + └─ Future CLI / Channels + │ + │ HTTP / SSE + ▼ + Brain + ├─ Router layer + ├─ Chat / Task / Tool / File APIs + ├─ MCP / Skills services + └─ Hands + ├─ terminal + ├─ browser + ├─ filesystem + └─ MCP +``` + +## Request Flow + +### Desktop + +In Desktop mode, Electron starts and hosts the local Brain. The frontend resolves the local Brain endpoint through the host layer, then uses shared Brain HTTP and SSE APIs for most business flows. + +### Web + +In Web mode, the frontend connects directly to a Brain endpoint. Session metadata is carried through headers, file attachments are uploaded through Brain APIs, and task streaming uses shared SSE transport. + +This makes Web a first-class entry point instead of a limited fallback path. + +## Why Hands Are Environment-Driven + +A common pitfall in multi-client systems is tying capability boundaries to the client type. 
+ +Eigent avoids that by separating: + +- **channel**: how a client connects and how responses should be adapted +- **hands**: what the Brain can actually do in its runtime environment + +This enables a cleaner model: + +- Desktop does not automatically mean full capability +- Web does not automatically mean restricted capability +- the Brain environment remains the source of truth for runtime power + +## Deployment Modes + +The architecture supports multiple deployment shapes: + +- **Desktop + Local Brain** + - best for local development and full machine access +- **Web + Local Brain** + - useful for frontend/backend separation and local web usage +- **Web + Cloud or VM Brain** + - allows browser-based access to a remotely hosted Brain +- **Brain + Remote resource pools** + - enables future remote browser or terminal acquisition patterns + +## What This Architecture Enables + +This architecture lays the foundation for: + +- stronger separation between UI and runtime +- better Web support without breaking Desktop +- clearer capability modeling +- future remote execution and multi-client expansion + +It also reduces the amount of client-specific branching required in the product by moving more system behavior into shared Brain-side abstractions. + +## Current Direction + +The architecture is being rolled out incrementally. Desktop remains supported while Web and standalone Brain flows are being strengthened around the same core abstractions. + +That incremental approach helps Eigent evolve toward a more portable and extensible system without requiring a full rewrite. 
diff --git a/docs/docs.json b/docs/docs.json index d20352a07..99c71db89 100644 --- a/docs/docs.json +++ b/docs/docs.json @@ -59,6 +59,7 @@ "icon": "key", "pages": [ "/core/concepts", + "/core/brain-architecture", "/core/workforce", { "group": "Models", diff --git a/electron/main/index.ts b/electron/main/index.ts index 1b9d5da40..fd3848a3f 100644 --- a/electron/main/index.ts +++ b/electron/main/index.ts @@ -1390,413 +1390,10 @@ function registerIpcHandlers() { } }); - // ======================== skills ======================== - // SKILLS_ROOT, SKILL_FILE, seedDefaultSkillsIfEmpty are defined at module level (used at startup too). - function parseSkillFrontmatter( - content: string - ): { name: string; description: string } | null { - if (!content.startsWith('---')) return null; - const end = content.indexOf('\n---', 3); - const block = end > 0 ? content.slice(4, end) : content.slice(4); - const nameMatch = block.match(/^\s*name\s*:\s*(.+)$/m); - const descMatch = block.match(/^\s*description\s*:\s*(.+)$/m); - const name = nameMatch?.[1]?.trim()?.replace(/^['"]|['"]$/g, ''); - const desc = descMatch?.[1]?.trim()?.replace(/^['"]|['"]$/g, ''); - if (name && desc) return { name, description: desc }; - return null; - } - - const normalizePathForCompare = (value: string) => - process.platform === 'win32' ? value.toLowerCase() : value; - - function assertPathUnderSkillsRoot(targetPath: string): string { - const resolvedRoot = path.resolve(SKILLS_ROOT); - const resolvedTarget = path.resolve(targetPath); - const rootCmp = normalizePathForCompare(resolvedRoot); - const targetCmp = normalizePathForCompare(resolvedTarget); - const rootWithSep = rootCmp.endsWith(path.sep) - ? 
rootCmp - : `${rootCmp}${path.sep}`; - if (targetCmp !== rootCmp && !targetCmp.startsWith(rootWithSep)) { - throw new Error('Path is outside skills directory'); - } - return resolvedTarget; - } - - function resolveSkillDirPath(skillDirName: string): string { - const name = String(skillDirName || '').trim(); - if (!name) { - throw new Error('Skill folder name is required'); - } - return assertPathUnderSkillsRoot(path.join(SKILLS_ROOT, name)); - } - - ipcMain.handle('get-skills-dir', async () => { - try { - if (!existsSync(SKILLS_ROOT)) { - await fsp.mkdir(SKILLS_ROOT, { recursive: true }); - } - await seedDefaultSkillsIfEmpty(); - return { success: true, path: SKILLS_ROOT }; - } catch (error: any) { - log.error('get-skills-dir failed', error); - return { success: false, error: error?.message }; - } - }); - - ipcMain.handle('skills-scan', async () => { - try { - if (!existsSync(SKILLS_ROOT)) { - return { success: true, skills: [] }; - } - await seedDefaultSkillsIfEmpty(); - const entries = await fsp.readdir(SKILLS_ROOT, { withFileTypes: true }); - const exampleSkillsDir = getExampleSkillsSourceDir(); - const skills: Array<{ - name: string; - description: string; - path: string; - scope: string; - skillDirName: string; - isExample: boolean; - }> = []; - for (const e of entries) { - if (!e.isDirectory() || e.name.startsWith('.')) continue; - const skillPath = path.join(SKILLS_ROOT, e.name, SKILL_FILE); - try { - const raw = await fsp.readFile(skillPath, 'utf-8'); - const meta = parseSkillFrontmatter(raw); - if (meta) { - const isExample = existsSync( - path.join(exampleSkillsDir, e.name, SKILL_FILE) - ); - skills.push({ - name: meta.name, - description: meta.description, - path: skillPath, - scope: 'user', - skillDirName: e.name, - isExample, - }); - } - } catch (_) { - // skip invalid or unreadable skill - } - } - return { success: true, skills }; - } catch (error: any) { - log.error('skills-scan failed', error); - return { success: false, error: error?.message, 
skills: [] }; - } - }); - - ipcMain.handle( - 'skill-write', - async (_event, skillDirName: string, content: string) => { - try { - const dir = resolveSkillDirPath(skillDirName); - await fsp.mkdir(dir, { recursive: true }); - await fsp.writeFile(path.join(dir, SKILL_FILE), content, 'utf-8'); - return { success: true }; - } catch (error: any) { - log.error('skill-write failed', error); - return { success: false, error: error?.message }; - } - } - ); - - ipcMain.handle('skill-delete', async (_event, skillDirName: string) => { - try { - const dir = resolveSkillDirPath(skillDirName); - if (!existsSync(dir)) return { success: true }; - await fsp.rm(dir, { recursive: true, force: true }); - return { success: true }; - } catch (error: any) { - log.error('skill-delete failed', error); - return { success: false, error: error?.message }; - } - }); - - ipcMain.handle('skill-read', async (_event, filePath: string) => { - try { - const fullPath = path.isAbsolute(filePath) - ? assertPathUnderSkillsRoot(filePath) - : assertPathUnderSkillsRoot( - path.join(SKILLS_ROOT, filePath, SKILL_FILE) - ); - const content = await fsp.readFile(fullPath, 'utf-8'); - return { success: true, content }; - } catch (error: any) { - log.error('skill-read failed', error); - return { success: false, error: error?.message }; - } - }); - - ipcMain.handle('skill-list-files', async (_event, skillDirName: string) => { - try { - const dir = resolveSkillDirPath(skillDirName); - if (!existsSync(dir)) - return { success: false, error: 'Skill folder not found', files: [] }; - const entries = await fsp.readdir(dir, { withFileTypes: true }); - const files = entries.map((e) => - e.isDirectory() ? 
`${e.name}/` : e.name - ); - return { success: true, files }; - } catch (error: any) { - log.error('skill-list-files failed', error); - return { success: false, error: error?.message, files: [] }; - } - }); - - ipcMain.handle('open-skill-folder', async (_event, skillName: string) => { - try { - const name = String(skillName || '').trim(); - if (!name) return { success: false, error: 'Skill name is required' }; - if (!existsSync(SKILLS_ROOT)) - return { success: false, error: 'Skills dir not found' }; - const entries = await fsp.readdir(SKILLS_ROOT, { withFileTypes: true }); - const nameLower = name.toLowerCase(); - for (const e of entries) { - if (!e.isDirectory() || e.name.startsWith('.')) continue; - const skillPath = path.join(SKILLS_ROOT, e.name, SKILL_FILE); - try { - const raw = await fsp.readFile(skillPath, 'utf-8'); - const meta = parseSkillFrontmatter(raw); - if (meta && meta.name.toLowerCase().trim() === nameLower) { - const dirPath = path.join(SKILLS_ROOT, e.name); - await shell.openPath(dirPath); - return { success: true }; - } - } catch (_) { - continue; - } - } - return { success: false, error: `Skill not found: ${name}` }; - } catch (error: any) { - log.error('open-skill-folder failed', error); - return { success: false, error: error?.message }; - } - }); - - // ======================== skills-config.json handlers ======================== - - function getSkillConfigPath(userId: string): string { - return path.join(os.homedir(), '.eigent', userId, 'skills-config.json'); - } - - async function loadSkillConfig(userId: string): Promise<any> { - const configPath = getSkillConfigPath(userId); - - // Auto-create config file if it doesn't exist - if (!existsSync(configPath)) { - const defaultConfig = { version: 1, skills: {} }; - try { - await fsp.mkdir(path.dirname(configPath), { recursive: true }); - await fsp.writeFile( - configPath, - JSON.stringify(defaultConfig, null, 2), - 'utf-8' - ); - log.info(`Auto-created skills config at ${configPath}`); - return 
defaultConfig; - } catch (error) { - log.error('Failed to create default skills config', error); - return defaultConfig; - } - } - - try { - const content = await fsp.readFile(configPath, 'utf-8'); - return JSON.parse(content); - } catch (error) { - log.error('Failed to load skill config', error); - return { version: 1, skills: {} }; - } - } - - async function saveSkillConfig(userId: string, config: any): Promise<void> { - const configPath = getSkillConfigPath(userId); - await fsp.mkdir(path.dirname(configPath), { recursive: true }); - await fsp.writeFile(configPath, JSON.stringify(config, null, 2), 'utf-8'); - } - - ipcMain.handle('skill-config-load', async (_event, userId: string) => { - try { - const config = await loadSkillConfig(userId); - return { success: true, config }; - } catch (error: any) { - log.error('skill-config-load failed', error); - return { success: false, error: error?.message }; - } - }); - - ipcMain.handle( - 'skill-config-toggle', - async (_event, userId: string, skillName: string, enabled: boolean) => { - try { - const config = await loadSkillConfig(userId); - if (!config.skills[skillName]) { - // Use SkillScope object format - config.skills[skillName] = { - enabled, - scope: { - isGlobal: true, - selectedAgents: [], - }, - addedAt: Date.now(), - isExample: false, - }; - } else { - config.skills[skillName].enabled = enabled; - } - await saveSkillConfig(userId, config); - return { success: true, config: config.skills[skillName] }; - } catch (error: any) { - log.error('skill-config-toggle failed', error); - return { success: false, error: error?.message }; - } - } - ); - - ipcMain.handle( - 'skill-config-update', - async (_event, userId: string, skillName: string, skillConfig: any) => { - try { - const config = await loadSkillConfig(userId); - config.skills[skillName] = { ...skillConfig }; - await saveSkillConfig(userId, config); - return { success: true }; - } catch (error: any) { - log.error('skill-config-update failed', error); - return { 
success: false, error: error?.message }; - } - } - ); - - ipcMain.handle( - 'skill-config-delete', - async (_event, userId: string, skillName: string) => { - try { - const config = await loadSkillConfig(userId); - delete config.skills[skillName]; - await saveSkillConfig(userId, config); - return { success: true }; - } catch (error: any) { - log.error('skill-config-delete failed', error); - return { success: false, error: error?.message }; - } - } - ); - - // Initialize skills config for a user (ensures config file exists) - ipcMain.handle('skill-config-init', async (_event, userId: string) => { - try { - log.info(`[SKILLS-CONFIG] Initializing config for user: ${userId}`); - const config = await loadSkillConfig(userId); - - try { - const exampleSkillsDir = getExampleSkillsSourceDir(); - const defaultConfigPath = path.join( - exampleSkillsDir, - 'default-config.json' - ); - - if (existsSync(defaultConfigPath)) { - const defaultConfigContent = await fsp.readFile( - defaultConfigPath, - 'utf-8' - ); - const defaultConfig = JSON.parse(defaultConfigContent); - - if (defaultConfig.skills) { - let addedCount = 0; - // Merge default skills config with user's existing config - for (const [skillName, skillConfig] of Object.entries( - defaultConfig.skills - )) { - if (!config.skills[skillName]) { - // Add new skill config with current timestamp - config.skills[skillName] = { - ...(skillConfig as any), - addedAt: Date.now(), - }; - addedCount++; - log.info( - `[SKILLS-CONFIG] Initialized config for example skill: ${skillName}` - ); - } - } - - if (addedCount > 0) { - await saveSkillConfig(userId, config); - log.info( - `[SKILLS-CONFIG] Added ${addedCount} example skill configs` - ); - } - } - } else { - log.warn( - `[SKILLS-CONFIG] Default config not found at: ${defaultConfigPath}` - ); - } - } catch (err) { - log.error( - '[SKILLS-CONFIG] Failed to load default config template:', - err - ); - // Continue anyway - user config is still valid - } - - log.info( - `[SKILLS-CONFIG] 
Config initialized with ${Object.keys(config.skills || {}).length} skills` - ); - return { success: true, config }; - } catch (error: any) { - log.error('skill-config-init failed', error); - return { success: false, error: error?.message }; - } - }); - - ipcMain.handle( - 'skill-import-zip', - async ( - _event, - zipPathOrBuffer: string | Buffer | ArrayBuffer | Uint8Array, - replacements?: string[] - ) => - withImportLock(async () => { - // Use typeof check instead of instanceof to handle cross-realm objects - // from Electron IPC (instanceof can fail across context boundaries) - const replacementsSet = replacements - ? new Set(replacements) - : undefined; - const isBufferLike = typeof zipPathOrBuffer !== 'string'; - if (isBufferLike) { - const buf = Buffer.isBuffer(zipPathOrBuffer) - ? zipPathOrBuffer - : Buffer.from( - zipPathOrBuffer instanceof ArrayBuffer - ? zipPathOrBuffer - : (zipPathOrBuffer as any) - ); - const tempPath = path.join( - os.tmpdir(), - `eigent-skill-import-${Date.now()}.zip` - ); - try { - await fsp.writeFile(tempPath, buf); - const result = await importSkillsFromZip(tempPath, replacementsSet); - return result; - } finally { - await fsp.unlink(tempPath).catch(() => {}); - } - } - return importSkillsFromZip(zipPathOrBuffer as string, replacementsSet); - }) - ); + // Skills: all operations via Brain REST API (backend). No IPC. 
// ==================== read file handler ==================== - ipcMain.handle('read-file', async (event, filePath: string) => { + ipcMain.handle('read-file', async (_event, filePath: string) => { try { log.info('Reading file:', filePath); @@ -1805,15 +1402,12 @@ function registerIpcHandlers() { log.error('File does not exist:', filePath); return { success: false, error: 'File does not exist' }; } - - // Check if it's a directory const stats = await fsp.stat(filePath); if (stats.isDirectory()) { log.error('Path is a directory, not a file:', filePath); return { success: false, error: 'Path is a directory, not a file' }; } - // Read file content const fileContent = await fsp.readFile(filePath); log.info('File read successfully:', filePath); diff --git a/electron/main/init.ts b/electron/main/init.ts index 57fb26c87..e6d5aa60f 100644 --- a/electron/main/init.ts +++ b/electron/main/init.ts @@ -40,7 +40,7 @@ import { const execAsync = promisify(exec); -const DEFAULT_SERVER_URL = 'https://dev.eigent.ai/api'; +const DEFAULT_SERVER_URL = 'https://dev.eigent.ai'; function readEnvValue(filePath: string, key: string): string | undefined { try { @@ -81,9 +81,8 @@ function buildLocalServerUrl(proxyUrl: string | undefined): string | undefined { if (!proxyUrl) return undefined; const trimmed = proxyUrl.trim().replace(/\/+$/, ''); if (!trimmed) return undefined; - // Avoid double /api suffix - if (trimmed.endsWith('/api')) return trimmed; - return `${trimmed}/api`; + // Keep SERVER_URL as host/base only; API version belongs to concrete endpoints. 
+ return trimmed.replace(/\/api\/v[12]$/i, ''); } // helper function to get main window @@ -271,9 +270,8 @@ export async function startBackend( } } - const devServerUrl = process.env.VITE_DEV_SERVER_URL; - if (!resolvedServerUrl && devServerUrl) { - const devEnvPath = path.join(app.getAppPath(), '.env.development'); + const devEnvPath = path.join(app.getAppPath(), '.env.development'); + if (!resolvedServerUrl && fs.existsSync(devEnvPath)) { const devProxyEnabled = readEnvValue(devEnvPath, 'VITE_USE_LOCAL_PROXY') === 'true'; const devProxyUrl = readEnvValue(devEnvPath, 'VITE_PROXY_URL'); @@ -329,6 +327,7 @@ export async function startBackend( ...uvEnv, ...proxyEnv, SERVER_URL: serverUrl, + EIGENT_RUNTIME: 'electron', PYTHONIOENCODING: 'utf-8', PYTHONUNBUFFERED: '1', npm_config_cache: npmCacheDir, @@ -354,8 +353,10 @@ export async function startBackend( }; const pythonPath = getVenvPythonPath(venvPath); - // Dev mode: use uv run (ensures sync); Packaged: use venv's python directly (prebuilt has deps) - const useDirectPython = app.isPackaged; + // Use venv's python directly when venv exists (avoids uv run hang in some terminals/Electron spawn). + // Packaged: always direct. Dev: use direct when venv exists, else uv run for first-time sync. 
+ const venvExists = fs.existsSync(path.join(venvPath, 'pyvenv.cfg')); + const useDirectPython = app.isPackaged || venvExists; return new Promise(async (resolve, reject) => { const spawnCmd = useDirectPython diff --git a/electron/main/webview.ts b/electron/main/webview.ts index d7ddb02e5..fda3bde0e 100644 --- a/electron/main/webview.ts +++ b/electron/main/webview.ts @@ -45,11 +45,52 @@ export class WebViewManager { // IPC handlers should be registered once in the main process public async captureWebview(webviewId: string) { - const webContents = this.webViews.get(webviewId); - if (!webContents) return null; + const webViewInfo = this.webViews.get(webviewId); + if (!webViewInfo) return null; - const image = await webContents.view.webContents.capturePage(); - const jpegBuffer = image.toJPEG(10); + const targetContents = webViewInfo.view.webContents; + if (!targetContents || targetContents.isDestroyed()) { + return null; + } + + const debuggerApi = targetContents.debugger; + let attachedHere = false; + + try { + if (!debuggerApi.isAttached()) { + debuggerApi.attach('1.3'); + attachedHere = true; + } + + const result = (await debuggerApi.sendCommand('Page.captureScreenshot', { + format: 'jpeg', + quality: 60, + fromSurface: true, + })) as { data?: string }; + + if (result?.data) { + return 'data:image/jpeg;base64,' + result.data; + } + } catch (error) { + console.warn( + `CDP screenshot failed for webview ${webviewId}, falling back to capturePage:`, + error + ); + } finally { + if (attachedHere && debuggerApi.isAttached()) { + try { + debuggerApi.detach(); + } catch (detachError) { + console.warn( + `Failed to detach debugger for webview ${webviewId}:`, + detachError + ); + } + } + } + + const image = await targetContents.capturePage(); + const jpegBuffer = image.toJPEG(60); return 'data:image/jpeg;base64,' + jpegBuffer.toString('base64'); } diff --git a/electron/preload/index.ts b/electron/preload/index.ts index 910a670d7..2071116cd 100644 --- 
a/electron/preload/index.ts +++ b/electron/preload/index.ts @@ -175,32 +175,7 @@ contextBridge.exposeInMainWorld('electronAPI', { ipcRenderer.off(channel, listener); }; }, - // Skills - getSkillsDir: () => ipcRenderer.invoke('get-skills-dir'), - skillsScan: () => ipcRenderer.invoke('skills-scan'), - skillWrite: (skillDirName: string, content: string) => - ipcRenderer.invoke('skill-write', skillDirName, content), - skillDelete: (skillDirName: string) => - ipcRenderer.invoke('skill-delete', skillDirName), - skillRead: (filePath: string) => ipcRenderer.invoke('skill-read', filePath), - skillListFiles: (skillDirName: string) => - ipcRenderer.invoke('skill-list-files', skillDirName), - skillImportZip: ( - zipPathOrBuffer: string | ArrayBuffer, - replacements?: string[] - ) => ipcRenderer.invoke('skill-import-zip', zipPathOrBuffer, replacements), - openSkillFolder: (skillName: string) => - ipcRenderer.invoke('open-skill-folder', skillName), - skillConfigInit: (userId: string) => - ipcRenderer.invoke('skill-config-init', userId), - skillConfigLoad: (userId: string) => - ipcRenderer.invoke('skill-config-load', userId), - skillConfigToggle: (userId: string, skillName: string, enabled: boolean) => - ipcRenderer.invoke('skill-config-toggle', userId, skillName, enabled), - skillConfigUpdate: (userId: string, skillName: string, skillConfig: any) => - ipcRenderer.invoke('skill-config-update', userId, skillName, skillConfig), - skillConfigDelete: (userId: string, skillName: string) => - ipcRenderer.invoke('skill-config-delete', userId, skillName), + // Skills: all operations via Brain REST API, no IPC }); // --------- Preload scripts loading --------- diff --git a/eslint.config.js b/eslint.config.js index fa8839466..66f196871 100644 --- a/eslint.config.js +++ b/eslint.config.js @@ -58,6 +58,7 @@ export default [ '.vite/**', // Config files 'vite.config.ts', + 'vite.config.*.ts', 'vitest.config.ts', 'tailwind.config.js', 'postcss.config.cjs', @@ -70,6 +71,8 @@ export default [ 
'**/.venv/**', // Prebuilt resources 'resources/prebuilt/**', + // Archive (pre-refactor snapshots) + 'archive/**', ], }, @@ -140,6 +143,34 @@ export default [ ], }, }, + // Guardrail: in src code, always use Host abstraction instead of direct window Electron APIs + { + files: ['src/**/*.{ts,tsx,js,jsx}'], + rules: { + 'no-restricted-properties': [ + 'error', + { + object: 'window', + property: 'electronAPI', + message: + 'Use Host abstraction (useHost/createHost) instead of window.electronAPI', + }, + { + object: 'window', + property: 'ipcRenderer', + message: + 'Use Host abstraction (useHost/createHost) instead of window.ipcRenderer', + }, + ], + }, + }, + // Single allowed bridge for reading global Electron APIs + { + files: ['src/host/createHost.ts'], + rules: { + 'no-restricted-properties': 'off', + }, + }, // Prettier config (must be last to override conflicting rules) prettier, ]; diff --git a/index.html b/index.html index e000c9cd4..bde6b479d 100644 --- a/index.html +++ b/index.html @@ -46,7 +46,7 @@ https://registry.npmmirror.com; worker-src 'self' blob:; child-src 'self' blob:; - frame-src 'self' localfile: blob: data:; + frame-src 'self' localfile: blob: data: http://localhost:5001 http://127.0.0.1:5001; " /> diff --git a/package.json b/package.json index 264deabbc..8d14d5f32 100644 --- a/package.json +++ b/package.json @@ -16,6 +16,7 @@ "prepare": "husky", "clean-cache": "rimraf node_modules/.vite", "dev": "npm run clean-cache && vite", + "dev:web": "npm run clean-cache && vite --config vite.config.web.ts", "preinstall-deps": "node scripts/preinstall-deps.js", "fix-venv-paths": "node scripts/fix-venv-paths.js", "fix-symlinks": "node scripts/fix-symlinks.js", @@ -35,6 +36,8 @@ "build:linux": "npm run prebuild && electron-builder --linux --publish never", "build:all": "npm run prebuild && electron-builder --mac --win --linux --publish never", "preview": "vite preview", + "build:web": "vite build --config vite.config.web.ts", + "preview:web": "vite preview 
--config vite.config.web.ts", "pretest": "vite build --mode=test", "test": "vitest run", "test:watch": "vitest", @@ -87,6 +90,7 @@ "dompurify": "^3.2.7", "electron-log": "^5.4.0", "electron-updater": "^6.3.9", + "@emotion/is-prop-valid": "^1.3.1", "embla-carousel-autoplay": "^8.6.0", "embla-carousel-react": "^8.6.0", "framer-motion": "^12.17.0", diff --git a/scripts/check-electron-access.sh b/scripts/check-electron-access.sh new file mode 100644 index 000000000..9736265e0 --- /dev/null +++ b/scripts/check-electron-access.sh @@ -0,0 +1,22 @@ +#!/usr/bin/env bash + +set -euo pipefail + +# Guardrail for web separation: +# only src/host/createHost.ts may read window.electronAPI/window.ipcRenderer. +violations="$( + rg -n \ + -e 'window\.electronAPI' \ + -e 'window\.ipcRenderer' \ + --glob '*.{ts,tsx,js,jsx}' \ + --glob '!src/host/createHost.ts' \ + src || true +)" + +if [[ -n "${violations}" ]]; then + echo "Found forbidden direct Electron window access outside Host bridge:" + echo "${violations}" + exit 1 +fi + +echo "Electron window access guard passed." diff --git a/scripts/smoke-web-local-brain.sh b/scripts/smoke-web-local-brain.sh new file mode 100644 index 000000000..9a4943050 --- /dev/null +++ b/scripts/smoke-web-local-brain.sh @@ -0,0 +1,130 @@ +#!/usr/bin/env bash +set -euo pipefail + +ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)" +TMP_DIR="$(mktemp -d)" +BACKEND_LOG="${TMP_DIR}/backend.log" +FRONTEND_LOG="${TMP_DIR}/frontend.log" +BACKEND_PID="" +FRONTEND_PID="" + +BRAIN_HOST="${BRAIN_HOST:-127.0.0.1}" +BRAIN_PORT="${BRAIN_PORT:-5001}" +WEB_HOST="${WEB_HOST:-127.0.0.1}" +WEB_PORT="${WEB_PORT:-5173}" + +cleanup() { + local exit_code=$? 
+ if [[ -n "${FRONTEND_PID}" ]] && kill -0 "${FRONTEND_PID}" >/dev/null 2>&1; then + kill "${FRONTEND_PID}" >/dev/null 2>&1 || true + wait "${FRONTEND_PID}" 2>/dev/null || true + fi + if [[ -n "${BACKEND_PID}" ]] && kill -0 "${BACKEND_PID}" >/dev/null 2>&1; then + kill "${BACKEND_PID}" >/dev/null 2>&1 || true + wait "${BACKEND_PID}" 2>/dev/null || true + fi + if [[ ${exit_code} -ne 0 ]]; then + echo + echo "[smoke] backend log:" + cat "${BACKEND_LOG}" || true + echo + echo "[smoke] frontend log:" + cat "${FRONTEND_LOG}" || true + fi + rm -rf "${TMP_DIR}" +} +trap cleanup EXIT INT TERM + +wait_http() { + local url="$1" + local label="$2" + local timeout_seconds="${3:-120}" + local started_at + started_at="$(date +%s)" + + while true; do + if curl --silent --show-error --output /dev/null --fail "${url}"; then + return 0 + fi + if (( "$(date +%s)" - started_at > timeout_seconds )); then + echo "[smoke] timeout waiting for ${label}: ${url}" >&2 + return 1 + fi + sleep 1 + done +} + +assert_html_doc() { + local file="$1" + local label="$2" + if ! grep -Eiq "<!doctype html|<html" "${file}"; then + echo "[smoke] ${label} response is not an HTML document" >&2 + return 1 + fi +} + +echo "[smoke] starting backend on ${BRAIN_HOST}:${BRAIN_PORT}" +( + cd "${ROOT_DIR}/backend" + EIGENT_BRAIN_HOST="${BRAIN_HOST}" \ + EIGENT_BRAIN_PORT="${BRAIN_PORT}" \ + EIGENT_DEBUG="false" \ + uv run python main.py >"${BACKEND_LOG}" 2>&1 +) & +BACKEND_PID=$! + +wait_http "http://${BRAIN_HOST}:${BRAIN_PORT}/health" "backend health" + +echo "[smoke] checking session header + health detail" +curl --silent --show-error \ + --header "X-Channel: web" \ + --dump-header "${TMP_DIR}/health_headers.txt" \ + --output "${TMP_DIR}/health.json" \ + "http://${BRAIN_HOST}:${BRAIN_PORT}/health" + +if ! 
grep -qi '^x-session-id:' "${TMP_DIR}/health_headers.txt"; then + echo "[smoke] missing X-Session-ID header in /health response" >&2 + exit 1 +fi + +curl --silent --show-error \ + --header "X-Channel: web" \ + --output "${TMP_DIR}/health_detail.json" \ + "http://${BRAIN_HOST}:${BRAIN_PORT}/health?detail=true" + +python3 - "${TMP_DIR}/health_detail.json" <<'PY' +import json +import sys +from pathlib import Path + +payload = json.loads(Path(sys.argv[1]).read_text(encoding="utf-8")) +assert isinstance(payload, dict), "health detail payload must be object" +assert "capabilities" in payload, "health detail missing capabilities" +assert isinstance(payload["capabilities"], dict), "capabilities must be object" +PY + +echo "[smoke] starting web frontend on ${WEB_HOST}:${WEB_PORT}" +( + cd "${ROOT_DIR}" + npm run dev:web -- --host "${WEB_HOST}" --port "${WEB_PORT}" >"${FRONTEND_LOG}" 2>&1 +) & +FRONTEND_PID=$! + +wait_http "http://${WEB_HOST}:${WEB_PORT}/" "frontend root" + +curl --silent --show-error \ + --output "${TMP_DIR}/web_root.html" \ + "http://${WEB_HOST}:${WEB_PORT}/" +assert_html_doc "${TMP_DIR}/web_root.html" "web root" + +status_code="$(curl --silent --show-error \ + --output "${TMP_DIR}/web_route.html" \ + --write-out "%{http_code}" \ + "http://${WEB_HOST}:${WEB_PORT}/project/smoke-route")" +if [[ "${status_code}" != "200" ]]; then + echo "[smoke] browser-router route fallback failed: status=${status_code}" >&2 + exit 1 +fi +assert_html_doc "${TMP_DIR}/web_route.html" "web route fallback" + +echo "[smoke] PASS: web + local brain smoke checks completed" diff --git a/src/App.tsx b/src/App.tsx index ebfca2717..fceb87b1e 100644 --- a/src/App.tsx +++ b/src/App.tsx @@ -12,6 +12,7 @@ // limitations under the License. // ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. 
========= +import { useHost } from '@/host'; import { queryClient } from '@/lib/queryClient'; import AppRoutes from '@/routers/index'; import { stackClientApp } from '@/stack/client'; @@ -29,6 +30,7 @@ import { useAuthStore } from './store/authStore'; const HAS_STACK_KEYS = hasStackKeys(); function App() { + const host = useHost(); const navigate = useNavigate(); const { setInitState } = useAuthStore(); const { token } = useAuthStore(); @@ -69,14 +71,14 @@ function App() { } }; - window.ipcRenderer?.on('auth-share-token-received', handleShareCode); - window.electronAPI?.onUpdateNotification(handleUpdateNotification); + host?.ipcRenderer?.on('auth-share-token-received', handleShareCode); + host?.electronAPI?.onUpdateNotification(handleUpdateNotification); return () => { - window.ipcRenderer?.off('auth-share-token-received', handleShareCode); - window.electronAPI?.removeAllListeners('update-notification'); + host?.ipcRenderer?.off('auth-share-token-received', handleShareCode); + host?.electronAPI?.removeAllListeners('update-notification'); }; - }, [navigate, setInitState]); + }, [host, navigate, setInitState]); // render wrapper const renderWrapper = (children: React.ReactNode) => { diff --git a/src/api/brain.ts b/src/api/brain.ts new file mode 100644 index 000000000..46b85725d --- /dev/null +++ b/src/api/brain.ts @@ -0,0 +1,179 @@ +// ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= + +/** + * Brain REST API - MCP and Skills endpoints. + * All calls go through Brain HTTP API (getBaseURL). No IPC fallback. + */ + +import { + fetchDelete, + fetchGet, + fetchPost, + fetchPostForm, + fetchPut, +} from './http'; + +export async function mcpList(): Promise<{ + mcpServers: Record; +}> { + const res = await fetchGet('/mcp/list'); + return res && typeof res.mcpServers === 'object' ? res : { mcpServers: {} }; +} + +export async function mcpInstall( + name: string, + mcp: Record +): Promise<{ success: boolean }> { + return fetchPost('/mcp/install', { name, mcp }); +} + +export async function mcpRemove(name: string): Promise<{ success: boolean }> { + return fetchDelete(`/mcp/${encodeURIComponent(name)}`); +} + +export async function mcpUpdate( + name: string, + mcp: Record +): Promise<{ success: boolean }> { + return fetchPut(`/mcp/${encodeURIComponent(name)}`, mcp); +} + +export async function skillsScan(): Promise<{ + success: boolean; + skills: Array<{ + name: string; + description: string; + path: string; + scope: string; + skillDirName: string; + isExample: boolean; + }>; +}> { + const res = await fetchGet('/skills'); + return res?.skills !== undefined ? res : { success: true, skills: [] }; +} + +export async function skillWrite( + skillDirName: string, + content: string +): Promise<{ success: boolean }> { + return fetchPost(`/skills/${encodeURIComponent(skillDirName)}`, { content }); +} + +export async function skillImportZip( + zipBuffer: ArrayBuffer, + replacements?: string[] +): Promise<{ + success: boolean; + error?: string; + conflicts?: Array<{ folderName: string; skillName: string }>; +}> { + const formData = new FormData(); + formData.append('file', new Blob([zipBuffer]), 'skill.zip'); + if (replacements?.length) { + formData.append('replacements', replacements.join(',')); + } + const res = await fetchPostForm('/skills/import', formData); + return (res ?? 
{ success: false, error: 'Import failed' }) as { + success: boolean; + error?: string; + conflicts?: Array<{ folderName: string; skillName: string }>; + }; +} + +export async function skillRead( + skillDirName: string +): Promise<{ success: boolean; content: string }> { + return fetchGet(`/skills/${encodeURIComponent(skillDirName)}`); +} + +export async function skillDelete( + skillDirName: string +): Promise<{ success: boolean }> { + return fetchDelete(`/skills/${encodeURIComponent(skillDirName)}`); +} + +export async function skillListFiles( + skillDirName: string +): Promise<{ success: boolean; files: string[] }> { + const res = await fetchGet( + `/skills/${encodeURIComponent(skillDirName)}/files` + ); + return res?.files !== undefined ? res : { success: true, files: [] }; +} + +export async function skillGetPathByName( + skillName: string +): Promise<{ path: string } | null> { + const res = await fetchGet( + `/skills/path?name=${encodeURIComponent(skillName)}` + ); + return res?.path ? { path: res.path } : null; +} + +// --- Skill config (REST API, no Electron coupling) --- + +export async function skillConfigLoad( + userId: string +): Promise<{ success: boolean; config?: Record }> { + const res = await fetchGet( + `/skills/config?user_id=${encodeURIComponent(userId)}` + ); + return res?.config !== undefined ? res : { success: false }; +} + +export async function skillConfigInit( + userId: string +): Promise<{ success: boolean; config?: Record }> { + const res = await fetchPost('/skills/config/init', { user_id: userId }); + return res?.config !== undefined ? 
res : { success: false }; +} + +export async function skillConfigUpdate( + userId: string, + skillName: string, + config: { + enabled?: boolean; + scope?: { isGlobal?: boolean; selectedAgents?: string[] }; + addedAt?: number; + isExample?: boolean; + } +): Promise<{ success: boolean }> { + return fetchPut(`/skills/config/${encodeURIComponent(skillName)}`, { + user_id: userId, + ...config, + }); +} + +export async function skillConfigDelete( + userId: string, + skillName: string +): Promise<{ success: boolean }> { + return fetchDelete( + `/skills/config/${encodeURIComponent(skillName)}?user_id=${encodeURIComponent(userId)}` + ); +} + +export async function skillConfigToggle( + userId: string, + skillName: string, + enabled: boolean +): Promise<{ success: boolean; config?: Record }> { + const res = await fetchPost( + `/skills/config/${encodeURIComponent(skillName)}/toggle`, + { user_id: userId, enabled } + ); + return res ?? { success: false }; +} diff --git a/src/api/http.ts b/src/api/http.ts index 70fa418ce..da84ed9f8 100644 --- a/src/api/http.ts +++ b/src/api/http.ts @@ -16,19 +16,91 @@ import { showCreditsToast } from '@/components/Toast/creditsToast'; import { showStorageToast } from '@/components/Toast/storageToast'; import { showTrafficToast } from '@/components/Toast/trafficToast'; import { getAuthStore } from '@/store/authStore'; +import { + getConnectionConfig, + setConnectionConfig, +} from '@/store/connectionStore'; +import { + EventSourceMessage, + fetchEventSource, +} from '@microsoft/fetch-event-source'; const defaultHeaders = { 'Content-Type': 'application/json', }; -let baseUrl = ''; +export function getDefaultBrainEndpoint(): string { + const envEndpoint = import.meta.env.VITE_BRAIN_ENDPOINT; + if (envEndpoint && typeof envEndpoint === 'string') { + return envEndpoint.replace(/\/$/, ''); + } + if (import.meta.env.DEV) { + return 'http://localhost:5001'; + } + return ''; +} + +function persistSessionIdFromResponse(response: Response): void { + const 
sessionId = response.headers.get('x-session-id'); + if (!sessionId) { + return; + } + const current = getConnectionConfig().sessionId; + if (current !== sessionId) { + setConnectionConfig({ sessionId }); + } +} + +function shouldAttachAuthHeader(url: string): boolean { + return !url.includes('http://') && !url.includes('https://'); +} + +function buildBrainHeaders( + url: string, + customHeaders: Record = {}, + includeContentType = true +): Record { + const { token } = getAuthStore(); + const conn = getConnectionConfig(); + const headers: Record = { + ...(includeContentType ? defaultHeaders : {}), + 'X-Channel': conn.channel, + ...customHeaders, + }; + if (conn.sessionId) { + headers['X-Session-ID'] = conn.sessionId; + } + if (token && shouldAttachAuthHeader(url)) { + headers['Authorization'] = `Bearer ${token}`; + } + return headers; +} + +/** Reset cached baseUrl (e.g. when backend restarts). */ +export function resetBaseURL(): void { + setConnectionConfig({ brainEndpoint: '' }); +} + export async function getBaseURL() { - if (baseUrl) { - return baseUrl; + const cfg = getConnectionConfig(); + if (cfg.brainEndpoint) { + return cfg.brainEndpoint.replace(/\/$/, ''); + } + // Electron: get port from IPC + const port = await (window as any).ipcRenderer?.invoke('get-backend-port'); + if (port && port > 0) { + const resolved = `http://localhost:${port}`; + setConnectionConfig({ brainEndpoint: resolved }); + return resolved; } - const port = await window.ipcRenderer.invoke('get-backend-port'); - baseUrl = `http://localhost:${port}`; - return baseUrl; + // Pure Web: use VITE_BRAIN_ENDPOINT (dev default http://localhost:5001) + const envEndpoint = getDefaultBrainEndpoint(); + if (envEndpoint && typeof envEndpoint === 'string') { + const resolved = envEndpoint.replace(/\/$/, ''); // trim trailing slash + setConnectionConfig({ brainEndpoint: resolved }); + return resolved; + } + return ''; } async function fetchRequest( @@ -39,17 +111,7 @@ async function fetchRequest( ): 
Promise { const baseURL = await getBaseURL(); const fullUrl = `${baseURL}${url}`; - const { token } = getAuthStore(); - - const headers: Record = { - ...defaultHeaders, - ...customHeaders, - }; - - // Cases without token: url is a complete http:// path - if (!url.includes('http://') && token) { - headers['Authorization'] = `Bearer ${token}`; - } + const headers = buildBrainHeaders(url, customHeaders); const options: RequestInit = { method, @@ -82,23 +144,34 @@ async function handleResponse( ): Promise { try { const res = await responsePromise; + persistSessionIdFromResponse(res); if (res.status === 204) { return { code: 0, text: '' }; } const contentType = res.headers.get('content-type') || ''; - if (res.body && !contentType.includes('application/json')) { - return { - isStream: true, - body: res.body, - reader: res.body.getReader(), - }; + if (!contentType.includes('application/json')) { + if (!res.ok) { + const detail = await res.text().catch(() => ''); + const msg = detail?.trim() || `HTTP ${res.status}`; + const err = new Error(msg); + (err as any).status = res.status; + (err as any).response = res; + throw err; + } + if (res.body) { + return { + isStream: true, + body: res.body, + reader: res.body.getReader(), + }; + } + return null; } const resData = await res.json(); if (!resData) { return null; } - const { code, text } = resData; // showCreditsToast() if (code === 1 || code === 300) { @@ -123,9 +196,13 @@ async function handleResponse( } if (!res.ok) { - const err: any = new Error( - resData?.detail || resData?.message || `HTTP error ${res.status}` - ); + const detail = resData?.detail; + const msg = + (Array.isArray(detail) ? detail[0] : detail) || + resData?.message || + `HTTP ${res.status}`; + const err: any = new Error(typeof msg === 'string' ? 
msg : String(msg)); + err.status = res.status; err.response = { data: resData, status: res.status }; throw err; } @@ -163,6 +240,78 @@ export const fetchPut = (url: string, data?: any, headers?: any) => export const fetchDelete = (url: string, data?: any, headers?: any) => fetchRequest('DELETE', url, data, headers); +/** POST FormData to Brain base URL (for file uploads). */ +export async function fetchPostForm( + url: string, + formData: FormData, + customHeaders: Record = {} +): Promise { + const baseURL = await getBaseURL(); + const fullUrl = `${baseURL}${url}`; + const headers = buildBrainHeaders(url, customHeaders, false); + return handleResponse( + fetch(fullUrl, { method: 'POST', headers, body: formData }) + ); +} + +export async function uploadFileToBrain(file: globalThis.File): Promise<{ + file_id: string; + filename: string; + size: number; +}> { + const formData = new FormData(); + formData.append('file', file); + return fetchPostForm('/files', formData); +} + +export interface SSETransportOptions { + url: string; + method?: 'GET' | 'POST'; + body?: Record | string; + signal?: AbortSignal; + extraHeaders?: Record; + openWhenHidden?: boolean; + onmessage: (event: EventSourceMessage) => void | Promise; + onopen?: (response: Response) => void | Promise; + onerror?: (err: any) => number | null | undefined | void; + onclose?: () => void; +} + +export async function sseTransport( + options: SSETransportOptions +): Promise { + const baseURL = await getBaseURL(); + const fullUrl = + options.url.startsWith('http://') || options.url.startsWith('https://') + ? options.url + : `${baseURL}${options.url}`; + + const headers = buildBrainHeaders(options.url, options.extraHeaders); + const body = + typeof options.body === 'string' + ? options.body + : options.body + ? JSON.stringify(options.body) + : undefined; + + await fetchEventSource(fullUrl, { + method: options.method || 'POST', + openWhenHidden: options.openWhenHidden ?? 
true, + signal: options.signal, + headers, + body, + onmessage: options.onmessage, + async onopen(response) { + persistSessionIdFromResponse(response); + if (options.onopen) { + await options.onopen(response); + } + }, + onerror: options.onerror, + onclose: options.onclose, + }); +} + // =============== porxy =============== // get proxy base URL @@ -170,11 +319,9 @@ async function getProxyBaseURL() { const isDev = import.meta.env.DEV; if (isDev) { - const proxyUrl = import.meta.env.VITE_PROXY_URL; - if (!proxyUrl) { - return 'http://localhost:3001'; - } - return proxyUrl; + // Use empty base so request goes to same origin; Vite proxy forwards /api to VITE_PROXY_URL + // This avoids CORS when running dev:web (browser at 5173, server at 3001) + return ''; } else { const baseUrl = import.meta.env.VITE_BASE_URL; if (!baseUrl) { diff --git a/src/client/desktop/README.md b/src/client/desktop/README.md new file mode 100644 index 000000000..3f1662a7c --- /dev/null +++ b/src/client/desktop/README.md @@ -0,0 +1,6 @@ +# Desktop client (Electron) + +Desktop-only components and logic. Uses `useHost()` from `@/host`; when `host.electronAPI` is present. + +- WindowControls, HardwareBridge-related UI +- IPC handlers for window, file, CDP, etc. diff --git a/src/client/index.ts b/src/client/index.ts new file mode 100644 index 000000000..85d04ef4e --- /dev/null +++ b/src/client/index.ts @@ -0,0 +1,23 @@ +// ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. +// ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +// Client adapters: desktop, web, cli, browser_extension, channel adapters. +// See docs/design/04-client.md. + +export { + getClientType, + isDesktop, + isElectron, + isWeb, + type ClientType, +} from './platform'; diff --git a/src/client/platform.ts b/src/client/platform.ts new file mode 100644 index 000000000..9c717066f --- /dev/null +++ b/src/client/platform.ts @@ -0,0 +1,49 @@ +// ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +// Client platform detection. See docs/design/04-client.md. + +export type ClientType = + | 'desktop' + | 'web' + | 'cli' + | 'browser_extension' + | 'whatsapp' + | 'telegram' + | 'slack' + | 'discord' + | 'lark'; + +/** True when running inside Electron (desktop app). */ +export function isElectron(): boolean { + return ( + typeof window !== 'undefined' && + !!(window as any).electronAPI && + !!(window as any).ipcRenderer + ); +} + +/** Current client type. Web build = 'web'; Electron = 'desktop'. 
*/ +export function getClientType(): ClientType { + if (typeof window === 'undefined') return 'web'; + if (isElectron()) return 'desktop'; + return 'web'; +} + +export function isDesktop(): boolean { + return getClientType() === 'desktop'; +} + +export function isWeb(): boolean { + return getClientType() === 'web'; +} diff --git a/src/client/web/README.md b/src/client/web/README.md new file mode 100644 index 000000000..ed547c596 --- /dev/null +++ b/src/client/web/README.md @@ -0,0 +1,7 @@ +# Web client + +Web-specific components. Used when `host.electronAPI` is null (from `useHost()`). + +- No Electron API +- Brain endpoint from VITE_BRAIN_ENDPOINT or user config +- File upload UI (no local file picker) diff --git a/src/components/AddWorker/ToolSelect.tsx b/src/components/AddWorker/ToolSelect.tsx index 5bc863484..35437aedb 100644 --- a/src/components/AddWorker/ToolSelect.tsx +++ b/src/components/AddWorker/ToolSelect.tsx @@ -12,6 +12,7 @@ // limitations under the License. // ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. 
========= +import { mcpInstall } from '@/api/brain'; import { fetchGet, fetchPost, @@ -22,6 +23,7 @@ import { import githubIcon from '@/assets/github.svg'; import IntegrationList from '@/components/IntegrationList'; import { Badge } from '@/components/ui/badge'; +import { useHost } from '@/host'; import { capitalizeFirstLetter, getProxyBaseURL } from '@/lib'; import { useAuthStore } from '@/store/authStore'; import { CircleAlert, Store, X } from 'lucide-react'; @@ -66,6 +68,8 @@ const ToolSelect = forwardRef< { installMcp: (id: number, env?: any, activeMcp?: any) => Promise }, ToolSelectProps >(({ onShowEnvConfig, onSelectedToolsChange, initialSelectedTools }, ref) => { + const host = useHost(); + const electronAPI = host?.electronAPI; const { t } = useTranslation(); // state management - remove internal selected state, use parent passed initialSelectedTools const [keyword, setKeyword] = useState(''); @@ -408,8 +412,8 @@ const ToolSelect = forwardRef< await proxyFetchPost('/api/v1/configs', configPayload); } - if (window.electronAPI?.envWrite) { - await window.electronAPI.envWrite(email, { key: envVarKey, value }); + if (electronAPI?.envWrite) { + await electronAPI.envWrite(email, { key: envVarKey, value }); } }; // MCP install related @@ -635,20 +639,16 @@ const ToolSelect = forwardRef< await proxyFetchPost('/api/v1/mcp/install?mcp_id=' + id); setInstalled((prev) => ({ ...prev, [id]: true })); const installedMcp = mcpList.find((mcp) => mcp.id === id); - if (window.ipcRenderer && installedMcp) { - const env: { [key: string]: string } = {}; + if (installedMcp?.install_command) { + const installCmd = { ...installedMcp.install_command }; if (envValue) { + const env: { [key: string]: string } = {}; Object.keys(envValue).map((key) => { env[key] = envValue[key]?.value; }); - installedMcp.install_command!.env = env; + installCmd.env = env; } - - await window.ipcRenderer.invoke( - 'mcp-install', - installedMcp.key, - installedMcp.install_command - ); + await 
mcpInstall(installedMcp.key, installCmd); } // after install successfully, automatically add to selected list if (installedMcp) { diff --git a/src/components/AddWorker/index.tsx b/src/components/AddWorker/index.tsx index 64fc05f49..823b5a453 100644 --- a/src/components/AddWorker/index.tsx +++ b/src/components/AddWorker/index.tsx @@ -12,6 +12,7 @@ // limitations under the License. // ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +import { mcpList as fetchMcpConfig } from '@/api/brain'; import { fetchPost } from '@/api/http'; import githubIcon from '@/assets/github.svg'; import { Button } from '@/components/ui/button'; @@ -297,9 +298,11 @@ export function AddWorker({ setNameError(t('workforce.worker-name-already-exists')); return; } - let mcpLocal: any = {}; - if (window.ipcRenderer) { - mcpLocal = await window.ipcRenderer.invoke('mcp-list'); + let mcpLocal: any = { mcpServers: {} }; + try { + mcpLocal = await fetchMcpConfig(); + } catch { + // Backend may not be ready } const localTool: string[] = []; const mcpList: string[] = []; diff --git a/src/components/BrowserAgentWorkspace/index.tsx b/src/components/BrowserAgentWorkspace/index.tsx index 6b393f64b..3c704d584 100644 --- a/src/components/BrowserAgentWorkspace/index.tsx +++ b/src/components/BrowserAgentWorkspace/index.tsx @@ -14,6 +14,7 @@ import { fetchPut } from '@/api/http'; import useChatStoreAdapter from '@/hooks/useChatStoreAdapter'; +import { useHost } from '@/host'; import { TaskStatus } from '@/types/constants'; import { ArrowDown, @@ -34,6 +35,7 @@ import { Button } from '../ui/button'; export default function BrowserAgentWorkspace() { //Get Chatstore for the active project's task const { chatStore, projectStore } = useChatStoreAdapter(); + const host = useHost(); const [isSingleMode, setIsSingleMode] = useState(false); const scrollContainerRef = useRef(null); @@ -104,14 +106,14 @@ export default function BrowserAgentWorkspace() { const webviewContainer = 
document.getElementById('webview-container'); if (webviewContainer) { const rect = webviewContainer.getBoundingClientRect(); - window.electronAPI.setSize({ + host?.electronAPI?.setSize?.({ x: rect.left, y: rect.top, width: rect.width, height: rect.height, }); } - }, []); + }, [host]); const handleTakeControl = (id: string) => { console.log('handleTakeControl', id); @@ -123,7 +125,7 @@ export default function BrowserAgentWorkspace() { setTimeout(() => { getSize(); // show corresponding webview - window.electronAPI.showWebview(id); + host?.electronAPI?.showWebview?.(id); }, 400); }; @@ -152,15 +154,15 @@ export default function BrowserAgentWorkspace() { const [_url, setUrl] = useState(''); useEffect(() => { - window.ipcRenderer?.on('url-updated', (_event: any, newUrl: any) => { + host?.ipcRenderer?.on('url-updated', (_event: any, newUrl: any) => { setUrl(newUrl); }); // optional: clear listener when uninstall return () => { - window.ipcRenderer.removeAllListeners('url-updated'); + host?.ipcRenderer?.removeAllListeners?.('url-updated'); }; - }, []); + }, [host]); if (!chatStore) { return
Loading...
; @@ -176,7 +178,7 @@ export default function BrowserAgentWorkspace() { action: 'resume', }); setIsTakeControl(false); - window.electronAPI.hideAllWebview(); + host?.electronAPI?.hideAllWebview?.(); }} size="sm" variant="success" diff --git a/src/components/ChatBox/BottomBox/InputBox.tsx b/src/components/ChatBox/BottomBox/InputBox.tsx index be5892709..56291c0f4 100644 --- a/src/components/ChatBox/BottomBox/InputBox.tsx +++ b/src/components/ChatBox/BottomBox/InputBox.tsx @@ -20,6 +20,7 @@ import { PopoverTrigger, } from '@/components/ui/popover'; import { Textarea } from '@/components/ui/textarea'; +import { useHost } from '@/host'; import { processDroppedFiles } from '@/lib/fileUtils'; import { cn } from '@/lib/utils'; import type { TriggerInput } from '@/types'; @@ -48,6 +49,8 @@ let activeExpandedDialogId: string | null = null; export interface FileAttachment { fileName: string; filePath: string; + fileId?: string; + source?: 'local' | 'upload'; } /** @@ -142,6 +145,7 @@ export const Inputbox = ({ onTriggerCreated, hideExpandButton = false, }: InputboxProps) => { + const host = useHost(); const { t } = useTranslation(); const internalTextareaRef = useRef(null); const textareaRef = externalTextareaRef || internalTextareaRef; @@ -155,22 +159,25 @@ export const Inputbox = ({ const [isExpandedDialogOpen, setIsExpandedDialogOpen] = useState(false); const [triggerDialogOpen, setTriggerDialogOpen] = useState(false); const expandedTextareaRef = useRef(null); - const instanceIdRef = useRef( - `inputbox-${Math.random().toString(36).substr(2, 9)}` + const [instanceId] = useState( + () => `inputbox-${crypto.randomUUID().slice(0, 9)}` ); // Handle dialog open/close with singleton tracking - const handleExpandedDialogChange = useCallback((open: boolean) => { - if (open) { - activeExpandedDialogId = instanceIdRef.current; - setIsExpandedDialogOpen(true); - } else { - if (activeExpandedDialogId === instanceIdRef.current) { - activeExpandedDialogId = null; + const 
handleExpandedDialogChange = useCallback( + (open: boolean) => { + if (open) { + activeExpandedDialogId = instanceId; + setIsExpandedDialogOpen(true); + } else { + if (activeExpandedDialogId === instanceId) { + activeExpandedDialogId = null; + } + setIsExpandedDialogOpen(false); } - setIsExpandedDialogOpen(false); - } - }, []); + }, + [instanceId] + ); // Keyboard shortcut handler for Cmd+P / Ctrl+P // Opens dialog if none is open, or closes if this instance owns the open dialog @@ -180,10 +187,7 @@ export const Inputbox = ({ e.preventDefault(); // If this instance has the dialog open, close it - if ( - isExpandedDialogOpen && - activeExpandedDialogId === instanceIdRef.current - ) { + if (isExpandedDialogOpen && activeExpandedDialogId === instanceId) { handleExpandedDialogChange(false); } // If no dialog is open, open this one @@ -196,7 +200,7 @@ export const Inputbox = ({ window.addEventListener('keydown', handleKeyDown); return () => window.removeEventListener('keydown', handleKeyDown); - }, [isExpandedDialogOpen, handleExpandedDialogChange]); + }, [isExpandedDialogOpen, handleExpandedDialogChange, instanceId]); const openRemainingPopover = () => { if (hoverCloseTimerRef.current) { @@ -308,7 +312,7 @@ export const Inputbox = ({ console.log('[Drag-Drop] Processing dropped files:', dropped.length); - const result = await processDroppedFiles(dropped, files); + const result = await processDroppedFiles(dropped, files, host); if (result.success) { console.log('[Drag-Drop] Setting files:', result.files); diff --git a/src/components/ChatBox/MessageItem/AgentMessageCard.tsx b/src/components/ChatBox/MessageItem/AgentMessageCard.tsx index a4773feef..af9381bd0 100644 --- a/src/components/ChatBox/MessageItem/AgentMessageCard.tsx +++ b/src/components/ChatBox/MessageItem/AgentMessageCard.tsx @@ -12,6 +12,7 @@ // limitations under the License. // ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. 
========= +import { useHost } from '@/host'; import { Check, Copy, FileText } from 'lucide-react'; import { useCallback, useMemo, useState } from 'react'; import { useTranslation } from 'react-i18next'; @@ -41,6 +42,8 @@ export function AgentMessageCard({ className, attaches, }: AgentMessageCardProps) { + const host = useHost(); + const ipcRenderer = host?.ipcRenderer; // use content hash to track if typewriter effect is completed const contentHash = useMemo(() => { return `${id}-${content}`; @@ -102,7 +105,7 @@ export function AgentMessageCard({
{ e.stopPropagation(); - window.ipcRenderer.invoke('reveal-in-folder', file.filePath); + ipcRenderer?.invoke('reveal-in-folder', file.filePath); }} key={'attache-' + file.fileName} className="flex w-full cursor-pointer items-center gap-2 rounded-2xl border border-solid border-task-border-default bg-message-fill-default py-1 pl-2" diff --git a/src/components/ChatBox/MessageItem/MarkDown.tsx b/src/components/ChatBox/MessageItem/MarkDown.tsx index 43e2f6c37..38b6003b0 100644 --- a/src/components/ChatBox/MessageItem/MarkDown.tsx +++ b/src/components/ChatBox/MessageItem/MarkDown.tsx @@ -13,6 +13,7 @@ // ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= import { Dialog, DialogContent } from '@/components/ui/dialog'; +import { useHost } from '@/host'; import { isHtmlDocument } from '@/lib/htmlFontStyles'; import '@/style/markdown-styles.css'; import DOMPurify from 'dompurify'; @@ -70,6 +71,8 @@ export const MarkDown = memo( /** Base directory for resolving relative image paths (e.g. markdown file's directory). 
*/ contentBasePath?: string | null; }) => { + const host = useHost(); + const electronAPI = host?.electronAPI; const [displayedContent, setDisplayedContent] = useState(''); const [html, setHtml] = useState(''); const [previewImage, setPreviewImage] = useState(null); @@ -160,12 +163,9 @@ export const MarkDown = memo( try { const resolvedPath = resolveRelativePath(contentBasePath, src); - if ( - typeof window !== 'undefined' && - window.electronAPI?.readFileAsDataUrl - ) { + if (electronAPI?.readFileAsDataUrl) { const dataUrl = - await window.electronAPI.readFileAsDataUrl(resolvedPath); + await electronAPI.readFileAsDataUrl(resolvedPath); // Add cursor-pointer class and data attributes for click handling const newTag = ``; @@ -198,7 +198,7 @@ export const MarkDown = memo( }; processMarkdown(); - }, [displayedContent, contentBasePath]); + }, [displayedContent, contentBasePath, electronAPI]); // Add click handlers for images useEffect(() => { diff --git a/src/components/ChatBox/MessageItem/UserMessageCard.tsx b/src/components/ChatBox/MessageItem/UserMessageCard.tsx index c3eb5253f..c05002b1d 100644 --- a/src/components/ChatBox/MessageItem/UserMessageCard.tsx +++ b/src/components/ChatBox/MessageItem/UserMessageCard.tsx @@ -12,6 +12,7 @@ // limitations under the License. // ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. 
========= +import { useHost } from '@/host'; import { cn } from '@/lib/utils'; import { Check, Copy, FileText, Image, Sparkles } from 'lucide-react'; import { useCallback, useRef, useState } from 'react'; @@ -59,6 +60,8 @@ export function UserMessageCard({ className, attaches, }: UserMessageCardProps) { + const host = useHost(); + const ipcRenderer = host?.ipcRenderer; const [_hoveredFilePath, setHoveredFilePath] = useState(null); const [isRemainingOpen, setIsRemainingOpen] = useState(false); const [copied, setCopied] = useState(false); @@ -103,9 +106,16 @@ export function UserMessageCard({ return ; }; - const handleOpenSkillFolder = (skillName: string) => { - window.electronAPI?.openSkillFolder?.(skillName); - }; + const handleOpenSkillFolder = useCallback( + async (skillName: string) => { + const { skillGetPathByName } = await import('@/api/brain'); + const res = await skillGetPathByName(skillName); + if (res?.path && ipcRenderer?.invoke) { + ipcRenderer.invoke('reveal-in-folder', res.path); + } + }, + [ipcRenderer] + ); const contentNodes = parseContentWithSkillTags(content); const hasSkillTags = contentNodes.some((n) => n.type === 'skill'); @@ -175,10 +185,7 @@ export function UserMessageCard({ } onClick={(e) => { e.stopPropagation(); - window.ipcRenderer.invoke( - 'reveal-in-folder', - file.filePath - ); + ipcRenderer?.invoke('reveal-in-folder', file.filePath); }} > {/* File icon */} @@ -241,7 +248,7 @@ export function UserMessageCard({ } onClick={(e) => { e.stopPropagation(); - window.ipcRenderer.invoke( + ipcRenderer?.invoke( 'reveal-in-folder', file.filePath ); diff --git a/src/components/ChatBox/TaskBox/TaskCard.tsx b/src/components/ChatBox/TaskBox/TaskCard.tsx index 60531feae..ed1e7c7db 100644 --- a/src/components/ChatBox/TaskBox/TaskCard.tsx +++ b/src/components/ChatBox/TaskBox/TaskCard.tsx @@ -14,6 +14,7 @@ import { Button } from '@/components/ui/button'; import { Progress } from '@/components/ui/progress'; +import { useHost } from '@/host'; import { 
TaskItem } from './TaskItem'; import { TaskState, TaskStateType } from '@/components/TaskState'; @@ -58,6 +59,8 @@ export function TaskCard({ clickable = true, chatId, }: TaskCardProps) { + const host = useHost(); + const electronAPI = host?.electronAPI; const [isExpanded, setIsExpanded] = useState(true); const contentRef = useRef(null); const [contentHeight, setContentHeight] = useState('auto'); @@ -395,7 +398,7 @@ export function TaskCard({ chatStore.activeTaskId as string, task.agent?.agent_id ); - window.electronAPI.hideAllWebview(); + electronAPI?.hideAllWebview(); } }} key={`taskList-${task.id}`} diff --git a/src/components/ChatBox/UserQueryGroup.tsx b/src/components/ChatBox/UserQueryGroup.tsx index 08c106b8d..eb1fecc6f 100644 --- a/src/components/ChatBox/UserQueryGroup.tsx +++ b/src/components/ChatBox/UserQueryGroup.tsx @@ -334,6 +334,7 @@ export const UserQueryGroup: React.FC = ({ activeTaskId as string, file ); + chatState.setNuwFileNum(activeTaskId as string, 0); chatState.setActiveWorkspace( activeTaskId as string, 'documentWorkSpace' @@ -429,6 +430,7 @@ export const UserQueryGroup: React.FC = ({ transition={{ delay: 0.3 }} onClick={() => { chatState.setSelectedFile(activeTaskId as string, file); + chatState.setNuwFileNum(activeTaskId as string, 0); chatState.setActiveWorkspace( activeTaskId as string, 'documentWorkSpace' diff --git a/src/components/ChatBox/index.tsx b/src/components/ChatBox/index.tsx index dedbede29..03d567261 100644 --- a/src/components/ChatBox/index.tsx +++ b/src/components/ChatBox/index.tsx @@ -18,8 +18,11 @@ import { fetchPut, proxyFetchDelete, proxyFetchGet, + uploadFileToBrain, } from '@/api/http'; +import { isWeb } from '@/client/platform'; import useChatStoreAdapter from '@/hooks/useChatStoreAdapter'; +import { useHost } from '@/host'; import { generateUniqueId, replayActiveTask } from '@/lib'; import { proxyUpdateTriggerExecution } from '@/service/triggerApi'; import { useAuthStore } from '@/store/authStore'; @@ -45,6 +48,7 @@ 
const getChatStoreTotalTokens = (chatStore: VanillaChatStore): number => { }; export default function ChatBox(): JSX.Element { + const host = useHost(); const [message, setMessage] = useState(''); //Get Chatstore for the active project's task @@ -375,11 +379,6 @@ export default function ChatBox(): JSX.Element { } }, [skill_prompt, searchParams, setSearchParams]); - useEffect(() => { - if (!chatStore) return; - console.log('ChatStore Data: ', chatStore); - }, [chatStore]); - const scrollToBottom = useCallback(() => { if (scrollContainerRef.current) { setTimeout(() => { @@ -482,6 +481,7 @@ export default function ChatBox(): JSX.Element { reply: tempMessageContent, }); chatStore.setAttaches(_taskId, []); + chatStore.setIsPending(_taskId, false); if (chatStore.tasks[_taskId].askList.length === 0) { chatStore.setActiveAsk(_taskId, ''); } else { @@ -493,7 +493,6 @@ export default function ChatBox(): JSX.Element { let message = activeAskList.shift(); chatStore.setActiveAskList(_taskId, [...activeAskList]); chatStore.setActiveAsk(_taskId, message?.agent_name || ''); - chatStore.setIsPending(_taskId, false); chatStore.addMessages(_taskId, message!); } } else { @@ -743,20 +742,64 @@ export default function ChatBox(): JSX.Element { // File selection handler const handleFileSelect = async () => { try { - const result = await window.electronAPI.selectFile({ + const taskId = chatStore.activeTaskId as string; + const existingFiles = chatStore.tasks[taskId].attaches || []; + + if (isWeb()) { + const input = document.createElement('input'); + input.type = 'file'; + input.multiple = true; + input.onchange = async () => { + if (!input.files?.length) { + return; + } + + const uploadedFiles: File[] = []; + for (const selectedFile of Array.from(input.files)) { + try { + const result = await uploadFileToBrain(selectedFile); + uploadedFiles.push({ + fileName: result.filename, + filePath: result.file_id, + fileId: result.file_id, + source: 'upload', + } as File); + } catch (error) { + 
console.error('Select File Upload Error:', error); + toast.error(`Failed to upload ${selectedFile.name}`); + } + } + + if (uploadedFiles.length === 0) { + return; + } + + const files = [ + ...existingFiles, + ...uploadedFiles.filter( + (uploaded) => + !existingFiles.some( + (existing) => existing.filePath === uploaded.filePath + ) + ), + ]; + chatStore.setAttaches(taskId, files); + }; + input.click(); + return; + } + + const result = await host?.electronAPI?.selectFile({ title: t('chat.select-file'), filters: [{ name: t('chat.all-files'), extensions: ['*'] }], }); - if (result.success && result.files && result.files.length > 0) { - const taskId = chatStore.activeTaskId as string; + if (result?.success && result.files && result.files.length > 0) { const files = [ - ...(chatStore.tasks[taskId].attaches || []), + ...existingFiles, ...result.files.filter( (r: File) => - !chatStore.tasks[taskId].attaches?.some( - (f: File) => f.filePath === r.filePath - ) + !existingFiles.some((f: File) => f.filePath === r.filePath) ), ]; chatStore.setAttaches(taskId, files); diff --git a/src/components/Dialog/CloseNotice.tsx b/src/components/Dialog/CloseNotice.tsx index 33411d979..7a62ca683 100644 --- a/src/components/Dialog/CloseNotice.tsx +++ b/src/components/Dialog/CloseNotice.tsx @@ -12,6 +12,7 @@ // limitations under the License. // ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. 
========= +import { useHost } from '@/host'; import { useCallback } from 'react'; import { useTranslation } from 'react-i18next'; import { Button } from '../ui/button'; @@ -35,10 +36,12 @@ export default function CloseNoticeDialog({ onOpenChange, trigger, }: Props) { + const host = useHost(); + const electronAPI = host?.electronAPI; const { t } = useTranslation(); const onSubmit = useCallback(() => { - window.electronAPI.closeWindow(true); - }, []); + electronAPI?.closeWindow(true); + }, [electronAPI]); return ( diff --git a/src/components/Folder/index.tsx b/src/components/Folder/index.tsx index c2d32771a..2f07f03db 100644 --- a/src/components/Folder/index.tsx +++ b/src/components/Folder/index.tsx @@ -33,9 +33,10 @@ import { import { useEffect, useRef, useState } from 'react'; import FolderComponent from './FolderComponent'; -import { proxyFetchGet } from '@/api/http'; +import { fetchGet, getBaseURL } from '@/api/http'; import { MarkDown } from '@/components/ChatBox/MessageItem/MarkDown'; import useChatStoreAdapter from '@/hooks/useChatStoreAdapter'; +import { useHost } from '@/host'; import { deferInlineScriptsUntilLoad, injectFontStyles, @@ -50,6 +51,25 @@ const IMAGE_EXTENSIONS = ['png', 'jpg', 'jpeg', 'gif', 'bmp', 'webp', 'svg']; const AUDIO_EXTENSIONS = ['mp3', 'wav', 'ogg', 'flac', 'aac', 'm4a', 'wma']; const VIDEO_EXTENSIONS = ['mp4', 'webm', 'mov', 'avi', 'mkv', 'flv', 'wmv']; +function FileLoadingSpinner({ fileName }: { fileName?: string } = {}) { + return ( +
+
+

+ {fileName ? ( + <> + Loading{' '} + {fileName} + ... + + ) : ( + 'Loading...' + )} +

+
+ ); +} + type FileTypeTarget = { name?: string; path?: string; @@ -110,6 +130,7 @@ function isVideoFile(file: FileTypeTarget) { // Type definitions interface FileTreeNode { + id: string; name: string; path: string; type?: string; @@ -117,6 +138,7 @@ interface FileTreeNode { icon?: React.ElementType; children?: FileTreeNode[]; isRemote?: boolean; + relativePath?: string; } interface FileInfo { @@ -130,6 +152,198 @@ interface FileInfo { isRemote?: boolean; } +type FileIdentity = Pick; + +function normalizeTreePath(path: string): string { + return path + .replace(/\\/g, '/') + .replace(/^\.?\//, '') + .replace(/\/+/g, '/') + .replace(/^\/+/, ''); +} + +function getNormalizedTreeRelativePath(file: FileInfo): string { + const normalizedRelativePath = normalizeTreePath(file.relativePath || ''); + if (normalizedRelativePath) { + return normalizedRelativePath; + } + + const normalizedName = normalizeTreePath(file.name || ''); + if (normalizedName) { + return normalizedName; + } + + return ''; +} + +function getComparableRelativePath(file?: FileIdentity | null): string | null { + if (!file) return null; + + const normalizedRelativePath = normalizeLookupPath(file.relativePath || ''); + if (normalizedRelativePath) { + return normalizedRelativePath; + } + + if (!file.path) return null; + + try { + const url = new URL(file.path, window.location.origin); + const pathParam = url.searchParams.get('path'); + return pathParam + ? 
normalizeLookupPath(decodeURIComponent(pathParam)) + : null; + } catch { + return null; + } +} + +export function isSameFileIdentity( + left?: FileIdentity | null, + right?: FileIdentity | null +): boolean { + if (!left || !right) return false; + + if (left.path && right.path && left.path === right.path) { + return true; + } + + const leftRelativePath = getComparableRelativePath(left); + const rightRelativePath = getComparableRelativePath(right); + + if (leftRelativePath && rightRelativePath) { + return leftRelativePath === rightRelativePath; + } + + return ( + !!left.name && + !!right.name && + left.name === right.name && + !leftRelativePath && + !rightRelativePath && + !left.path && + !right.path + ); +} + +export function findMatchingFile( + files: FileInfo[], + target?: FileInfo | null +): FileInfo | undefined { + if (!target) return undefined; + + if (target.path) { + const exactPathMatch = files.find((file) => file.path === target.path); + if (exactPathMatch) { + return exactPathMatch; + } + } + + const targetRelativePath = getComparableRelativePath(target); + if (targetRelativePath) { + const relativePathMatch = files.find( + (file) => getComparableRelativePath(file) === targetRelativePath + ); + if (relativePathMatch) { + return relativePathMatch; + } + } + + if (target.name) { + const sameNameMatches = files.filter((file) => file.name === target.name); + if (sameNameMatches.length === 1) { + return sameNameMatches[0]; + } + } + + return undefined; +} + +export function buildFileTree(files: FileInfo[]): FileTreeNode { + const root: FileTreeNode = { + id: 'root', + name: 'root', + path: '', + children: [], + isFolder: true, + }; + + const folderMap = new Map(); + folderMap.set('', root); + + const sortedFiles = [...files].sort((left, right) => { + const leftRelativePath = getNormalizedTreeRelativePath(left); + const rightRelativePath = getNormalizedTreeRelativePath(right); + const leftDepth = leftRelativePath.split('/').filter(Boolean).length; + const 
rightDepth = rightRelativePath.split('/').filter(Boolean).length; + + if (leftDepth !== rightDepth) { + return leftDepth - rightDepth; + } + + return leftRelativePath.localeCompare(rightRelativePath); + }); + + for (const file of sortedFiles) { + const normalizedRelativePath = getNormalizedTreeRelativePath(file); + const pathSegments = normalizedRelativePath.split('/').filter(Boolean); + const folderSegments = pathSegments.slice(0, -1); + const fileName = pathSegments[pathSegments.length - 1] || file.name; + + let parentNode = root; + let currentFolderPath = ''; + + for (const segment of folderSegments) { + currentFolderPath = currentFolderPath + ? `${currentFolderPath}/${segment}` + : segment; + + let folderNode = folderMap.get(currentFolderPath); + if (!folderNode) { + folderNode = { + id: `folder:${currentFolderPath}`, + name: segment, + path: currentFolderPath, + isFolder: true, + children: [], + relativePath: currentFolderPath, + }; + parentNode.children!.push(folderNode); + folderMap.set(currentFolderPath, folderNode); + } + + parentNode = folderNode; + } + + parentNode.children!.push({ + id: `file:${normalizedRelativePath || file.path || file.name}`, + name: fileName || file.name, + path: file.path, + type: file.type, + isFolder: file.isFolder, + icon: file.icon, + children: file.isFolder ? [] : undefined, + isRemote: file.isRemote, + relativePath: file.relativePath, + }); + } + + const sortTree = (node: FileTreeNode) => { + if (!node.children?.length) return; + + node.children.sort((left, right) => { + if (!!left.isFolder !== !!right.isFolder) { + return left.isFolder ? -1 : 1; + } + return left.name.localeCompare(right.name); + }); + + node.children.forEach(sortTree); + }; + + sortTree(root); + return root; +} + // FileTree component to render nested file structure interface FileTreeProps { node: FileTreeNode; @@ -155,7 +369,7 @@ export const FileTree: React.FC = ({ return (
0 ? 'ml-4' : ''}> {node.children.map((child) => { - const isExpanded = expandedFolders.has(child.path); + const isExpanded = expandedFolders.has(child.id); const fileInfo: FileInfo = { name: child.name, path: child.path, @@ -163,20 +377,21 @@ export const FileTree: React.FC = ({ isFolder: child.isFolder, icon: child.icon, isRemote: child.isRemote, + relativePath: child.relativePath, }; return ( -
+
) : isImageFile(selectedFile) ? (
- +
) : (
@@ -771,14 +1048,7 @@ export default function Folder({ data: _data }: { data?: Agent }) {
                   
) ) : ( -
-
-
-

- {t('chat.loading')} -

-
-
+ ) ) : (
@@ -839,23 +1109,40 @@ function toFileUrl(filePath: string): string { function ImageLoader({ selectedFile }: { selectedFile: FileInfo }) { const [src, setSrc] = useState(''); + const [loadError, setLoadError] = useState(false); useEffect(() => { setSrc(''); + setLoadError(false); + if (selectedFile.isRemote) { setSrc((selectedFile.content as string) || selectedFile.path); return; } - // Use file:// source so Chromium can stream/seek large media files. setSrc(toFileUrl(selectedFile.path)); }, [selectedFile]); + if (loadError) { + return ( +
+

{selectedFile.name}

+

+ Failed to load image. Try selecting again. +

+
+ ); + } + + if (!src) { + return ; + } + return ( {selectedFile.name} console.error('Image load error:', err)} + onError={() => setLoadError(true)} /> ); } @@ -866,13 +1153,17 @@ function AudioLoader({ selectedFile }: { selectedFile: FileInfo }) { useEffect(() => { setSrc(''); if (selectedFile.isRemote) { - setSrc(selectedFile.content || selectedFile.path); + setSrc(selectedFile.content || selectedFile.path || ''); return; } // Use file:// source so Chromium can stream/seek large media files. setSrc(toFileUrl(selectedFile.path)); }, [selectedFile]); + if (!src) { + return ; + } + return (

@@ -896,13 +1187,17 @@ function VideoLoader({ selectedFile }: { selectedFile: FileInfo }) { useEffect(() => { setSrc(''); if (selectedFile.isRemote) { - setSrc(selectedFile.content || selectedFile.path); + setSrc(selectedFile.content || selectedFile.path || ''); return; } // Use file:// source so Chromium can stream/seek large media files. setSrc(toFileUrl(selectedFile.path)); }, [selectedFile]); + if (!src) { + return ; + } + return (

{/* Floating notch-style zoom controls */} @@ -1241,7 +1732,7 @@ function HtmlRenderer({ ref={iframeRef} srcDoc={processedHtml} className="bg-white h-full w-full border-0" - sandbox="allow-scripts allow-forms" + sandbox="allow-scripts allow-forms allow-downloads" title={selectedFile.name} tabIndex={0} onLoad={() => iframeRef.current?.focus()} diff --git a/src/components/Layout/index.tsx b/src/components/Layout/index.tsx index a75f496c0..c8ee26336 100644 --- a/src/components/Layout/index.tsx +++ b/src/components/Layout/index.tsx @@ -18,6 +18,7 @@ import { InstallDependencies } from '@/components/InstallStep/InstallDependencie import TopBar from '@/components/TopBar'; import useChatStoreAdapter from '@/hooks/useChatStoreAdapter'; import { useInstallationSetup } from '@/hooks/useInstallationSetup'; +import { useHost } from '@/host'; import { useAuthStore } from '@/store/authStore'; import { useInstallationUI } from '@/store/installationStore'; import { useEffect, useState } from 'react'; @@ -27,6 +28,7 @@ import HistorySidebar from '../HistorySidebar'; import InstallationErrorDialog from '../InstallStep/InstallationErrorDialog/InstallationErrorDialog'; const Layout = () => { + const host = useHost(); const { initState, isFirstLaunch, @@ -53,22 +55,23 @@ const Layout = () => { useInstallationSetup(); useEffect(() => { + if (!host?.ipcRenderer || !host?.electronAPI) return; + const handleBeforeClose = () => { const currentStatus = chatStore.tasks[chatStore.activeTaskId as string]?.status; if (['running', 'pause'].includes(currentStatus)) { setNoticeOpen(true); } else { - window.electronAPI.closeWindow(true); + host.electronAPI.closeWindow(true); } }; - window.ipcRenderer.on('before-close', handleBeforeClose); - + host.ipcRenderer.on('before-close', handleBeforeClose); return () => { - window.ipcRenderer.removeAllListeners('before-close'); + host.ipcRenderer?.removeAllListeners('before-close'); }; - }, [chatStore.tasks, chatStore.activeTaskId]); + }, [chatStore.tasks, 
chatStore.activeTaskId, host]); // Determine what to show based on states const shouldShowOnboarding = diff --git a/src/components/TerminalAgentWorkspace/index.tsx b/src/components/TerminalAgentWorkspace/index.tsx index 54cf8e250..266dbf30e 100644 --- a/src/components/TerminalAgentWorkspace/index.tsx +++ b/src/components/TerminalAgentWorkspace/index.tsx @@ -15,6 +15,7 @@ import { fetchPut } from '@/api/http'; import Terminal from '@/components/Terminal'; import useChatStoreAdapter from '@/hooks/useChatStoreAdapter'; +import { useHost } from '@/host'; import { ArrowDown, ArrowUp, @@ -34,6 +35,8 @@ import { Button } from '../ui/button'; export default function TerminalAgentWorkspace() { //Get Chatstore for the active project's task + const host = useHost(); + const electronAPI = host?.electronAPI; const { chatStore, projectStore } = useChatStoreAdapter(); const { t } = useTranslation(); const [isSingleMode, setIsSingleMode] = useState(false); @@ -124,7 +127,7 @@ export default function TerminalAgentWorkspace() { action: 'resume', }); setIsTakeControl(false); - window.electronAPI.hideAllWebview(); + electronAPI?.hideAllWebview(); }} className="rounded-full" > diff --git a/src/components/TopBar/index.tsx b/src/components/TopBar/index.tsx index 1dc18956b..e16da9639 100644 --- a/src/components/TopBar/index.tsx +++ b/src/components/TopBar/index.tsx @@ -25,6 +25,7 @@ import EndNoticeDialog from '@/components/Dialog/EndNotice'; import { Button } from '@/components/ui/button'; import { TooltipSimple } from '@/components/ui/tooltip'; import useChatStoreAdapter from '@/hooks/useChatStoreAdapter'; +import { useHost } from '@/host'; import { share } from '@/lib/share'; import { useAuthStore } from '@/store/authStore'; import { useInstallationUI } from '@/store/installationStore'; @@ -50,6 +51,7 @@ import { toast } from 'sonner'; function HeaderWin() { const { t } = useTranslation(); + const host = useHost(); const titlebarRef = useRef(null); const controlsRef = useRef(null); 
const [platform, setPlatform] = useState(''); @@ -67,12 +69,12 @@ function HeaderWin() { isInstalling || installationState === 'waiting-backend'; useEffect(() => { - const p = window.electronAPI.getPlatform(); - setPlatform(p); - }, []); + setPlatform(host?.electronAPI?.getPlatform?.() ?? 'web'); + }, [host]); const exportLog = async () => { + if (!host?.electronAPI?.exportLog) return; try { - const response = await window.electronAPI.exportLog(); + const response = await host.electronAPI.exportLog(); if (!response.success) { alert(t('layout.export-cancelled') + response.error); @@ -84,7 +86,7 @@ function HeaderWin() { alert(t('layout.log-saved') + response.savedPath); } } catch (e: any) { - alert(t('layout.export-error') + e.message); + alert(t('layout.export-error') + (e as Error).message); } }; @@ -141,9 +143,9 @@ function HeaderWin() { try { const task = chatStore.tasks[taskId]; - // Stop the task if it's running + // Stop the task if it's running (use projectId - task_lock is keyed by project) if (task && task.status === ChatTaskStatus.RUNNING) { - await fetchPut(`/task/${taskId}/take-control`, { + await fetchPut(`/task/${projectId}/take-control`, { action: 'stop', }); } @@ -362,7 +364,8 @@ function HeaderWin() { )} - {chatStore.activeTaskId && + {host?.electronAPI && + chatStore.activeTaskId && chatStore.tasks[chatStore.activeTaskId as string] && (
)}
- {/* Custom window controls only for Linux (Windows and macOS use native controls) */} - {platform !== 'darwin' && platform !== 'win32' && ( + {/* Custom window controls only for Linux. Web: hidden. */} + {host?.electronAPI && platform !== 'darwin' && platform !== 'win32' && (
window.electronAPI.minimizeWindow()} + onClick={() => host?.electronAPI?.minimizeWindow()} >
window.electronAPI.toggleMaximizeWindow()} + onClick={() => host?.electronAPI?.toggleMaximizeWindow()} >
window.electronAPI.closeWindow()} + onClick={() => host?.electronAPI?.closeWindow()} >
diff --git a/src/components/WindowControls/index.tsx b/src/components/WindowControls/index.tsx index f5725998b..a28ad3468 100644 --- a/src/components/WindowControls/index.tsx +++ b/src/components/WindowControls/index.tsx @@ -12,20 +12,22 @@ // limitations under the License. // ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +import { useHost } from '@/host'; import { Minus, Square, X } from 'lucide-react'; import { useEffect, useRef, useState } from 'react'; import './index.css'; +/** Renders when host provides window controls. */ export default function WindowControls() { + const host = useHost(); const controlsRef = useRef(null); const [platform, setPlatform] = useState(''); useEffect(() => { - const p = window.electronAPI.getPlatform(); + if (!host?.electronAPI?.getPlatform) return; + const p = host.electronAPI.getPlatform(); setPlatform(p); - // Hide custom controls on macOS (uses native traffic lights) - // and on Windows (now uses native frame with native controls) if (p === 'darwin' || p === 'win32') { if (controlsRef.current) { controlsRef.current.style.display = 'none'; @@ -33,10 +35,8 @@ export default function WindowControls() { } }, []); - // Don't render custom controls on macOS or Windows (both use native controls) - if (platform === 'darwin' || platform === 'win32') { - return null; - } + if (!host?.electronAPI) return null; + if (platform === 'darwin' || platform === 'win32') return null; return (
window.electronAPI.minimizeWindow()} + onClick={() => host?.electronAPI?.minimizeWindow()} >
window.electronAPI.toggleMaximizeWindow()} + onClick={() => host?.electronAPI?.toggleMaximizeWindow()} >
@@ -64,7 +64,7 @@ export default function WindowControls() { e.preventDefault(); // Trigger window close - this will go through the before-close handler // which checks if tasks are running and shows confirmation if needed - window.electronAPI.closeWindow(false); + host?.electronAPI?.closeWindow(false); }} onMouseDown={(e) => { e.stopPropagation(); diff --git a/src/components/WorkFlow/node.tsx b/src/components/WorkFlow/node.tsx index c787944fb..5660d9157 100644 --- a/src/components/WorkFlow/node.tsx +++ b/src/components/WorkFlow/node.tsx @@ -15,6 +15,7 @@ import { AddWorker } from '@/components/AddWorker'; import { Button } from '@/components/ui/button'; import useChatStoreAdapter from '@/hooks/useChatStoreAdapter'; +import { useHost } from '@/host'; import { getToolkitIcon } from '@/lib/toolkitIcons'; import { useAuthStore, useWorkerList } from '@/store/authStore'; import { @@ -72,6 +73,8 @@ interface NodeProps { } export function Node({ id, data }: NodeProps) { + const host = useHost(); + const electronAPI = host?.electronAPI; const [isExpanded, setIsExpanded] = useState(data.isExpanded); const [selectedTask, setSelectedTask] = useState(null); const [selectedState, setSelectedState] = useState('all'); @@ -501,7 +504,7 @@ export function Node({ id, data }: NodeProps) { data.agent?.agent_id as string ); - window.electronAPI.hideAllWebview(); + electronAPI?.hideAllWebview(); }} > {browserImages.length > 0 && ( @@ -646,7 +649,7 @@ export function Node({ id, data }: NodeProps) { chatStore.activeTaskId as string, task.agent?.agent_id ); - window.electronAPI.hideAllWebview(); + electronAPI?.hideAllWebview(); } }} key={`taskList-${task.id}-${task.failure_count}`} diff --git a/src/components/WorkspaceMenu/index.tsx b/src/components/WorkspaceMenu/index.tsx index cb19f9ec7..7353fe3b9 100644 --- a/src/components/WorkspaceMenu/index.tsx +++ b/src/components/WorkspaceMenu/index.tsx @@ -18,6 +18,7 @@ import { } from '@/components/MenuButton/MenuButton'; import { Button } from 
'@/components/ui/button'; import useChatStoreAdapter from '@/hooks/useChatStoreAdapter'; +import { useHost } from '@/host'; import { useWorkerList } from '@/store/authStore'; import { useWorkflowViewportStore } from '@/store/workflowViewportStore'; import { AnimatePresence, motion } from 'framer-motion'; @@ -44,6 +45,7 @@ export function WorkSpaceMenu({ isChatBoxVisible = true, }: WorkSpaceMenuProps) { const { t } = useTranslation(); + const host = useHost(); const { chatStore } = useChatStoreAdapter(); const workerList = useWorkerList(); @@ -112,8 +114,8 @@ export function WorkSpaceMenu({ }, [chatStore, baseWorker, workerList, taskAssigning]); useEffect(() => { - if (!chatStore) return; - const cleanup = window.electronAPI.onWebviewNavigated( + if (!chatStore || !host?.electronAPI?.onWebviewNavigated) return; + const cleanup = host.electronAPI.onWebviewNavigated( (id: string, url: string) => { if (!chatStore.activeTaskId) return; const currentTask = getCurrentTask(); @@ -205,8 +207,9 @@ export function WorkSpaceMenu({ // capture webview const captureWebview = () => { + if (!host?.ipcRenderer) return; webviews.map((webview) => { - window.ipcRenderer + host.ipcRenderer .invoke('capture-webview', webview.id) .then((base64: string) => { const currentTask = getCurrentTask(); @@ -243,9 +246,8 @@ export function WorkSpaceMenu({ } ); - // Cleanup function to remove listener when component unmounts or dependencies change return cleanup; - }, [chatStore, activeTaskId, webViewUrls, taskAssigning]); + }, [chatStore, activeTaskId, webViewUrls, taskAssigning, host]); if (!chatStore) { return
Loading...
; @@ -337,7 +339,7 @@ export function WorkSpaceMenu({ } chatStore.setActiveWorkspace(chatStore.activeTaskId, val); - window.electronAPI.hideAllWebview(); + host?.electronAPI?.hideAllWebview?.(); }; return ( diff --git a/src/components/update/index.tsx b/src/components/update/index.tsx index 60d179877..ea0fbd984 100644 --- a/src/components/update/index.tsx +++ b/src/components/update/index.tsx @@ -13,19 +13,22 @@ // ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= import { Progress } from '@/components/ui/progress'; +import { useHost } from '@/host'; import type { ProgressInfo } from 'electron-updater'; import { useCallback, useEffect, useState } from 'react'; import { useTranslation } from 'react-i18next'; import { toast } from 'sonner'; const Update = () => { + const host = useHost(); + const ipcRenderer = host?.ipcRenderer; const [downloadProgress, setDownloadProgress] = useState(0); const [isDownloading, setIsDownloading] = useState(false); const { t } = useTranslation(); - const checkUpdate = () => { - window.ipcRenderer.invoke('check-update'); - }; + const checkUpdate = useCallback(() => { + ipcRenderer?.invoke('check-update'); + }, [ipcRenderer]); const onUpdateCanAvailable = useCallback( (_event: Electron.IpcRendererEvent, info: VersionInfo) => { @@ -37,14 +40,14 @@ const Update = () => { onClick: () => { setIsDownloading(true); setDownloadProgress(0); - window.ipcRenderer.invoke('start-download'); + host?.ipcRenderer?.invoke('start-download'); }, }, duration: Infinity, }); } }, - [t] + [host?.ipcRenderer, t] ); const onUpdateError = useCallback( @@ -95,33 +98,35 @@ const Update = () => { description: t('update.click-to-install-update'), action: { label: t('update.install'), - onClick: () => window.ipcRenderer.invoke('quit-and-install'), + onClick: () => ipcRenderer?.invoke('quit-and-install'), }, duration: Infinity, }); }, - [t] + [ipcRenderer, t] ); useEffect(() => { - if (sessionStorage.getItem('updateElectronShown')) { + if 
(!ipcRenderer || sessionStorage.getItem('updateElectronShown')) { return; } sessionStorage.setItem('updateElectronShown', '1'); - window.ipcRenderer?.on('update-can-available', onUpdateCanAvailable); - window.ipcRenderer?.on('update-error', onUpdateError); - window.ipcRenderer?.on('download-progress', onDownloadProgress); - window.ipcRenderer?.on('update-downloaded', onUpdateDownloaded); + ipcRenderer.on('update-can-available', onUpdateCanAvailable); + ipcRenderer.on('update-error', onUpdateError); + ipcRenderer.on('download-progress', onDownloadProgress); + ipcRenderer.on('update-downloaded', onUpdateDownloaded); checkUpdate(); return () => { - window.ipcRenderer?.off('update-can-available', onUpdateCanAvailable); - window.ipcRenderer?.off('update-error', onUpdateError); - window.ipcRenderer?.off('download-progress', onDownloadProgress); - window.ipcRenderer?.off('update-downloaded', onUpdateDownloaded); + ipcRenderer.off('update-can-available', onUpdateCanAvailable); + ipcRenderer.off('update-error', onUpdateError); + ipcRenderer.off('download-progress', onDownloadProgress); + ipcRenderer.off('update-downloaded', onUpdateDownloaded); }; }, [ + checkUpdate, + ipcRenderer, onUpdateCanAvailable, onUpdateError, onDownloadProgress, diff --git a/src/context/ConnectionContext.tsx b/src/context/ConnectionContext.tsx new file mode 100644 index 000000000..2d87b1400 --- /dev/null +++ b/src/context/ConnectionContext.tsx @@ -0,0 +1,111 @@ +// ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. +// ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= + +/** + * ConnectionProvider: Phase 2 - config-driven Brain connection. + * Resolves brainEndpoint (Electron: get-backend-port) and sets X-Channel. + * getBaseURL in http.ts reads from connectionStore. + */ + +import { getDefaultBrainEndpoint } from '@/api/http'; +import { useHost } from '@/host'; +import type { ConnectionChannel } from '@/store/connectionStore'; +import { + getConnectionConfig, + setConnectionConfig, +} from '@/store/connectionStore'; +import React, { useEffect } from 'react'; + +export { + getConnectionConfig, + setConnectionConfig, +} from '@/store/connectionStore'; +export type { + ConnectionChannel, + ConnectionConfig, +} from '@/store/connectionStore'; + +interface ConnectionProviderProps { + channel?: ConnectionChannel; + children: React.ReactNode; +} + +export function ConnectionProvider({ + channel = 'desktop', + children, +}: ConnectionProviderProps) { + const host = useHost(); + + useEffect(() => { + const hasDesktop = !!(host?.electronAPI && host?.ipcRenderer); + const effectiveChannel = hasDesktop ? 
channel : 'web'; + setConnectionConfig({ channel: effectiveChannel }); + + const resolveEndpoint = async () => { + let resolvedEndpoint = ''; + if (hasDesktop && host?.electronAPI?.getBackendPort) { + try { + const port = await host.electronAPI.getBackendPort(); + if (port && port > 0) { + resolvedEndpoint = `http://localhost:${port}`; + setConnectionConfig({ brainEndpoint: resolvedEndpoint }); + return resolvedEndpoint; + } + } catch { + // IPC not ready + } + return resolvedEndpoint; + } + // Web: VITE_BRAIN_ENDPOINT (dev default http://localhost:5001) + const envEndpoint = getDefaultBrainEndpoint(); + if (envEndpoint && typeof envEndpoint === 'string') { + resolvedEndpoint = envEndpoint.replace(/\/$/, ''); + setConnectionConfig({ brainEndpoint: resolvedEndpoint }); + } else if (effectiveChannel === 'web') { + console.error( + '[ConnectionProvider] VITE_BRAIN_ENDPOINT not set for production web mode' + ); + } + return resolvedEndpoint; + }; + + const ensureSessionId = async (endpoint: string) => { + if (effectiveChannel !== 'web' || !endpoint) { + return; + } + if (getConnectionConfig().sessionId) { + return; + } + try { + const response = await fetch(`${endpoint}/health`, { + headers: { 'X-Channel': effectiveChannel }, + }); + const sessionId = response.headers.get('x-session-id'); + if (sessionId) { + setConnectionConfig({ sessionId }); + } + } catch { + // Brain may not be ready yet; session will be created on the first request. + } + }; + + resolveEndpoint().then((endpoint) => { + if (endpoint) { + void ensureSessionId(endpoint); + } + }); + }, [channel, host]); + + return <>{children}; +} diff --git a/src/hooks/useInstallationSetup.ts b/src/hooks/useInstallationSetup.ts index e8655462e..c61feba70 100644 --- a/src/hooks/useInstallationSetup.ts +++ b/src/hooks/useInstallationSetup.ts @@ -12,15 +12,19 @@ // limitations under the License. // ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. 
========= +import { checkBackendHealth, resetBaseURL } from '@/api/http'; +import { useHost } from '@/host'; import { useAuthStore } from '@/store/authStore'; +import { resetConnectionConfig } from '@/store/connectionStore'; import { useInstallationStore } from '@/store/installationStore'; import { useCallback, useEffect, useRef } from 'react'; /** - * Hook that sets up Electron IPC listeners and handles installation state synchronization - * This should be called once in your App component or Layout component + * Hook that sets up Electron IPC listeners and handles installation state synchronization. + * In Web mode (no Electron): polls Brain health via VITE_BRAIN_ENDPOINT, skips local install. */ export const useInstallationSetup = () => { + const host = useHost(); const { initState, setInitState, email } = useAuthStore(); const hasCheckedOnMount = useRef(false); @@ -48,97 +52,66 @@ export const useInstallationSetup = () => { (state) => state.setNeedsBackendRestart ); - // Shared function to poll backend status + // Shared function to poll backend/Brain status const startBackendPolling = useCallback(() => { console.log('[useInstallationSetup] Starting backend polling'); - // Immediately check backend status once - const checkBackendStatus = async () => { + const checkViaHealth = async (): Promise => { try { - const backendPort = await window.electronAPI.getBackendPort(); - if (backendPort && backendPort > 0) { - console.log( - '[useInstallationSetup] Backend immediately detected on port:', - backendPort - ); + const ok = await checkBackendHealth(); + if (ok) { + backendReady.current = true; + setSuccess(); + setInitState('done'); + setNeedsBackendRestart(false); + return true; + } + } catch (e) { + console.log('[useInstallationSetup] Health check failed:', e); + } + return false; + }; - // Verify backend is actually responding + // Electron: use getBackendPort + localhost health + const checkElectronBackend = async (): Promise => { + if 
(!host?.electronAPI?.getBackendPort) return false; + try { + const backendPort = await host.electronAPI.getBackendPort(); + if (backendPort && backendPort > 0) { const response = await fetch( `http://localhost:${backendPort}/health` ).catch(() => null); - if (response && response.ok) { - console.log( - '[useInstallationSetup] Backend health check passed immediately' - ); + if (response?.ok) { backendReady.current = true; setSuccess(); setInitState('done'); setNeedsBackendRestart(false); - return true; // Backend is ready, no need to poll + return true; } } - } catch (error) { - console.log( - '[useInstallationSetup] Initial backend check failed:', - error - ); + } catch (e) { + console.log('[useInstallationSetup] Electron backend check failed:', e); } - return false; // Backend not ready, need to poll + return false; }; - // Check immediately, then start polling if needed - checkBackendStatus().then((isReady) => { + const hasDesktop = !!(host?.electronAPI && host?.ipcRenderer); + const doCheck = hasDesktop ? 
checkElectronBackend : checkViaHealth; + + doCheck().then((isReady) => { if (isReady) { - console.log( - '[useInstallationSetup] Backend already ready, skipping polling' - ); + console.log('[useInstallationSetup] Backend ready, skipping polling'); return; } - console.log('[useInstallationSetup] Backend not ready, starting polling'); - - // Poll backend status every 2 seconds to ensure we catch when it's ready - // This is a fallback in case the backend-ready event is missed - const pollInterval = setInterval(async () => { - try { - const backendPort = await window.electronAPI.getBackendPort(); - if (backendPort && backendPort > 0) { - console.log( - '[useInstallationSetup] Backend poll detected ready on port:', - backendPort - ); - - // Verify backend is actually responding - const response = await fetch( - `http://localhost:${backendPort}/health` - ).catch(() => null); - if (response && response.ok) { - console.log('[useInstallationSetup] Backend health check passed'); - clearInterval(pollInterval); - - if (!backendReady.current) { - backendReady.current = true; - setSuccess(); - setInitState('done'); - // Clear the flag after backend is ready - setNeedsBackendRestart(false); - } - } - } - } catch (error) { - console.log( - '[useInstallationSetup] Backend poll check failed:', - error - ); - } + const pollInterval = setInterval(() => { + doCheck().then((ready) => { + if (ready) clearInterval(pollInterval); + }); }, 2000); - - // Clear polling after 30 seconds to prevent infinite polling - setTimeout(() => { - clearInterval(pollInterval); - }, 30000); + setTimeout(() => clearInterval(pollInterval), 30000); }); - }, [setSuccess, setInitState, setNeedsBackendRestart]); + }, [setSuccess, setInitState, setNeedsBackendRestart, host]); // Monitor for backend restart after logout useEffect(() => { @@ -168,9 +141,19 @@ export const useInstallationSetup = () => { hasCheckedOnMount.current = true; + // Web mode: skip Electron install, poll Brain health directly + if 
(!host?.electronAPI || !host?.ipcRenderer) { + console.log('[useInstallationSetup] Web mode: polling Brain health'); + installationCompleted.current = true; + setWaitingBackend(); + startBackendPolling(); + return; + } + const checkToolInstalled = async () => { + if (!host?.ipcRenderer) return { success: false }; try { - const result = await window.ipcRenderer.invoke('check-tool-installed'); + const result = await host.ipcRenderer.invoke('check-tool-installed'); if (result.success) { if (result.isInstalled) { @@ -179,8 +162,6 @@ export const useInstallationSetup = () => { ); installationCompleted.current = true; setWaitingBackend(); - - // Start polling for backend when tools are already installed startBackendPolling(); } @@ -202,9 +183,10 @@ export const useInstallationSetup = () => { }; const checkBackendStatus = async (_toolResult?: any) => { + if (!host?.electronAPI?.getInstallationStatus) return; try { const installationStatus = - await window.electronAPI.getInstallationStatus(); + await host.electronAPI.getInstallationStatus(); if (installationStatus.success && installationStatus.isInstalling) { startInstallation(); @@ -286,6 +268,9 @@ export const useInstallationSetup = () => { console.log('[useInstallationSetup] Backend ready event received:', data); if (data.success && data.port) { + // Reset cached baseURL so next getBaseURL fetches fresh port (handles restart) + resetBaseURL(); + resetConnectionConfig(); console.log( `[useInstallationSetup] Backend is ready on port ${data.port}` ); @@ -312,18 +297,21 @@ export const useInstallationSetup = () => { } }; - window.electronAPI.onInstallDependenciesStart(handleInstallStart); - window.electronAPI.onInstallDependenciesLog(handleInstallLog); - window.electronAPI.onInstallDependenciesComplete(handleInstallComplete); - window.electronAPI.onBackendReady(handleBackendReady); + if (!host?.electronAPI) return; + + host.electronAPI.onInstallDependenciesStart(handleInstallStart); + 
host.electronAPI.onInstallDependenciesLog(handleInstallLog); + host.electronAPI.onInstallDependenciesComplete(handleInstallComplete); + host.electronAPI.onBackendReady(handleBackendReady); return () => { - window.electronAPI.removeAllListeners('install-dependencies-start'); - window.electronAPI.removeAllListeners('install-dependencies-log'); - window.electronAPI.removeAllListeners('install-dependencies-complete'); - window.electronAPI.removeAllListeners('backend-ready'); + host.electronAPI.removeAllListeners('install-dependencies-start'); + host.electronAPI.removeAllListeners('install-dependencies-log'); + host.electronAPI.removeAllListeners('install-dependencies-complete'); + host.electronAPI.removeAllListeners('backend-ready'); }; }, [ + host, startInstallation, addLog, setSuccess, diff --git a/src/hooks/useIntegrationManagement.ts b/src/hooks/useIntegrationManagement.ts index d562cc0dc..120de1544 100644 --- a/src/hooks/useIntegrationManagement.ts +++ b/src/hooks/useIntegrationManagement.ts @@ -20,6 +20,7 @@ import { proxyFetchPost, proxyFetchPut, } from '@/api/http'; +import { useHost } from '@/host'; import { useAuthStore } from '@/store/authStore'; import { useCallback, useEffect, useRef, useState } from 'react'; @@ -36,6 +37,9 @@ export interface IntegrationItem { * Hook for managing integration configurations, OAuth, and installation state */ export function useIntegrationManagement(items: IntegrationItem[]) { + const host = useHost(); + const electronAPI = host?.electronAPI; + const ipcRenderer = host?.ipcRenderer; const { email, checkAgentTool } = useAuthStore(); // Local installed status @@ -155,11 +159,11 @@ export function useIntegrationManagement(items: IntegrationItem[]) { } } - if (window.electronAPI?.envWrite) { - await window.electronAPI.envWrite(email, { key: envVarKey, value }); + if (electronAPI?.envWrite) { + await electronAPI.envWrite(email, { key: envVarKey, value }); } }, - [configs, email] + [configs, electronAPI, email] ); // Process OAuth 
callback @@ -262,11 +266,11 @@ export function useIntegrationManagement(items: IntegrationItem[]) { if (!data.provider || !data.code) return; processOauth(data); }; - window.ipcRenderer?.on('oauth-authorized', handler); + ipcRenderer?.on('oauth-authorized', handler); return () => { - window.ipcRenderer?.off('oauth-authorized', handler); + ipcRenderer?.off('oauth-authorized', handler); }; - }, [processOauth]); + }, [ipcRenderer, processOauth]); // Listen to OAuth callback URL notification useEffect(() => { @@ -275,11 +279,11 @@ export function useIntegrationManagement(items: IntegrationItem[]) { setCallBackUrl(data.url); } }; - window.ipcRenderer?.on('oauth-callback-url', handler); + ipcRenderer?.on('oauth-callback-url', handler); return () => { - window.ipcRenderer?.off('oauth-callback-url', handler); + ipcRenderer?.off('oauth-callback-url', handler); }; - }, []); + }, [ipcRenderer]); // Process cached OAuth event when items are ready useEffect(() => { @@ -311,9 +315,9 @@ export function useIntegrationManagement(items: IntegrationItem[]) { if ( item.env_vars && item.env_vars.length > 0 && - window.electronAPI?.envRemove + electronAPI?.envRemove ) { - await window.electronAPI.envRemove(email, item.env_vars[0]); + await electronAPI.envRemove(email, item.env_vars[0]); } } catch (_e) { // Ignore error @@ -349,7 +353,7 @@ export function useIntegrationManagement(items: IntegrationItem[]) { prev.filter((c: any) => c.config_group?.toLowerCase() !== groupKey) ); }, - [configs, email, checkAgentTool] + [checkAgentTool, configs, electronAPI, email] ); // Helper to create MCP object from integration item diff --git a/src/host/README.md b/src/host/README.md new file mode 100644 index 000000000..555eddd9f --- /dev/null +++ b/src/host/README.md @@ -0,0 +1,26 @@ +# Host 抽象层 + +统一桌面(Electron)与 Web 的能力注入,避免在业务代码中显式判断运行环境。 + +## 使用方式 + +```tsx +import { useHost } from '@/host'; + +function MyComponent() { + const host = useHost(); + // host.electronAPI / host.ipcRenderer 在 Web 下为 
null + if (host?.electronAPI?.someMethod) { + host.electronAPI.someMethod(); + } +} +``` + +## 初始化 + +`main.tsx` 中通过 `createHost()` 创建 host,由 `HostProvider` 注入。`createHost()` 是唯一读取 `window` 的地方。 + +## 后续扩展 + +- 桌面端若用其他技术栈(Tauri、原生等)重构,只需提供新的 host 实现 +- CLI、Browser Extension 等可复用同一套 React 组件,注入不同的 host diff --git a/src/host/context.tsx b/src/host/context.tsx new file mode 100644 index 000000000..88165f106 --- /dev/null +++ b/src/host/context.tsx @@ -0,0 +1,35 @@ +// ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +// Host context: inject desktop or web capabilities. Components use useHost(), +// never direct global Electron APIs. + +import React, { createContext, useContext, useMemo } from 'react'; +import type { AppHost } from './types'; + +const HostContext = createContext(null); + +export function HostProvider({ + host, + children, +}: { + host: AppHost; + children: React.ReactNode; +}) { + const value = useMemo(() => host, [host]); + return {children}; +} + +export function useHost(): AppHost | null { + return useContext(HostContext); +} diff --git a/src/host/createHost.ts b/src/host/createHost.ts new file mode 100644 index 000000000..8dbe8fafe --- /dev/null +++ b/src/host/createHost.ts @@ -0,0 +1,27 @@ +// ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. 
========= +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +// Create host from environment. Single place that reads window. + +import type { AppHost } from './types'; + +export function createHost(): AppHost { + if (typeof window === 'undefined') { + return { electronAPI: null, ipcRenderer: null }; + } + const win = window as any; + return { + electronAPI: win.electronAPI ?? null, + ipcRenderer: win.ipcRenderer ?? null, + }; +} diff --git a/src/host/index.ts b/src/host/index.ts new file mode 100644 index 000000000..928b0ca1b --- /dev/null +++ b/src/host/index.ts @@ -0,0 +1,17 @@ +// ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. 
========= + +export { HostProvider, useHost } from './context'; +export { createHost } from './createHost'; +export type { AppHost } from './types'; diff --git a/src/host/types.ts b/src/host/types.ts new file mode 100644 index 000000000..e6f9f04e7 --- /dev/null +++ b/src/host/types.ts @@ -0,0 +1,20 @@ +// ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +// Host abstraction: desktop (Electron) vs web. No explicit platform checks. +// See docs/design/04-client.md. + +export interface AppHost { + electronAPI: any; + ipcRenderer: any; +} diff --git a/src/lib/env.ts b/src/lib/env.ts new file mode 100644 index 000000000..ef61a8bee --- /dev/null +++ b/src/lib/env.ts @@ -0,0 +1,14 @@ +// ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +// Deprecated: use useHost() from '@/host' instead. diff --git a/src/lib/fileUtils.ts b/src/lib/fileUtils.ts index 81282ce09..4cf367ca6 100644 --- a/src/lib/fileUtils.ts +++ b/src/lib/fileUtils.ts @@ -12,22 +12,81 @@ // limitations under the License. // ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +import { uploadFileToBrain } from '@/api/http'; +import { isWeb } from '@/client/platform'; import type { FileAttachment } from '@/components/ChatBox/BottomBox/InputBox'; +import type { AppHost } from '@/host'; +import { createHost } from '@/host'; /** * Process dropped files: resolve paths via Electron, send through IPC, * and merge with existing attachments (deduplicated by filePath). */ export async function processDroppedFiles( - droppedFiles: File[], - existingFiles: FileAttachment[] + droppedFiles: globalThis.File[], + existingFiles: FileAttachment[], + host?: AppHost | null ): Promise< | { success: true; files: FileAttachment[]; added: number } | { success: false; error: string } > { + if (isWeb()) { + const uploadedFiles: FileAttachment[] = []; + + for (const droppedFile of droppedFiles) { + try { + const result = await uploadFileToBrain(droppedFile); + uploadedFiles.push({ + fileName: result.filename, + filePath: result.file_id, + fileId: result.file_id, + source: 'upload', + }); + } catch (error) { + console.error('[Drag-Drop] Upload failed:', droppedFile.name, error); + } + } + + if (uploadedFiles.length === 0) { + return { + success: false, + error: 'Failed to upload dropped files.', + }; + } + + const mergedFiles = [ + ...existingFiles.filter( + (existing) => + !uploadedFiles.find( + (uploaded) => uploaded.filePath === existing.filePath + ) + ), + ...uploadedFiles.filter( + (uploaded) => + !existingFiles.find( + (existing) => existing.filePath === uploaded.filePath + ) + ), + ]; + + return { + success: true, + files: mergedFiles, + added: 
uploadedFiles.length, + }; + } + + const electronAPI = host?.electronAPI ?? createHost().electronAPI; + if (!electronAPI) { + return { + success: false, + error: 'Desktop file access is unavailable.', + }; + } + const fileData = droppedFiles.map((f) => { try { - return { name: f.name, path: window.electronAPI.getPathForFile(f) }; + return { name: f.name, path: electronAPI.getPathForFile(f) }; } catch { console.error('[Drag-Drop] Failed to get path for:', f.name); return { name: f.name, path: undefined }; @@ -42,7 +101,7 @@ export async function processDroppedFiles( }; } - const result = await window.electronAPI.processDroppedFiles(validFiles); + const result = await electronAPI.processDroppedFiles(validFiles); if (!result.success || !result.files) { return { success: false, diff --git a/src/lib/htmlFontStyles.ts b/src/lib/htmlFontStyles.ts index 40ce1cd6d..57069469d 100644 --- a/src/lib/htmlFontStyles.ts +++ b/src/lib/htmlFontStyles.ts @@ -180,8 +180,13 @@ export function deferInlineScriptsUntilLoad(html: string): string { 'var __eigentCurrentScript=document.currentScript;', 'if(__eigentCurrentScript&&__eigentCurrentScript.nonce){__eigentScript.nonce=__eigentCurrentScript.nonce;}', `__eigentScript.text=${serializedContent};`, + 'try{', '(document.head||document.body||document.documentElement).appendChild(__eigentScript);', - '__eigentScript.remove();', + '}catch(__eigentErr){', + "console.error('[HtmlRenderer] Deferred inline script execution failed:',__eigentErr);", + '}finally{', + 'if(__eigentScript.parentNode){__eigentScript.remove();}', + '}', '};', "if(document.readyState==='complete'){__eigentRun();}else{window.addEventListener('load',__eigentRun,{once:true});}", '})();', diff --git a/src/lib/index.ts b/src/lib/index.ts index 55ea93e08..21f5e2d29 100644 --- a/src/lib/index.ts +++ b/src/lib/index.ts @@ -12,6 +12,7 @@ // limitations under the License. // ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. 
========= +import { createHost } from '@/host'; import { getAuthStore } from '@/store/authStore'; export function getProxyBaseURL() { @@ -86,11 +87,12 @@ export async function uploadLog(taskId: string, type?: string | undefined) { if (import.meta.env.VITE_USE_LOCAL_PROXY !== 'true' && !type) { try { const { email, token } = getAuthStore(); + const electronAPI = createHost().electronAPI; const baseUrl = import.meta.env.DEV ? import.meta.env.VITE_PROXY_URL : import.meta.env.VITE_BASE_URL; - await window.electronAPI.uploadLog(email, taskId, baseUrl, token); + await electronAPI?.uploadLog(email, taskId, baseUrl, token); } catch (error) { console.error('Failed to upload log:', error); } diff --git a/src/lib/oauth.ts b/src/lib/oauth.ts index ebab89eb1..089317ea1 100644 --- a/src/lib/oauth.ts +++ b/src/lib/oauth.ts @@ -12,10 +12,14 @@ // limitations under the License. // ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +import { createHost } from '@/host'; + const EnvOauthInfoMap = { notion: 'NOTION_TOKEN', }; +const getElectronAPI = () => createHost().electronAPI; + export class OAuth { public client_name: string = 'Eigent'; public client_uri: string = 'https://eigent.ai/'; @@ -165,8 +169,9 @@ export class OAuth { body: params.toString(), }).then((res) => res.json()); - if (window.electronAPI?.envWrite) { - await window.electronAPI.envWrite(email, { + const electronAPI = getElectronAPI(); + if (electronAPI?.envWrite) { + await electronAPI.envWrite(email, { key: EnvOauthInfoMap[provider as keyof typeof EnvOauthInfoMap], value: newToken.access_token, }); diff --git a/src/lib/skillToolkit.ts b/src/lib/skillToolkit.ts index 3a84bd945..3414ec406 100644 --- a/src/lib/skillToolkit.ts +++ b/src/lib/skillToolkit.ts @@ -12,6 +12,8 @@ // limitations under the License. // ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. 
========= +import { getConnectionConfig } from '@/store/connectionStore'; + /** * Skill toolkit utilities aligned with CAMEL's skill_toolkit: * https://github.com/camel-ai/camel/blob/master/camel/toolkits/skill_toolkit.py @@ -133,9 +135,8 @@ export function skillNameToDirName(name: string): string { return cleaned || 'skill'; } -/** Check if running in Electron with skills API available */ +/** Check if skills API is available (Brain REST only, no IPC). */ export function hasSkillsFsApi(): boolean { - return ( - typeof window !== 'undefined' && !!(window as any).electronAPI?.skillsScan - ); + if (typeof window === 'undefined') return false; + return !!getConnectionConfig?.()?.brainEndpoint; } diff --git a/src/main.tsx b/src/main.tsx index 75bc50bd0..3c281a282 100644 --- a/src/main.tsx +++ b/src/main.tsx @@ -12,6 +12,7 @@ // limitations under the License. // ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +import { isWeb } from '@/client/platform'; import '@fontsource/inter/400.css'; import '@fontsource/inter/500.css'; import '@fontsource/inter/600.css'; @@ -19,27 +20,37 @@ import '@fontsource/inter/700.css'; import '@fontsource/inter/800.css'; import { Suspense } from 'react'; import ReactDOM from 'react-dom/client'; -import { HashRouter } from 'react-router-dom'; +import { BrowserRouter, HashRouter } from 'react-router-dom'; import App from './App'; import { ThemeProvider } from './components/ThemeProvider'; import { TooltipProvider } from './components/ui/tooltip'; +import { ConnectionProvider } from './context/ConnectionContext'; +import { createHost, HostProvider } from './host'; import './i18n'; +import { injectHost } from './store/chatStore'; import './style/index.css'; // If you want use Node.js, the`nodeIntegration` needs to be enabled in the Main process. // import './demos/node' +const host = createHost(); +injectHost(host); +const Router = isWeb() ? BrowserRouter : HashRouter; +const initialChannel = isWeb() ? 
'web' : 'desktop'; + ReactDOM.createRoot(document.getElementById('root') as HTMLElement).render( - //
}> - - - - - - - + + + + + + + + + + + - // ); postMessage({ payload: 'removeLoading' }, '*'); diff --git a/src/pages/Agents/components/SkillUploadDialog.tsx b/src/pages/Agents/components/SkillUploadDialog.tsx index 65d228a59..ad2ccb79b 100644 --- a/src/pages/Agents/components/SkillUploadDialog.tsx +++ b/src/pages/Agents/components/SkillUploadDialog.tsx @@ -12,6 +12,7 @@ // limitations under the License. // ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +import { skillImportZip } from '@/api/brain'; import ConfirmModal from '@/components/ui/alertDialog'; import { Button } from '@/components/ui/button'; import { @@ -22,6 +23,7 @@ import { } from '@/components/ui/dialog'; import { parseSkillMd } from '@/lib/skillToolkit'; import { useSkillsStore } from '@/store/skillsStore'; + import { AlertCircle, File, Upload, X } from 'lucide-react'; import { useCallback, useRef, useState } from 'react'; import { useTranslation } from 'react-i18next'; @@ -109,7 +111,7 @@ export default function SkillUploadDialog({ } try { - const result = await (window as any).electronAPI.skillImportZip( + const result = await skillImportZip( pendingFileBuffer, Array.from(newConfirmed) ); @@ -162,7 +164,7 @@ export default function SkillUploadDialog({ } try { - const result = await (window as any).electronAPI.skillImportZip( + const result = await skillImportZip( pendingFileBuffer, Array.from(confirmedReplacements) ); @@ -203,12 +205,8 @@ export default function SkillUploadDialog({ setIsUploading(true); try { - // Zip import: read file in renderer and send buffer to main (no path in sandbox) + // Zip import: Brain REST (Web + Electron) or IPC fallback (Electron only) if (isZipToUse) { - if (!(window as any).electronAPI?.skillImportZip) { - toast.error(t('agents.skill-add-error')); - return; - } let buffer: ArrayBuffer; try { buffer = await fileToUse.arrayBuffer(); @@ -218,9 +216,7 @@ export default function SkillUploadDialog({ } // First, check for conflicts - const 
result = await (window as any).electronAPI.skillImportZip( - buffer - ); + const result = await skillImportZip(buffer); if (result?.conflicts && result.conflicts.length > 0) { // Store conflicts and show dialog for first conflict diff --git a/src/pages/Browser/CDP.tsx b/src/pages/Browser/CDP.tsx index 12d1e9293..683d8b2ca 100644 --- a/src/pages/Browser/CDP.tsx +++ b/src/pages/Browser/CDP.tsx @@ -12,8 +12,10 @@ // limitations under the License. // ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +import { fetchDelete, fetchGet, fetchPost } from '@/api/http'; import AlertDialog from '@/components/ui/alertDialog'; import { Button } from '@/components/ui/button'; +import { useHost } from '@/host'; import { Globe, Link2, Loader2, Plus, Trash2 } from 'lucide-react'; import { useEffect, useState } from 'react'; import { useTranslation } from 'react-i18next'; @@ -28,6 +30,8 @@ interface CdpBrowser { } export default function CDP() { + const host = useHost(); + const electronAPI = host?.electronAPI; const { t } = useTranslation(); const [cdpBrowsers, setCdpBrowsers] = useState([]); const [deletingBrowser, setDeletingBrowser] = useState(null); @@ -38,42 +42,55 @@ export default function CDP() { const [connectPort, setConnectPort] = useState(''); const [connectChecking, setConnectChecking] = useState(false); const [connectError, setConnectError] = useState(''); + const isDesktopMode = !!electronAPI?.getCdpBrowsers; const loadCdpBrowsers = async () => { - if (window.electronAPI?.getCdpBrowsers) { - try { - const browsers = await window.electronAPI.getCdpBrowsers(); + try { + if (electronAPI?.getCdpBrowsers) { + const browsers = await electronAPI.getCdpBrowsers(); setCdpBrowsers(browsers); - } catch (error) { - console.error('Failed to load CDP browsers:', error); + return; } + + const browsers = await fetchGet('/browser/cdp/list'); + setCdpBrowsers(Array.isArray(browsers) ? 
browsers : []); + } catch (error) { + console.error('Failed to load CDP browsers:', error); } }; useEffect(() => { loadCdpBrowsers(); - }, []); + }, [electronAPI]); useEffect(() => { - if (!window.electronAPI?.onCdpPoolChanged) return; - const cleanup = window.electronAPI.onCdpPoolChanged( - (browsers: CdpBrowser[]) => { - setCdpBrowsers(browsers); - } - ); + if (!electronAPI?.onCdpPoolChanged) return; + const cleanup = electronAPI.onCdpPoolChanged((browsers: CdpBrowser[]) => { + setCdpBrowsers(browsers); + }); return cleanup; - }, []); + }, [electronAPI]); const handleRemoveBrowser = async (browserId: string) => { setDeletingBrowser(browserId); try { - if (window.electronAPI?.removeCdpBrowser) { - const result = await window.electronAPI.removeCdpBrowser(browserId); + if (electronAPI?.removeCdpBrowser && isDesktopMode) { + const result = await electronAPI.removeCdpBrowser(browserId); if (result.success) { toast.success(t('layout.browser-removed')); } else { toast.error(result.error || t('layout.failed-to-remove-browser')); } + } else if (browserToRemove) { + const result = await fetchDelete( + `/browser/cdp/${browserToRemove.port}` + ); + if (result?.success) { + toast.success(t('layout.browser-removed')); + await loadCdpBrowsers(); + } else { + toast.error(result?.error || t('layout.failed-to-remove-browser')); + } } } catch (error: any) { toast.error(error?.message || t('layout.failed-to-remove-browser')); @@ -88,8 +105,13 @@ export default function CDP() { toast.loading(t('layout.launching-browser', { port: '...' }), { id: 'launch-browser', }); - const result = await window.electronAPI?.launchCdpBrowser(); + const result = isDesktopMode + ? 
await electronAPI?.launchCdpBrowser() + : await fetchPost('/browser/cdp/launch'); if (result?.success) { + if (!isDesktopMode) { + await loadCdpBrowsers(); + } toast.success(t('layout.browser-launched', { port: result.port }), { id: 'launch-browser', }); @@ -126,23 +148,24 @@ export default function CDP() { setConnectChecking(true); setConnectError(''); - let timeoutId: ReturnType | null = null; try { - const controller = new AbortController(); - timeoutId = setTimeout(() => controller.abort(), 3000); - const response = await fetch(`http://localhost:${portNum}/json/version`, { - signal: controller.signal, - }); - clearTimeout(timeoutId); - timeoutId = null; + if (electronAPI?.addCdpBrowser && isDesktopMode) { + const controller = new AbortController(); + const timeoutId = setTimeout(() => controller.abort(), 3000); + const response = await fetch( + `http://localhost:${portNum}/json/version`, + { + signal: controller.signal, + } + ); + clearTimeout(timeoutId); - if (!response.ok) { - setConnectError(t('layout.no-browser-on-port', { port: portNum })); - return; - } + if (!response.ok) { + setConnectError(t('layout.no-browser-on-port', { port: portNum })); + return; + } - if (window.electronAPI?.addCdpBrowser) { - const addResult = await window.electronAPI.addCdpBrowser( + const addResult = await electronAPI.addCdpBrowser( portNum, true, `External Browser (${portNum})` @@ -154,8 +177,17 @@ export default function CDP() { return; } } else { - setConnectError(t('layout.failed-to-add-browser')); - return; + const connectResult = await fetchPost('/browser/cdp/connect', { + port: portNum, + name: `External Browser (${portNum})`, + }); + if (!connectResult?.success) { + setConnectError( + connectResult?.error || t('layout.failed-to-add-browser') + ); + return; + } + await loadCdpBrowsers(); } toast.success(t('layout.connected-browser', { port: portNum })); @@ -163,7 +195,6 @@ export default function CDP() { } catch { setConnectError(t('layout.no-browser-on-port', { port: 
portNum })); } finally { - if (timeoutId) clearTimeout(timeoutId); setConnectChecking(false); } }; diff --git a/src/pages/Browser/Cookies.tsx b/src/pages/Browser/Cookies.tsx index 141e50d37..f14ed76b7 100644 --- a/src/pages/Browser/Cookies.tsx +++ b/src/pages/Browser/Cookies.tsx @@ -15,6 +15,7 @@ import { fetchDelete, fetchGet, fetchPost } from '@/api/http'; import AlertDialog from '@/components/ui/alertDialog'; import { Button } from '@/components/ui/button'; +import { useHost } from '@/host'; import { Cookie, Plus, RefreshCw, Trash2 } from 'lucide-react'; import { useEffect, useState } from 'react'; import { useTranslation } from 'react-i18next'; @@ -33,6 +34,8 @@ interface GroupedDomain { } export default function Cookies() { + const host = useHost(); + const electronAPI = host?.electronAPI; const { t } = useTranslation(); const [loginLoading, setLoginLoading] = useState(false); const [cookiesLoading, setCookiesLoading] = useState(false); @@ -195,8 +198,8 @@ export default function Cookies() { }; const handleRestartApp = () => { - if (window.electronAPI && window.electronAPI.restartApp) { - window.electronAPI.restartApp(); + if (electronAPI?.restartApp) { + electronAPI.restartApp(); } else { toast.error('Restart function not available'); } diff --git a/src/pages/Connectors/MCP.tsx b/src/pages/Connectors/MCP.tsx index 67cd499c4..c6d815c58 100644 --- a/src/pages/Connectors/MCP.tsx +++ b/src/pages/Connectors/MCP.tsx @@ -12,6 +12,7 @@ // limitations under the License. // ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. 
========= +import { mcpInstall, mcpRemove, mcpUpdate } from '@/api/brain'; import { fetchGet, fetchPost, @@ -413,18 +414,15 @@ export default function SettingMCP() { }; await proxyFetchPut(`/api/v1/mcp/users/${showConfig.id}`, mcpData); - if (window.ipcRenderer) { - //Partial payload to empty env {} - const payload: any = { - description: configForm.mcp_desc, - command: configForm.command, - args: arrayToArgsJson(configForm.argsArr), - }; - if (configForm.env && Object.keys(configForm.env).length > 0) { - payload.env = configForm.env; - } - window.ipcRenderer.invoke('mcp-update', mcpData.mcp_name, payload); + const payload: Record = { + description: configForm.mcp_desc, + command: configForm.command, + args: arrayToArgsJson(configForm.argsArr), + }; + if (configForm.env && Object.keys(configForm.env).length > 0) { + payload.env = configForm.env; } + await mcpUpdate(mcpData.mcp_name, payload); setShowConfig(null); fetchList(); @@ -496,10 +494,10 @@ export default function SettingMCP() { setInstalling(false); return; } - if (window.ipcRenderer) { - const mcpServers = data['mcpServers']; + const mcpServers = data['mcpServers']; + if (mcpServers && typeof mcpServers === 'object') { for (const [key, value] of Object.entries(mcpServers)) { - await window.ipcRenderer.invoke('mcp-install', key, value); + await mcpInstall(key, value as Record); } } } @@ -522,11 +520,7 @@ export default function SettingMCP() { try { checkAgentTool(deleteTarget.mcp_name); await proxyFetchDelete(`/api/v1/mcp/users/${deleteTarget.id}`); - // notify main process - if (window.ipcRenderer) { - console.log('deleteTarget', deleteTarget.mcp_key); - await window.ipcRenderer.invoke('mcp-remove', deleteTarget.mcp_key); - } + await mcpRemove(deleteTarget.mcp_key); setDeleteTarget(null); fetchList(); } finally { diff --git a/src/pages/Connectors/components/MCPMarket.tsx b/src/pages/Connectors/components/MCPMarket.tsx index e4ff96351..0c16c82b7 100644 --- a/src/pages/Connectors/components/MCPMarket.tsx +++ 
b/src/pages/Connectors/components/MCPMarket.tsx @@ -12,6 +12,7 @@ // limitations under the License. // ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +import { mcpInstall, mcpRemove } from '@/api/brain'; import { proxyFetchDelete, proxyFetchGet, proxyFetchPost } from '@/api/http'; import githubIcon from '@/assets/github.svg'; import AnthropicIcon from '@/assets/mcp/Anthropic.svg?url'; @@ -218,13 +219,8 @@ export default function MCPMarket({ } setInstalled((prev) => ({ ...prev, [id]: true })); setInstalledIds((prev) => [...prev, id]); - // notify main process - if (window.ipcRenderer && mcpItem) { - await window.ipcRenderer.invoke( - 'mcp-install', - mcpItem.key, - mcpItem.install_command - ); + if (mcpItem?.install_command) { + await mcpInstall(mcpItem.key, mcpItem.install_command); } } catch (e) { console.error('Error installing MCP:', e); @@ -256,10 +252,7 @@ export default function MCPMarket({ } console.log('deleteTarget', deleteTarget); await proxyFetchDelete(`/api/v1/mcp/users/${id}`); - // notify main process - if (window.ipcRenderer) { - await window.ipcRenderer.invoke('mcp-remove', deleteTarget.key); - } + await mcpRemove(deleteTarget.key); setInstalledIds((prev) => prev.filter((item) => item !== deleteTarget.id) ); diff --git a/src/pages/Home.tsx b/src/pages/Home.tsx index f3d7aaf25..5bd32b864 100644 --- a/src/pages/Home.tsx +++ b/src/pages/Home.tsx @@ -12,12 +12,13 @@ // limitations under the License. // ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. 
========= -import { checkLocalServerStale } from '@/api/http'; +import { checkLocalServerStale, uploadFileToBrain } from '@/api/http'; import ChatBox from '@/components/ChatBox'; import Folder from '@/components/Folder'; import UpdateElectron from '@/components/update'; import Workflow from '@/components/WorkFlow'; import useChatStoreAdapter from '@/hooks/useChatStoreAdapter'; +import { useHost } from '@/host'; import { ChatTaskStatus } from '@/types/constants'; import { ReactFlowProvider } from '@xyflow/react'; import { AnimatePresence, motion } from 'framer-motion'; @@ -114,18 +115,15 @@ function ConnectionStatusIcon({ } export default function Home() { + const host = useHost(); const { t } = useTranslation(); - //Get Chatstore for the active project's task const { chatStore, projectStore } = useChatStoreAdapter(); const { - activeTab, activeWorkspaceTab, setActiveWorkspaceTab, chatPanelPosition, - hasTriggers, setHasTriggers, - hasAgentFiles, setHasAgentFiles, unviewedTabs, markTabAsUnviewed, @@ -134,7 +132,7 @@ export default function Home() { const { wsConnectionStatus, triggerReconnect } = useTriggerStore(); const authStore = useAuthStore.getState(); - const [activeWebviewId, setActiveWebviewId] = useState(null); + const [_activeWebviewId, setActiveWebviewId] = useState(null); const [isChatBoxVisible, setIsChatBoxVisible] = useState(true); const [addWorkerDialogOpen, setAddWorkerDialogOpen] = useState(false); const [triggerDialogOpen, setTriggerDialogOpen] = useState(false); @@ -151,27 +149,28 @@ export default function Home() { // Get the active project's folder path const activeProjectId = projectStore.activeProjectId; - if (!activeProjectId) return; + if (host?.ipcRenderer && !activeProjectId) return; - // Upload files using electron API + // Upload files using Electron API or Brain upload endpoint in pure Web mode. 
for (const file of Array.from(files)) { try { - const reader = new FileReader(); - reader.onload = async () => { - if (reader.result && window.ipcRenderer) { - await window.ipcRenderer.invoke('save-file-to-agent-folder', { + if (host?.ipcRenderer) { + const content = await file.arrayBuffer(); + if (activeProjectId) { + await host.ipcRenderer.invoke('save-file-to-agent-folder', { projectId: activeProjectId, fileName: file.name, - content: reader.result, + content, }); - // Mark the inbox tab as having new content - setHasAgentFiles(true); - if (activeWorkspaceTab !== 'inbox') { - markTabAsUnviewed('inbox'); - } } - }; - reader.readAsArrayBuffer(file); + } else { + await uploadFileToBrain(file); + } + // Mark the inbox tab as having new content + setHasAgentFiles(true); + if (activeWorkspaceTab !== 'inbox') { + markTabAsUnviewed('inbox'); + } } catch (error) { console.error('Error uploading file:', error); } @@ -181,17 +180,18 @@ export default function Home() { e.target.value = ''; }; - // One-time check: warn if local server is outdated after a git pull - useEffect(() => { - checkLocalServerStale(); - }, []); - // Detect files and triggers when project loads useEffect(() => { const detectAgentFiles = async () => { - if (!projectStore.activeProjectId || !authStore.email) return; + if ( + !projectStore.activeProjectId || + !authStore.email || + !host?.ipcRenderer + ) { + return; + } try { - const files = await window.ipcRenderer?.invoke( + const files = await host.ipcRenderer.invoke( 'get-project-file-list', authStore.email, projectStore.activeProjectId @@ -212,21 +212,24 @@ export default function Home() { authStore.email, setHasAgentFiles, setHasTriggers, + host, ]); - // Add webview-show listener in useEffect with cleanup + // One-time check: warn if local server is outdated after a git pull + useEffect(() => { + checkLocalServerStale(); + }, []); + useEffect(() => { + if (!host?.ipcRenderer) return; const handleWebviewShow = (_event: any, id: string) => { 
setActiveWebviewId(id); }; - - window.ipcRenderer?.on('webview-show', handleWebviewShow); - - // Cleanup: remove listener on unmount + host.ipcRenderer.on('webview-show', handleWebviewShow); return () => { - window.ipcRenderer?.off('webview-show', handleWebviewShow); + host.ipcRenderer?.off('webview-show', handleWebviewShow); }; - }, []); // Empty dependency array means this only runs once + }, [host]); // Extract complex dependency to a variable const taskAssigning = @@ -270,12 +273,15 @@ export default function Home() { // capture webview const captureWebview = async () => { + if (!host?.ipcRenderer) { + return; + } const activeTask = chatStore.tasks[chatStore.activeTaskId as string]; if (!activeTask || activeTask.status === ChatTaskStatus.FINISHED) { return; } webviews.map((webview) => { - window.ipcRenderer + host.ipcRenderer .invoke('capture-webview', webview.id) .then((base64: string) => { const currentTask = @@ -329,20 +335,21 @@ export default function Home() { clearInterval(intervalTimer); } }; - }, [chatStore, taskAssigning]); + }, [chatStore, taskAssigning, host]); const getSize = useCallback(() => { + if (!host?.electronAPI?.setSize) return; const webviewContainer = document.getElementById('webview-container'); if (webviewContainer) { const rect = webviewContainer.getBoundingClientRect(); - window.electronAPI.setSize({ + host.electronAPI.setSize({ x: rect.left, y: rect.top, width: rect.width, height: rect.height, }); } - }, []); + }, [host]); useEffect(() => { if (!chatStore) return; diff --git a/src/pages/Login.tsx b/src/pages/Login.tsx index fdd5a3c61..7320b3c7a 100644 --- a/src/pages/Login.tsx +++ b/src/pages/Login.tsx @@ -20,7 +20,13 @@ import { useLocation, useNavigate } from 'react-router-dom'; import { proxyFetchGet, proxyFetchPost } from '@/api/http'; import WindowControls from '@/components/WindowControls'; +import { useHost } from '@/host'; import { hasStackKeys } from '@/lib'; +import { + DESKTOP_LOGIN_CALLBACK_URL, + getExternalLoginUrl, + 
getWebLoginCallbackUrl, +} from '@/pages/loginUtils'; import { useTranslation } from 'react-i18next'; import background from '@/assets/background.png'; @@ -31,6 +37,7 @@ const IS_LOCAL_MODE = import.meta.env.VITE_USE_LOCAL_PROXY === 'true'; let lock = false; export default function Login() { + const host = useHost(); // Always call hooks unconditionally - React Hooks must be called in the same order const stackApp = useStackApp(); const app = HAS_STACK_KEYS ? stackApp : null; @@ -48,7 +55,9 @@ export default function Login() { const [generalError, setGeneralError] = useState(''); const [callbackUrl, setCallbackUrl] = useState(null); const titlebarRef = useRef(null); + const handledWebTokenRef = useRef(null); const [platform, setPlatform] = useState(''); + const isDesktopHost = !!host?.ipcRenderer && !!host?.electronAPI; const getLoginErrorMessage = useCallback( (data: any) => { @@ -213,12 +222,13 @@ export default function Login() { [location.pathname, handleLoginByStack, handleGetToken, setIsLoading] ); - // Listen for direct token callback from Electron (eigent.ai login redirect) - useEffect(() => { - const handleTokenReceived = async (_event: any, token: string) => { + const handleTokenLogin = useCallback( + async (token: string) => { if (!token) return; + + setGeneralError(''); setIsLoading(true); - // Temporarily set token so proxyFetchGet can use it for auth + setModelType('cloud'); setAuth({ email: '', token, username: '', user_id: 0 }); setLocalProxyValue(import.meta.env.VITE_USE_LOCAL_PROXY || null); try { @@ -237,49 +247,85 @@ export default function Login() { userInfo.id || JSON.parse(atob(token.split('.')[1])).id || 0, }); } + navigate('/', { replace: true }); } catch (e) { console.error('Failed to fetch user info:', e); + setGeneralError(t('layout.login-failed-please-try-again')); + } finally { + setIsLoading(false); } - navigate('/'); + }, + [ + navigate, + setAuth, + setGeneralError, + setIsLoading, + setLocalProxyValue, + setModelType, + t, + ] + ); 
+ + // Listen for direct token callback from Electron (eigent.ai login redirect) + useEffect(() => { + if (!host?.ipcRenderer) return; + + const handleTokenReceived = async (_event: any, token: string) => { + await handleTokenLogin(token); }; - window.ipcRenderer?.on('auth-token-received', handleTokenReceived); + host.ipcRenderer.on('auth-token-received', handleTokenReceived); return () => { - window.ipcRenderer?.off('auth-token-received', handleTokenReceived); + host.ipcRenderer?.off('auth-token-received', handleTokenReceived); }; - }, [setAuth, setLocalProxyValue, navigate]); + }, [handleTokenLogin, host]); // Listen for auth code callback from Electron (Stack Auth OAuth flow) useEffect(() => { - window.ipcRenderer?.on('auth-code-received', handleAuthCode); - + if (!host?.ipcRenderer) return; + host.ipcRenderer.on('auth-code-received', handleAuthCode); return () => { - window.ipcRenderer?.off('auth-code-received', handleAuthCode); + host.ipcRenderer?.off('auth-code-received', handleAuthCode); }; - }, [handleAuthCode]); + }, [handleAuthCode, host]); useEffect(() => { - const p = window.electronAPI.getPlatform(); - setPlatform(p); + if (IS_LOCAL_MODE || isDesktopHost) return; - if (platform === 'darwin') { + const token = new URLSearchParams(location.search).get('token'); + if (!token || handledWebTokenRef.current === token) return; + + handledWebTokenRef.current = token; + handleTokenLogin(token); + }, [handleTokenLogin, isDesktopHost, location.search]); + + useEffect(() => { + if (!host?.electronAPI?.getPlatform) { + setPlatform('web'); + return; + } + const p = host.electronAPI.getPlatform(); + setPlatform(p); + if (p === 'darwin') { titlebarRef.current?.classList.add('mac'); } - }, [platform]); + }, [host]); // Handle before-close event for login page useEffect(() => { + if (!host?.ipcRenderer || !host?.electronAPI) return; + const handleBeforeClose = () => { - window.electronAPI.closeWindow(true); + host.electronAPI.closeWindow(true); }; - 
window.ipcRenderer?.on('before-close', handleBeforeClose); + host.ipcRenderer.on('before-close', handleBeforeClose); return () => { - window.ipcRenderer?.off('before-close', handleBeforeClose); + host.ipcRenderer?.off('before-close', handleBeforeClose); }; - }, []); + }, [host]); // Hybrid/app mode: prepare auth callback URL on mount (don't auto-open browser) useEffect(() => { @@ -287,12 +333,14 @@ export default function Login() { const prepareCallbackUrl = async () => { let cbUrl: string; - if (import.meta.env.PROD) { - cbUrl = 'eigent://auth/callback'; + if (!isDesktopHost) { + cbUrl = getWebLoginCallbackUrl(window.location.origin); + } else if (import.meta.env.PROD) { + cbUrl = DESKTOP_LOGIN_CALLBACK_URL; } else { - cbUrl = 'eigent://auth/callback'; + cbUrl = DESKTOP_LOGIN_CALLBACK_URL; try { - const url = await window.ipcRenderer?.invoke('get-auth-callback-url'); + const url = await host?.ipcRenderer?.invoke('get-auth-callback-url'); if (url) cbUrl = url; } catch (e) { // Fallback to eigent:// protocol @@ -302,7 +350,7 @@ export default function Login() { }; prepareCallbackUrl(); - }, []); + }, [host, isDesktopHost]); // Render local mode: "Start Eigent" button only const renderLocalMode = () => ( @@ -351,11 +399,19 @@ export default function Login() {