diff --git a/clawbot/agent/runtime.py b/clawbot/agent/runtime.py index 3bfc6e8..e014453 100644 --- a/clawbot/agent/runtime.py +++ b/clawbot/agent/runtime.py @@ -5,6 +5,7 @@ from clawbot.core.config.loader import load_config from clawbot.core.config.schema import Config +from clawbot.providers.chatgpt_provider import ChatGPTProvider from clawbot.providers.custom_provider import CustomProvider from clawbot.providers.litellm_provider import LiteLLMProvider from clawbot.providers.openai_codex_provider import OpenAICodexProvider @@ -29,6 +30,9 @@ def make_provider(config: Config) -> Any: if provider_name == "openai_codex" or (model or "").startswith("openai-codex/"): return OpenAICodexProvider(default_model=model) + if provider_name == "chatgpt" or (model or "").startswith("chatgpt/"): + return ChatGPTProvider(default_model=model) + if provider_name == "custom": return CustomProvider( api_key=p.api_key if p else "no-key", diff --git a/clawbot/providers/chatgpt_provider.py b/clawbot/providers/chatgpt_provider.py new file mode 100644 index 0000000..6ff23fe --- /dev/null +++ b/clawbot/providers/chatgpt_provider.py @@ -0,0 +1,95 @@ +"""ChatGPT Plus Provider — OAuth-based, same login as OpenAI Codex.""" + +from __future__ import annotations + +import asyncio + +from loguru import logger + +from clawbot.providers.base import LLMResponse +from clawbot.providers.openai_codex_provider import ( + OpenAICodexProvider, + _build_headers, + _convert_messages, + _convert_tools, + _get_agent_oauth_token, + _request_codex, +) + +DEFAULT_CHATGPT_URL = "https://chatgpt.com/backend-api/codex/responses" +DEFAULT_MODEL = "chatgpt/gpt-4o" + + +class ChatGPTProvider(OpenAICodexProvider): + """ChatGPT Plus provider. + + Uses the same OpenAI OAuth login as OpenAI Codex (shared ``codex.json`` + token file). Strips the ``chatgpt/`` model prefix before sending the + request so the bare model name (e.g. ``gpt-4o``) reaches the API. 
+ """ + + def __init__(self, default_model: str = DEFAULT_MODEL): + super().__init__(default_model=default_model) + + async def chat( + self, + messages: list, + tools: list | None = None, + model: str | None = None, + max_tokens: int = 4096, + temperature: float = 0.7, + ): + model = model or self.default_model + system_prompt, input_items = _convert_messages(messages) + + token = await asyncio.to_thread(_get_agent_oauth_token, "CHATGPT_OAUTH_TOKEN") + headers = _build_headers(token.account_id, token.access) + + body = { + "model": _strip_chatgpt_prefix(model), + "store": False, + "stream": True, + "instructions": system_prompt, + "input": input_items, + "text": {"verbosity": "medium"}, + "include": ["reasoning.encrypted_content"], + "tool_choice": "auto", + "parallel_tool_calls": True, + } + + if tools: + body["tools"] = _convert_tools(tools) + + try: + try: + content, tool_calls, finish_reason = await _request_codex( + DEFAULT_CHATGPT_URL, headers, body, verify=True + ) + except Exception as e: + if "CERTIFICATE_VERIFY_FAILED" not in str(e): + raise + logger.warning( + "SSL certificate verification failed for ChatGPT API; retrying with verify=False" + ) + content, tool_calls, finish_reason = await _request_codex( + DEFAULT_CHATGPT_URL, headers, body, verify=False + ) + return LLMResponse( + content=content, + tool_calls=tool_calls, + finish_reason=finish_reason, + ) + except Exception as e: + return LLMResponse( + content=f"Error calling ChatGPT: {str(e)}", + finish_reason="error", + ) + + def get_default_model(self) -> str: + return self.default_model + + +def _strip_chatgpt_prefix(model: str) -> str: + if model.startswith("chatgpt/"): + return model.split("/", 1)[1] + return model diff --git a/clawbot/providers/openai_codex_provider.py b/clawbot/providers/openai_codex_provider.py index d068bdd..3357c09 100644 --- a/clawbot/providers/openai_codex_provider.py +++ b/clawbot/providers/openai_codex_provider.py @@ -5,11 +5,15 @@ import asyncio import hashlib import 
# Refresh margin: treat tokens expiring within this window as already stale (ms).
_TOKEN_EXPIRY_SKEW_MS = 60 * 1000


def _get_agent_oauth_token(env_var: str = "OPENAI_CODEX_OAUTH_TOKEN") -> OAuthToken:
    """Return an OAuth token for OpenAI, preferring the per-agent env var.

    Agent containers receive the token as a JSON string injected from the
    agent's stored provider config under the env var declared in the registry
    (e.g. ``OPENAI_CODEX_OAUTH_TOKEN``, ``CHATGPT_OAUTH_TOKEN``), exactly like
    any other API key.  Falls back to the global FileTokenStorage (via
    ``get_codex_token``) for local runs or when the env value is unusable.

    Args:
        env_var: Name of the environment variable holding the JSON token
            (keys: ``access``, ``refresh``, ``expires``, optional ``account_id``).

    Returns:
        A valid ``OAuthToken``, refreshed if it was within one minute of expiry.
    """
    env_val = os.environ.get(env_var)
    if env_val:
        try:
            data = json.loads(env_val)
            token = OAuthToken(
                access=data["access"],
                refresh=data["refresh"],
                expires=int(data["expires"]),
                account_id=data.get("account_id"),
            )
            # `expires` is a millisecond epoch timestamp; refresh proactively
            # so the request never goes out with an almost-expired token.
            now_ms = int(time.time() * 1000)
            if token.expires - now_ms <= _TOKEN_EXPIRY_SKEW_MS:
                token = _refresh_token(token.refresh, OPENAI_CODEX_PROVIDER)
            return token
        except Exception as exc:
            # Best-effort by design: a malformed env token or a failed refresh
            # falls back to file storage — but log it instead of silently
            # swallowing, so broken agent config is diagnosable.
            logger.warning("Ignoring unusable OAuth token in {}: {}", env_var, exc)
    return get_codex_token()
keywords=("openai-codex", "codex"), - env_key="", # OAuth-based, no API key + env_key="OPENAI_CODEX_OAUTH_TOKEN", display_name="OpenAI Codex", litellm_prefix="", # Not routed through LiteLLM skip_prefixes=(), @@ -190,6 +190,24 @@ def label(self) -> str: model_overrides=(), is_oauth=True, # OAuth-based authentication ), + # ChatGPT Plus: uses OAuth (same OpenAI login as Codex), not API key. + ProviderSpec( + name="chatgpt", + keywords=("chatgpt",), + env_key="CHATGPT_OAUTH_TOKEN", + display_name="ChatGPT Plus", + litellm_prefix="", # Not routed through LiteLLM + skip_prefixes=(), + env_extras=(), + is_gateway=False, + is_local=False, + detect_by_key_prefix="", + detect_by_base_keyword="", + default_api_base="https://chatgpt.com/backend-api", + strip_model_prefix=False, + model_overrides=(), + is_oauth=True, # OAuth-based authentication (shared with OpenAI Codex) + ), # Github Copilot: uses OAuth, not API key. ProviderSpec( name="github_copilot", diff --git a/clawforce-ui/src/components/agent-detail/constants.ts b/clawforce-ui/src/components/agent-detail/constants.ts index d6939de..d722fbb 100644 --- a/clawforce-ui/src/components/agent-detail/constants.ts +++ b/clawforce-ui/src/components/agent-detail/constants.ts @@ -128,6 +128,10 @@ export const CHANNEL_DEFS: { key: string; label: string; icon: React.ReactNode; ]; export const PROVIDER_DEFS: ProviderDef[] = [ + // Subscription-based (OAuth) — no API key required + { field: "chatgpt", label: "ChatGPT Plus", keywords: ["chatgpt"], oauth: true }, + { field: "openai_codex", label: "OpenAI Codex", keywords: ["openai-codex", "codex"], oauth: true }, + // API key / token providers { field: "anthropic", label: "Anthropic", keywords: ["anthropic", "claude"] }, { field: "openai", label: "OpenAI", keywords: ["openai", "gpt", "o1", "o3", "o4"] }, { field: "openrouter", label: "OpenRouter", keywords: ["openrouter"] }, @@ -139,6 +143,8 @@ export const PROVIDER_DEFS: ProviderDef[] = [ { field: "together", label: "Together AI", 
keywords: ["together"] }, { field: "bedrock", label: "AWS Bedrock", keywords: ["bedrock"] }, { field: "azure", label: "Azure OpenAI", keywords: ["azure"] }, + // GitHub Copilot — paste token from `gh auth token` or VS Code Copilot extension + { field: "github_copilot", label: "GitHub Copilot", keywords: ["github_copilot", "copilot"] }, { field: "moonshot", label: "Moonshot / Kimi", keywords: ["moonshot", "kimi"] }, { field: "dashscope", label: "DashScope / Qwen", keywords: ["dashscope", "qwen"] }, { field: "zhipu", label: "Zhipu AI", keywords: ["zhipu", "glm"] }, @@ -146,8 +152,6 @@ export const PROVIDER_DEFS: ProviderDef[] = [ { field: "aihubmix", label: "AiHubMix", keywords: ["aihubmix"] }, { field: "siliconflow", label: "SiliconFlow", keywords: ["siliconflow"] }, { field: "vllm", label: "vLLM / Local", keywords: ["vllm"] }, - { field: "openai_codex", label: "OpenAI Codex", keywords: ["openai-codex", "codex"], oauth: true }, - { field: "github_copilot", label: "GitHub Copilot", keywords: ["github_copilot", "copilot"], oauth: true }, ]; export const SECURITY_PRESETS = { diff --git a/clawforce-ui/src/components/agent-detail/settings/ModelProviderSection.tsx b/clawforce-ui/src/components/agent-detail/settings/ModelProviderSection.tsx index 6d41480..59d6b4d 100644 --- a/clawforce-ui/src/components/agent-detail/settings/ModelProviderSection.tsx +++ b/clawforce-ui/src/components/agent-detail/settings/ModelProviderSection.tsx @@ -48,6 +48,7 @@ export function ModelProviderSection({ setApiKey(savedKey); } }, [savedKey]); + const [models, setModels] = useState([]); const [loadingModels, setLoadingModels] = useState(false); const [modelError, setModelError] = useState(""); @@ -56,8 +57,16 @@ export function ModelProviderSection({ const dropdownRef = useRef(null); const searchRef = useRef(null); - // Non-OAuth providers that need API keys + // OAuth state + const [oauthAuthorized, setOauthAuthorized] = useState(null); + const [oauthLoading, setOauthLoading] = 
useState(false); + const [oauthPending, setOauthPending] = useState(false); // waiting for user to finish in browser + const [oauthError, setOauthError] = useState(""); + const [oauthAccountId, setOauthAccountId] = useState(null); + const pollRef = useRef | null>(null); + const providerDef = PROVIDER_DEFS.find((p) => p.field === selectedProvider); + // Non-OAuth providers need API keys const needsKey = providerDef && !providerDef.oauth; // Close dropdown on outside click @@ -73,9 +82,79 @@ export function ModelProviderSection({ return () => document.removeEventListener("mousedown", onClickOutside); }, [modelDropdownOpen]); - // Auto-fetch models when provider changes (static providers, or if there's a saved key) + // Stop polling when component unmounts or provider changes + useEffect(() => { + return () => { + if (pollRef.current !== null) clearInterval(pollRef.current); + }; + }, []); + + // Check OAuth status when an OAuth provider is selected + useEffect(() => { + if (!providerDef?.oauth) { + setOauthAuthorized(null); + setOauthError(""); + setOauthAccountId(null); + setOauthPending(false); + if (pollRef.current !== null) { clearInterval(pollRef.current); pollRef.current = null; } + return; + } + setOauthAuthorized(null); + setOauthError(""); + setOauthAccountId(null); + setOauthPending(false); + if (pollRef.current !== null) { clearInterval(pollRef.current); pollRef.current = null; } + api.providers.oauthStatus(selectedProvider, agentId) + .then((r) => { + setOauthAuthorized(r.authorized); + setOauthAccountId(r.account_id ?? null); + }) + .catch(() => setOauthAuthorized(false)); + }, [selectedProvider, providerDef?.oauth]); + + function startPolling(provider: string) { + if (pollRef.current !== null) clearInterval(pollRef.current); + pollRef.current = setInterval(async () => { + try { + const r = await api.providers.oauthStatus(provider, agentId); + if (r.authorized) { + setOauthAuthorized(true); + setOauthAccountId(r.account_id ?? 
null); + setOauthPending(false); + if (pollRef.current !== null) { clearInterval(pollRef.current); pollRef.current = null; } + } + } catch { /* ignore poll errors */ } + }, 2000); + } + + function cancelPolling() { + if (pollRef.current !== null) { clearInterval(pollRef.current); pollRef.current = null; } + setOauthPending(false); + } + + // Load model list once OAuth provider is authorized; auto-select first model if none chosen + useEffect(() => { + if (!providerDef?.oauth || !oauthAuthorized) return; + setLoadingModels(true); + setModelError(""); + api.providers.listModels(selectedProvider, "", "") + .then((r) => { + setModels(r.models); + // Auto-select the first model when no model is set for this provider yet + const currentProviderPrefix = `${selectedProvider}/`; + const hasModel = model && model.startsWith(currentProviderPrefix); + if (!hasModel && r.models.length > 0) { + onModelChange(`${selectedProvider}/${r.models[0].id}`); + } + }) + .catch(() => {}) + .finally(() => setLoadingModels(false)); + }, [selectedProvider, oauthAuthorized, providerDef?.oauth]); + + // Auto-fetch models when provider changes (static/API-key providers only) useEffect(() => { if (!selectedProvider) { setModels([]); return; } + if (providerDef?.oauth) return; // handled by the OAuth effect above const isStatic = ["bedrock", "azure"].includes(selectedProvider); const hasSavedKey = !!(savedKey && savedKey.length > 0); if (isStatic || hasSavedKey) { @@ -88,7 +167,23 @@ export function ModelProviderSection({ .catch(() => {}) .finally(() => setLoadingModels(false)); } - }, [selectedProvider, agentId, savedKey]); + }, [selectedProvider, agentId, savedKey, providerDef?.oauth]); + + async function handleOAuthAuthorize() { + setOauthLoading(true); + setOauthError(""); + cancelPolling(); + try { + const r = await api.providers.oauthAuthorize(selectedProvider, agentId); + window.open(r.auth_url, "_blank", "noopener,noreferrer"); + setOauthPending(true); + startPolling(selectedProvider); + } 
catch (err) { + setOauthError((err instanceof Error ? err.message : String(err)).replace(/^API \d+: /, "")); + } finally { + setOauthLoading(false); + } + } function doFetch() { if (!selectedProvider) return; @@ -129,6 +224,7 @@ export function ModelProviderSection({ if (needsKey && apiKey) { onProviderKeyChange(selectedProvider, apiKey); } + // OAuth providers: no key to propagate — credentials live in the OS credential store onModelChange(fullModel); setModelDropdownOpen(false); setModelSearch(""); @@ -149,9 +245,9 @@ export function ModelProviderSection({ return (
- {/* Row 1: Provider + API Key */} + {/* Row 1: Provider + API Key / OAuth */}
-
+
+ + {/* OAuth authorization UI */} + {selectedProvider && providerDef?.oauth && ( +
+ + {oauthAuthorized === null && !oauthPending && ( +

Checking status…

+ )} + {oauthAuthorized === true && ( +
+ + + Connected{oauthAccountId ? ` · ${oauthAccountId.slice(0, 8)}…` : ""} + + +
+ )} + {oauthPending && ( +
+
+ + + + + Waiting for authorization… + +
+

Complete sign-in in the browser tab that just opened.

+
+ )} + {oauthAuthorized === false && !oauthPending && ( +
+ + {oauthError &&

{oauthError}

} +

Opens a new tab to authorize

+
+ )} +
+ )} + + {/* API key input for non-OAuth providers */} {selectedProvider && needsKey && (
@@ -203,7 +367,11 @@ export function ModelProviderSection({ {loadingModels ? "Loading models…" : models.length === 0 - ? (needsKey ? (savedKey ? "Click Fetch models to load" : "Enter API key and fetch models") : "Select a provider first") + ? (needsKey + ? (savedKey ? "Click Fetch models to load" : "Enter API key and fetch models") + : providerDef?.oauth + ? (oauthAuthorized ? "Select a model…" : "Connect first to browse models") + : "Select a provider first") : currentModelDisplay || "Select a model…"} diff --git a/clawforce-ui/src/lib/api.ts b/clawforce-ui/src/lib/api.ts index b2fe1ec..6f11668 100644 --- a/clawforce-ui/src/lib/api.ts +++ b/clawforce-ui/src/lib/api.ts @@ -341,6 +341,15 @@ export const api = { "/providers/models", { provider, api_key: apiKey, agent_id: agentId || "" }, ), + oauthStatus: (provider: string, agentId?: string) => + request<{ provider: string; authorized: boolean; account_id?: string }>( + `/providers/oauth/${encodeURIComponent(provider)}/status${agentId ? `?agent_id=${encodeURIComponent(agentId)}` : ""}`, + ), + oauthAuthorize: (provider: string, agentId?: string) => + post<{ auth_url: string }>( + `/providers/oauth/${encodeURIComponent(provider)}/authorize`, + { agent_id: agentId || "" }, + ), }, admin: { getSettings: () => diff --git a/clawforce/apis/providers.py b/clawforce/apis/providers.py index 9963752..403e640 100644 --- a/clawforce/apis/providers.py +++ b/clawforce/apis/providers.py @@ -1,7 +1,25 @@ """Proxy endpoint to list models from LLM providers using a user-supplied API key.""" +import asyncio +import json +import os +import time +import urllib.parse + import httpx -from fastapi import APIRouter, Depends, HTTPException, status + +try: + import docker as _docker_module # type: ignore[import] +except ImportError: + _docker_module = None # type: ignore[assignment] + +from fastapi import APIRouter, Depends, HTTPException, Request, status +from loguru import logger +from oauth_cli_kit import OPENAI_CODEX_PROVIDER, 
OAuthProviderConfig, OAuthToken, get_token +from oauth_cli_kit.flow import _exchange_code_for_token_async +from oauth_cli_kit.pkce import _create_state, _generate_pkce +from oauth_cli_kit.server import _start_local_server +from oauth_cli_kit.storage import FileTokenStorage from pydantic import BaseModel from clawforce.auth import get_current_user @@ -10,6 +28,15 @@ router = APIRouter(tags=["providers"]) +# OAuth provider configurations — maps provider config field name → OAuthProviderConfig. +# OpenAI Codex and ChatGPT Plus share the same OpenAI login (same codex.json token file). +# GitHub Copilot is NOT here — it has no public PKCE OAuth app; users supply a token +# obtained from the GitHub CLI (`gh auth token`) or the VS Code Copilot extension. +OAUTH_PROVIDER_CONFIGS: dict[str, OAuthProviderConfig] = { + "openai_codex": OPENAI_CODEX_PROVIDER, + "chatgpt": OPENAI_CODEX_PROVIDER, +} + # Provider base URLs for model listing (OpenAI-compatible /v1/models pattern) PROVIDER_ENDPOINTS: dict[str, dict] = { "anthropic": { @@ -146,6 +173,37 @@ {"id": "gpt-35-turbo", "name": "GPT-3.5 Turbo"}, ], }, + "openai_codex": { + "url": None, + "static": True, + "prefix": "openai-codex", + "models": [ + {"id": "gpt-5.1-codex", "name": "GPT-5.1 Codex"}, + {"id": "codex-mini-latest", "name": "Codex Mini (Latest)"}, + ], + }, + "chatgpt": { + "url": None, + "static": True, + "prefix": "chatgpt", + "models": [ + {"id": "gpt-4o", "name": "GPT-4o"}, + {"id": "gpt-4o-mini", "name": "GPT-4o Mini"}, + {"id": "o3", "name": "o3"}, + {"id": "o4-mini", "name": "o4 Mini"}, + ], + }, + "github_copilot": { + "url": None, + "static": True, + "prefix": "github_copilot", + "models": [ + {"id": "gpt-4o", "name": "GPT-4o (Copilot)"}, + {"id": "claude-sonnet-4-5", "name": "Claude Sonnet 4.5 (Copilot)"}, + {"id": "o3-mini", "name": "o3 Mini (Copilot)"}, + {"id": "gemini-2.0-flash-001", "name": "Gemini 2.0 Flash (Copilot)"}, + ], + }, } @@ -245,3 +303,383 @@ async def list_provider_models( 
status_code=status.HTTP_502_BAD_GATEWAY, detail=f"Failed to fetch models: {str(e)[:200]}", ) + + +@router.get("/api/providers/oauth/{provider}/status") +async def oauth_status( + provider: str, + agent_id: str = "", + _: dict = Depends(get_current_user), + agent_config_store: AgentConfigStore = Depends(get_agent_config_store), +): + """Check whether a valid OAuth token exists for a provider. + + When *agent_id* is provided the check looks at the token stored in that + agent's config (same place API keys live). Otherwise falls back to the + global FileTokenStorage used by local/direct runs. + """ + oauth_cfg = OAUTH_PROVIDER_CONFIGS.get(provider) + if not oauth_cfg: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail=f"Unknown OAuth provider: {provider}", + ) + + if agent_id: + config = agent_config_store.get_config(agent_id) or {} + token_json = (config.get("providers") or {}).get(provider, {}).get("api_key", "") + if token_json: + try: + data = json.loads(token_json) + token = OAuthToken( + access=data["access"], + refresh=data["refresh"], + expires=int(data["expires"]), + account_id=data.get("account_id"), + ) + now_ms = int(time.time() * 1000) + if token.expires - now_ms > 0: + return { + "provider": provider, + "authorized": True, + "account_id": token.account_id, + } + except Exception: + pass + return {"provider": provider, "authorized": False, "account_id": None} + + try: + token = await asyncio.to_thread(get_token, oauth_cfg) + return {"provider": provider, "authorized": True, "account_id": token.account_id} + except RuntimeError: + return {"provider": provider, "authorized": False, "account_id": None} + + +_OAUTH_CALLBACK_TIMEOUT = 300.0 # 5 minutes for the user to complete sign-in in the browser + +# Registry of in-flight OAuth flows: state → (code_future, verifier) +# The ephemeral callback container delivers the auth code here via +# POST /api/providers/oauth/internal/deliver. 
+_active_oauth_flows: dict[str, tuple["asyncio.Future[str]", str]] = {} + + +# --------------------------------------------------------------------------- +# Internal deliver endpoint — called by the callback container, not the browser +# --------------------------------------------------------------------------- + + +class _OAuthDeliverRequest(BaseModel): + code: str + state: str + + +@router.post("/api/providers/oauth/internal/deliver") +async def oauth_internal_deliver(body: _OAuthDeliverRequest): + """Receive the auth code from the ephemeral OAuth callback container. + + The callback container POSTs here after the browser lands on its + ``/auth/callback`` endpoint. No user auth required — the ``state`` value + already acts as a one-time bearer token (PKCE security model). + """ + entry = _active_oauth_flows.get(body.state) + if not entry: + # Flow may have already completed or timed out — ignore silently. + return {"ok": True} + code_future, _ = entry + if not code_future.done(): + loop = asyncio.get_event_loop() + loop.call_soon_threadsafe(code_future.set_result, body.code) + return {"ok": True} + + +# --------------------------------------------------------------------------- +# Docker callback container helpers +# --------------------------------------------------------------------------- + + +def _get_docker_client(): + """Return a Docker client if the daemon socket is accessible, else None.""" + if _docker_module is None: + return None + try: + client = _docker_module.DockerClient(base_url="unix:///var/run/docker.sock") + client.ping() + return client + except Exception: + return None + + +def _spawn_callback_container( + docker_client, + redirect_uri: str, + notify_url: str, + state: str, +): + """Start an ephemeral container that listens on the OAuth callback port. 
+ + The container runs ``clawforce.oauth_callback_server``, binds the port + extracted from *redirect_uri* on ``127.0.0.1`` of the host, and POSTs the + auth code to *notify_url* once the browser lands on ``/auth/callback``. + + Host resolution strategy (tried in order, first success wins): + + 1. ``host.docker.internal`` via ``extra_hosts: host-gateway`` — Docker on Linux + 2. ``host.containers.internal`` without extra_hosts — Podman (auto-injects this) + 3. ``host.docker.internal`` without extra_hosts — Docker Desktop (Mac / Windows, + auto-injects this hostname) + + Returns the container object, or ``None`` if all attempts fail. + """ + parsed = urllib.parse.urlparse(redirect_uri) + port = parsed.port or 1455 + image = os.environ.get("AGENT_IMAGE", "ghcr.io/saolalab/clawforce:latest") + name = f"clawforce-oauth-cb-{state[:12]}" + + # Attempts: (notify_url_to_use, extra_hosts_dict) + # host.containers.internal — Podman auto-injects this into every container. + # host.docker.internal — Docker Desktop injects this; Docker on Linux needs + # the explicit host-gateway mapping. + attempts: list[tuple[str, dict]] = [ + (notify_url, {"host.docker.internal": "host-gateway"}), + (notify_url.replace("host.docker.internal", "host.containers.internal"), {}), + (notify_url, {}), + ] + + last_exc: Exception | None = None + for effective_url, extra_hosts in attempts: + # Clean up any container left from a failed previous attempt. 
+ try: + docker_client.containers.get(name).remove(force=True) + except Exception: + pass + + kwargs: dict = { + "image": image, + "command": ["python", "-m", "clawforce.oauth_callback_server"], + "detach": True, + "name": name, + "environment": {"OAUTH_NOTIFY_URL": effective_url, "OAUTH_PORT": str(port)}, + "ports": {f"{port}/tcp": ("127.0.0.1", port)}, + "remove": False, + } + if extra_hosts: + kwargs["extra_hosts"] = extra_hosts + + try: + container = docker_client.containers.run(**kwargs) + logger.info( + "OAuth callback container started: {} (port {}, notify→{})", + name, + port, + effective_url, + ) + return container + except Exception as exc: + last_exc = exc + logger.debug("OAuth container attempt failed ({}): {}", effective_url, exc) + + logger.warning("Could not start OAuth callback container: {}", last_exc) + return None + + +def _stop_container(container) -> None: + try: + container.stop(timeout=3) + except Exception: + pass + try: + container.remove() + except Exception: + pass + + +def _build_notify_url(request: Request) -> str: + """Build the URL the callback container will POST the auth code to. + + Uses ``host.docker.internal`` so the callback container (a sibling + container on the same Docker host) can reach the clawforce server through + the host's port mapping. Falls back to ``localhost`` for non-Docker runs. + """ + host = request.headers.get("host", "localhost:8080") + scheme = request.headers.get("x-forwarded-proto") or request.url.scheme + # Replace 'localhost' with host.docker.internal so sibling containers can + # reach the clawforce server via the host machine's port mapping. 
+ container_host = host.replace("localhost", "host.docker.internal") + return f"{scheme}://{container_host}/api/providers/oauth/internal/deliver" + + +# --------------------------------------------------------------------------- +# Core PKCE flow +# --------------------------------------------------------------------------- + + +async def _run_oauth_flow( + oauth_cfg: OAuthProviderConfig, + url_ready: "asyncio.Future[str]", + notify_url: str, +) -> OAuthToken: + """Run the PKCE OAuth flow using an ephemeral Docker callback container. + + 1. Generates a PKCE pair and state token. + 2. Tries to spawn a short-lived Docker container that binds the provider's + callback port (e.g. 1455) and relays the auth code back via *notify_url*. + 3. Falls back to ``_start_local_server`` when Docker is unavailable (local + dev without socket access). + 4. Resolves *url_ready* immediately so the endpoint can return the auth URL + to the frontend, then waits up to 5 minutes for the code to arrive. + """ + verifier, challenge = _generate_pkce() + state = _create_state() + + params = { + "response_type": "code", + "client_id": oauth_cfg.client_id, + "redirect_uri": oauth_cfg.redirect_uri, + "scope": oauth_cfg.scope, + "code_challenge": challenge, + "code_challenge_method": "S256", + "state": state, + "id_token_add_organizations": "true", + "codex_cli_simplified_flow": "true", + "originator": oauth_cfg.default_originator, + } + auth_url = f"{oauth_cfg.authorize_url}?{urllib.parse.urlencode(params)}" + + loop = asyncio.get_event_loop() + code_future: asyncio.Future[str] = loop.create_future() + _active_oauth_flows[state] = (code_future, verifier) + + container = None + local_server = None + + docker_client = await asyncio.to_thread(_get_docker_client) + if docker_client is not None: + container = await asyncio.to_thread( + _spawn_callback_container, + docker_client, + oauth_cfg.redirect_uri, + notify_url, + state, + ) + + if container is None: + # Fallback: start the local server 
directly on the callback port. + # Works when clawforce is run directly on the host (not in Docker). + def _on_code(code: str) -> None: + if not code_future.done(): + loop.call_soon_threadsafe(code_future.set_result, code) + + local_server, server_error = _start_local_server(state, on_code=_on_code) + if not local_server: + raise RuntimeError( + f"OAuth callback server could not start on port 1455: {server_error}. " + "Make sure the Docker socket is mounted (-v /var/run/docker.sock:/var/run/docker.sock) " + "or that port 1455 is not already in use." + ) + + if not url_ready.done(): + url_ready.set_result(auth_url) + + try: + code = await asyncio.wait_for(code_future, timeout=_OAUTH_CALLBACK_TIMEOUT) + except asyncio.TimeoutError: + raise RuntimeError( + f"OAuth timed out — no browser callback received within " + f"{int(_OAUTH_CALLBACK_TIMEOUT)}s." + ) + finally: + _active_oauth_flows.pop(state, None) + if container is not None: + await asyncio.to_thread(_stop_container, container) + if local_server is not None: + await asyncio.to_thread(local_server.shutdown) + local_server.server_close() + + # Await the exchange coroutine directly — calling the returned coroutine + # object (a trailing "()") would raise "'coroutine' object is not callable". + token: OAuthToken = await _exchange_code_for_token_async(code, verifier, oauth_cfg) + FileTokenStorage(token_filename=oauth_cfg.token_filename).save(token) + return token + + + class OAuthAuthorizeRequest(BaseModel): + agent_id: str = "" + + + @router.post("/api/providers/oauth/{provider}/authorize") + async def oauth_authorize( + provider: str, + body: OAuthAuthorizeRequest, + request: Request, + _: dict = Depends(get_current_user), + agent_config_store: AgentConfigStore = Depends(get_agent_config_store), + ): + """Start an OAuth browser login flow for a provider. + + Spawns an ephemeral Docker container that binds the provider's callback port + (e.g. 1455) on the host, builds the authorization URL, and returns it + immediately so the frontend can open it in a new tab. 
Once the user + completes sign-in the callback container relays the code back, the token is + saved, and ``GET /api/providers/oauth/{provider}/status`` returns + ``{"authorized": true}``. + + Falls back to a local port-1455 server when the Docker socket is + unavailable (direct-run / local development without Docker). + """ + oauth_cfg = OAUTH_PROVIDER_CONFIGS.get(provider) + if not oauth_cfg: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail=f"Unknown OAuth provider: {provider}", + ) + + notify_url = _build_notify_url(request) + loop = asyncio.get_event_loop() + url_ready: asyncio.Future[str] = loop.create_future() + + agent_id = body.agent_id + + async def _bg() -> None: + try: + tok = await _run_oauth_flow(oauth_cfg, url_ready, notify_url=notify_url) + logger.info("OAuth [{}] authorized: {}", provider, tok.account_id) + + # Persist token JSON in the agent's config so inject_to_env() delivers + # it as CLAWFORCE_OPENAI_OAUTH_TOKEN — same pipeline as API keys. + if agent_id: + token_json = json.dumps( + { + "access": tok.access, + "refresh": tok.refresh, + "expires": tok.expires, + "account_id": tok.account_id, + } + ) + providers_update: dict = { + provider: {"api_key": token_json}, + } + # chatgpt and openai_codex share the same OpenAI login — store in both + sibling = {"chatgpt": "openai_codex", "openai_codex": "chatgpt"}.get(provider) + if sibling: + providers_update[sibling] = {"api_key": token_json} + agent_config_store.update_config(agent_id, {"providers": providers_update}) + logger.info("OAuth [{}] token saved to agent {}", provider, agent_id) + except Exception as exc: + logger.warning("OAuth [{}] error: {}", provider, exc) + if not url_ready.done(): + url_ready.set_exception(exc) + + asyncio.create_task(_bg()) + + try: + auth_url = await asyncio.wait_for(asyncio.shield(url_ready), timeout=15.0) + except asyncio.TimeoutError: + raise HTTPException( + status_code=status.HTTP_504_GATEWAY_TIMEOUT, + detail="Timed out starting OAuth 
flow (could not start callback container).", + ) + except Exception as exc: + raise HTTPException( + status_code=status.HTTP_502_BAD_GATEWAY, + detail=f"OAuth authorization failed: {str(exc)[:200]}", + ) + + return {"auth_url": auth_url} diff --git a/clawforce/oauth_callback_server.py b/clawforce/oauth_callback_server.py new file mode 100644 index 0000000..10e8f6d --- /dev/null +++ b/clawforce/oauth_callback_server.py @@ -0,0 +1,132 @@ +"""Minimal OAuth callback HTTP server. + +Run as an ephemeral Docker container by clawforce during an OAuth login flow:: + + docker run --rm -p 1455:1455 \\ + -e OAUTH_NOTIFY_URL=http://host.docker.internal:8080/api/providers/oauth/internal/deliver \\ + -e OAUTH_PORT=1455 \\ + clawforce:latest python -m clawforce.oauth_callback_server + +Environment variables +--------------------- +OAUTH_NOTIFY_URL + The clawforce endpoint to POST ``{"code": "...", "state": "..."}`` to once + the browser lands on ``/auth/callback``. +OAUTH_PORT + Port to listen on (default: 1455). +""" + +import http.client +import http.server +import json +import os +import urllib.parse + +NOTIFY_URL: str = os.environ.get("OAUTH_NOTIFY_URL", "") +PORT: int = int(os.environ.get("OAUTH_PORT", "1455")) + +_SUCCESS_HTML = b""" + + + + + Authorization successful + + + +
+
+

Authorization successful

+

You can close this tab and return to clawforce.

+ +
+ +""" + +_ERROR_HTML = b""" + + + + Authorization error + + + +
+

Authorization failed

+

The provider returned an error. Please close this tab and try again.

+
+ +""" + + +class _CallbackHandler(http.server.BaseHTTPRequestHandler): + def do_GET(self) -> None: # noqa: N802 + parsed = urllib.parse.urlparse(self.path) + if not parsed.path.startswith("/auth/callback"): + self.send_response(404) + self.end_headers() + return + + params = urllib.parse.parse_qs(parsed.query) + code = (params.get("code") or [""])[0] + state = (params.get("state") or [""])[0] + error = (params.get("error") or [""])[0] + + if error or not code: + self.send_response(200) + self.send_header("Content-Type", "text/html; charset=utf-8") + self.end_headers() + self.wfile.write(_ERROR_HTML) + return + + if code and state and NOTIFY_URL: + try: + parsed_notify = urllib.parse.urlparse(NOTIFY_URL) + host = parsed_notify.netloc + path = parsed_notify.path or "/" + payload = json.dumps({"code": code, "state": state}).encode() + conn = http.client.HTTPConnection(host, timeout=10) + conn.request( + "POST", + path, + body=payload, + headers={"Content-Type": "application/json"}, + ) + conn.getresponse() + except Exception: + pass + + self.send_response(200) + self.send_header("Content-Type", "text/html; charset=utf-8") + self.end_headers() + self.wfile.write(_SUCCESS_HTML) + + def log_message(self, *_args: object) -> None: + pass + + +def main() -> None: + server = http.server.HTTPServer(("0.0.0.0", PORT), _CallbackHandler) + server.serve_forever() + + +if __name__ == "__main__": + main() diff --git a/clawlib/config/schema.py b/clawlib/config/schema.py index a7bc81b..01e5b70 100644 --- a/clawlib/config/schema.py +++ b/clawlib/config/schema.py @@ -192,7 +192,6 @@ class ProvidersConfig(Base): groq: ProviderConfig = Field(default_factory=ProviderConfig) zhipu: ProviderConfig = Field(default_factory=ProviderConfig) dashscope: ProviderConfig = Field(default_factory=ProviderConfig) - vllm: ProviderConfig = Field(default_factory=ProviderConfig) gemini: ProviderConfig = Field(default_factory=ProviderConfig) moonshot: ProviderConfig = Field(default_factory=ProviderConfig) 
minimax: ProviderConfig = Field(default_factory=ProviderConfig) @@ -200,6 +199,7 @@ class ProvidersConfig(Base): siliconflow: ProviderConfig = Field(default_factory=ProviderConfig) openai_codex: ProviderConfig = Field(default_factory=ProviderConfig) github_copilot: ProviderConfig = Field(default_factory=ProviderConfig) + chatgpt: ProviderConfig = Field(default_factory=ProviderConfig) class GatewayConfig(Base): diff --git a/deploy/Dockerfile b/deploy/Dockerfile index 7d28e78..7200923 100644 --- a/deploy/Dockerfile +++ b/deploy/Dockerfile @@ -1,8 +1,11 @@ # All-in-one Clawforce image: SPA + API; same image is used as worker when using Docker runtime. # Build: docker build -t clawforce:latest -f deploy/Dockerfile . # -# Run (mount socket so the container can talk to the Docker daemon): +# Run (mount socket so the container can spawn agent and OAuth callback containers): # docker run -d -p 8080:8080 -v /var/run/docker.sock:/var/run/docker.sock -v clawforce-data:/data clawforce:latest +# +# OAuth callback ports (e.g. 1455 for ChatGPT/Codex) are handled by short-lived sibling containers +# spawned on demand — no extra -p flag needed in this run command. # Stage 1: Build React SPA (cache-friendly: deps first, then source) FROM node:20-slim AS frontend @@ -64,6 +67,9 @@ RUN chmod +x /app/deploy/entrypoint.sh # Runtime config ENV ADMIN_STORAGE_ROOT=/data ENV ADMIN_RUNTIME_BACKEND=docker +# Redirect XDG user-data dir under /data so OAuth tokens (and any other +# platformdirs-managed state) survive container recreation. +ENV XDG_DATA_HOME=/data/.local/share # Default admin credentials for first-run setup (set in entrypoint to avoid ENV secret warning). # Override for production: docker run -e ADMIN_SETUP_USERNAME=admin -e ADMIN_SETUP_PASSWORD=your-secure-password ... 
diff --git a/tests/test_clawlib_config.py b/tests/test_clawlib_config.py index 0975942..6abd850 100644 --- a/tests/test_clawlib_config.py +++ b/tests/test_clawlib_config.py @@ -190,7 +190,6 @@ def test_providers_config_all_providers(self): assert isinstance(cfg.gemini, ProviderConfig) assert isinstance(cfg.zhipu, ProviderConfig) assert isinstance(cfg.dashscope, ProviderConfig) - assert isinstance(cfg.vllm, ProviderConfig) assert isinstance(cfg.moonshot, ProviderConfig) assert isinstance(cfg.minimax, ProviderConfig) assert isinstance(cfg.aihubmix, ProviderConfig)