Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions clawbot/agent/runtime.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@

from clawbot.core.config.loader import load_config
from clawbot.core.config.schema import Config
from clawbot.providers.chatgpt_provider import ChatGPTProvider
from clawbot.providers.custom_provider import CustomProvider
from clawbot.providers.litellm_provider import LiteLLMProvider
from clawbot.providers.openai_codex_provider import OpenAICodexProvider
Expand All @@ -29,6 +30,9 @@ def make_provider(config: Config) -> Any:
if provider_name == "openai_codex" or (model or "").startswith("openai-codex/"):
return OpenAICodexProvider(default_model=model)

if provider_name == "chatgpt" or (model or "").startswith("chatgpt/"):
return ChatGPTProvider(default_model=model)

if provider_name == "custom":
return CustomProvider(
api_key=p.api_key if p else "no-key",
Expand Down
95 changes: 95 additions & 0 deletions clawbot/providers/chatgpt_provider.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,95 @@
"""ChatGPT Plus Provider — OAuth-based, same login as OpenAI Codex."""

from __future__ import annotations

import asyncio

from loguru import logger

from clawbot.providers.base import LLMResponse
from clawbot.providers.openai_codex_provider import (
OpenAICodexProvider,
_build_headers,
_convert_messages,
_convert_tools,
_get_agent_oauth_token,
_request_codex,
)

DEFAULT_CHATGPT_URL = "https://chatgpt.com/backend-api/codex/responses"
DEFAULT_MODEL = "chatgpt/gpt-4o"


class ChatGPTProvider(OpenAICodexProvider):
    """ChatGPT Plus provider.

    Uses the same OpenAI OAuth login as OpenAI Codex (shared ``codex.json``
    token file). Strips the ``chatgpt/`` model prefix before sending the
    request so the bare model name (e.g. ``gpt-4o``) reaches the API.
    """

    def __init__(self, default_model: str = DEFAULT_MODEL):
        super().__init__(default_model=default_model)

    async def chat(
        self,
        messages: list,
        tools: list | None = None,
        model: str | None = None,
        max_tokens: int = 4096,
        temperature: float = 0.7,
    ) -> LLMResponse:
        """Send a chat request to the ChatGPT backend Responses API.

        Args:
            messages: OpenAI-style message dicts; ``_convert_messages``
                splits the system prompt out into ``instructions``.
            tools: Optional tool schemas, converted via ``_convert_tools``.
            model: Model name; defaults to ``self.default_model``. Any
                ``chatgpt/`` prefix is stripped before sending.
            max_tokens: Accepted for interface compatibility but not
                forwarded in the request body — NOTE(review): confirm
                whether the backend should receive it.
            temperature: Accepted for interface compatibility but not
                forwarded in the request body.

        Returns:
            ``LLMResponse`` with the model output, or an error response
            (``finish_reason="error"``) if the request ultimately failed.
        """
        model = model or self.default_model
        system_prompt, input_items = _convert_messages(messages)

        # Token retrieval may touch the filesystem or refresh endpoint;
        # keep that blocking work off the event loop.
        token = await asyncio.to_thread(_get_agent_oauth_token, "CHATGPT_OAUTH_TOKEN")
        headers = _build_headers(token.account_id, token.access)

        body = {
            "model": _strip_chatgpt_prefix(model),
            "store": False,
            "stream": True,
            "instructions": system_prompt,
            "input": input_items,
            "text": {"verbosity": "medium"},
            "include": ["reasoning.encrypted_content"],
            "tool_choice": "auto",
            "parallel_tool_calls": True,
        }

        if tools:
            body["tools"] = _convert_tools(tools)

        try:
            content, tool_calls, finish_reason = await self._request_with_ssl_fallback(
                headers, body
            )
            return LLMResponse(
                content=content,
                tool_calls=tool_calls,
                finish_reason=finish_reason,
            )
        except Exception as e:
            # Surface failures as a normal response so callers do not need
            # to wrap every chat() call in try/except.
            return LLMResponse(
                content=f"Error calling ChatGPT: {str(e)}",
                finish_reason="error",
            )

    async def _request_with_ssl_fallback(self, headers: dict, body: dict):
        """Issue the request, retrying once without TLS verification.

        Some proxied environments present certificates that fail
        verification; retry with ``verify=False`` only for that specific
        error, re-raising anything else unchanged.
        """
        try:
            return await _request_codex(DEFAULT_CHATGPT_URL, headers, body, verify=True)
        except Exception as e:
            if "CERTIFICATE_VERIFY_FAILED" not in str(e):
                raise
            logger.warning(
                "SSL certificate verification failed for ChatGPT API; retrying with verify=False"
            )
            return await _request_codex(DEFAULT_CHATGPT_URL, headers, body, verify=False)

    def get_default_model(self) -> str:
        """Return the model used when ``chat()`` is called without one."""
        return self.default_model


def _strip_chatgpt_prefix(model: str) -> str:
if model.startswith("chatgpt/"):
return model.split("/", 1)[1]
return model
33 changes: 32 additions & 1 deletion clawbot/providers/openai_codex_provider.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,18 +5,49 @@
import asyncio
import hashlib
import json
import os
import time
from typing import Any, AsyncGenerator

import httpx
from loguru import logger
from oauth_cli_kit import OPENAI_CODEX_PROVIDER, OAuthToken
from oauth_cli_kit import get_token as get_codex_token
from oauth_cli_kit.flow import _refresh_token

from clawbot.providers.base import LLMProvider, LLMResponse, ToolCallRequest

DEFAULT_CODEX_URL = "https://chatgpt.com/backend-api/codex/responses"
DEFAULT_ORIGINATOR = "clawbot"


def _get_agent_oauth_token(env_var: str = "OPENAI_CODEX_OAUTH_TOKEN") -> OAuthToken:
    """Return an OAuth token for OpenAI, preferring the per-agent env var.

    Agent containers receive the token as a JSON string injected from the
    agent's stored provider config under the env var declared in the registry
    (e.g. ``OPENAI_CODEX_OAUTH_TOKEN``, ``CHATGPT_OAUTH_TOKEN``), exactly like
    any other API key. Falls back to the global FileTokenStorage for local runs.

    Args:
        env_var: Name of the environment variable holding the JSON token
            (keys: ``access``, ``refresh``, ``expires``, optional
            ``account_id``).

    Returns:
        A valid ``OAuthToken``, refreshed if it was about to expire.
    """
    env_val = os.environ.get(env_var)
    if env_val:
        try:
            data = json.loads(env_val)
            token = OAuthToken(
                access=data["access"],
                refresh=data["refresh"],
                expires=int(data["expires"]),
                account_id=data.get("account_id"),
            )
            # `expires` is in epoch milliseconds; refresh proactively when
            # within 60 seconds of expiry.
            now_ms = int(time.time() * 1000)
            if token.expires - now_ms <= 60 * 1000:
                token = _refresh_token(token.refresh, OPENAI_CODEX_PROVIDER)
            return token
        except Exception as e:
            # Fall back to file-based storage, but do not silently hide a
            # malformed injected token — that makes misconfiguration
            # invisible and hard to debug.
            logger.warning("Ignoring invalid OAuth token in {}: {}", env_var, e)
    return get_codex_token()


class OpenAICodexProvider(LLMProvider):
"""Use Codex OAuth to call the Responses API."""

Expand All @@ -35,7 +66,7 @@ async def chat(
model = model or self.default_model
system_prompt, input_items = _convert_messages(messages)

token = await asyncio.to_thread(get_codex_token)
token = await asyncio.to_thread(_get_agent_oauth_token)
headers = _build_headers(token.account_id, token.access)

body: dict[str, Any] = {
Expand Down
20 changes: 19 additions & 1 deletion clawbot/providers/registry.py
Original file line number Diff line number Diff line change
Expand Up @@ -176,7 +176,7 @@ def label(self) -> str:
ProviderSpec(
name="openai_codex",
keywords=("openai-codex", "codex"),
env_key="", # OAuth-based, no API key
env_key="OPENAI_CODEX_OAUTH_TOKEN",
display_name="OpenAI Codex",
litellm_prefix="", # Not routed through LiteLLM
skip_prefixes=(),
Expand All @@ -190,6 +190,24 @@ def label(self) -> str:
model_overrides=(),
is_oauth=True, # OAuth-based authentication
),
# ChatGPT Plus: uses OAuth (same OpenAI login as Codex), not API key.
ProviderSpec(
name="chatgpt",
keywords=("chatgpt",),
env_key="CHATGPT_OAUTH_TOKEN",
display_name="ChatGPT Plus",
litellm_prefix="", # Not routed through LiteLLM
skip_prefixes=(),
env_extras=(),
is_gateway=False,
is_local=False,
detect_by_key_prefix="",
detect_by_base_keyword="",
default_api_base="https://chatgpt.com/backend-api",
strip_model_prefix=False,
model_overrides=(),
is_oauth=True, # OAuth-based authentication (shared with OpenAI Codex)
),
# Github Copilot: uses OAuth, not API key.
ProviderSpec(
name="github_copilot",
Expand Down
8 changes: 6 additions & 2 deletions clawforce-ui/src/components/agent-detail/constants.ts
Original file line number Diff line number Diff line change
Expand Up @@ -128,6 +128,10 @@ export const CHANNEL_DEFS: { key: string; label: string; icon: React.ReactNode;
];

export const PROVIDER_DEFS: ProviderDef[] = [
// Subscription-based (OAuth) — no API key required
{ field: "chatgpt", label: "ChatGPT Plus", keywords: ["chatgpt"], oauth: true },
{ field: "openai_codex", label: "OpenAI Codex", keywords: ["openai-codex", "codex"], oauth: true },
// API key / token providers
{ field: "anthropic", label: "Anthropic", keywords: ["anthropic", "claude"] },
{ field: "openai", label: "OpenAI", keywords: ["openai", "gpt", "o1", "o3", "o4"] },
{ field: "openrouter", label: "OpenRouter", keywords: ["openrouter"] },
Expand All @@ -139,15 +143,15 @@ export const PROVIDER_DEFS: ProviderDef[] = [
{ field: "together", label: "Together AI", keywords: ["together"] },
{ field: "bedrock", label: "AWS Bedrock", keywords: ["bedrock"] },
{ field: "azure", label: "Azure OpenAI", keywords: ["azure"] },
// GitHub Copilot — paste token from `gh auth token` or VS Code Copilot extension
{ field: "github_copilot", label: "GitHub Copilot", keywords: ["github_copilot", "copilot"] },
{ field: "moonshot", label: "Moonshot / Kimi", keywords: ["moonshot", "kimi"] },
{ field: "dashscope", label: "DashScope / Qwen", keywords: ["dashscope", "qwen"] },
{ field: "zhipu", label: "Zhipu AI", keywords: ["zhipu", "glm"] },
{ field: "minimax", label: "MiniMax", keywords: ["minimax"] },
{ field: "aihubmix", label: "AiHubMix", keywords: ["aihubmix"] },
{ field: "siliconflow", label: "SiliconFlow", keywords: ["siliconflow"] },
{ field: "vllm", label: "vLLM / Local", keywords: ["vllm"] },
{ field: "openai_codex", label: "OpenAI Codex", keywords: ["openai-codex", "codex"], oauth: true },
{ field: "github_copilot", label: "GitHub Copilot", keywords: ["github_copilot", "copilot"], oauth: true },
];

export const SECURITY_PRESETS = {
Expand Down
Loading
Loading