Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
74 changes: 74 additions & 0 deletions examples/01_standalone_sdk/26_enterprise_gateway.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,74 @@
#!/usr/bin/env python3
"""
Enterprise Gateway Example

Demonstrates configuring OpenHands for environments that route LLM traffic through
an API gateway requiring custom headers and optional TLS overrides.
"""

import os
import uuid
from datetime import datetime, timezone

from pydantic import SecretStr

from openhands.sdk import Conversation
from openhands.sdk.llm import LLM
from openhands.tools.preset.default import get_default_agent


def build_gateway_llm() -> LLM:
    """Create an LLM instance configured for an enterprise gateway.

    Settings come from environment variables with placeholder defaults:

    - ``LLM_MODEL``, ``LLM_BASE_URL``, ``LLM_API_KEY``
    - ``LLM_CUSTOM_LLM_PROVIDER``: LiteLLM provider name (default ``"openai"``)
    - ``LLM_SSL_VERIFY``: TLS verification forwarded to LiteLLM; when unset,
      verification is disabled (common behind TLS-intercepting proxies)
    - ``LLM_GATEWAY_TOKEN``, ``LLM_CLIENT_ID``, ``LLM_USECASE_ID``,
      ``LLM_GATEWAY_API_KEY``: credentials forwarded as request headers

    Returns:
        A configured ``LLM`` ready to be passed to an agent.
    """
    # Use a timezone-aware timestamp: datetime.utcnow() is deprecated since
    # Python 3.12. The strftime output below is unchanged (no %z rendered).
    now = datetime.now(timezone.utc)
    correlation_id = uuid.uuid4().hex
    request_id = uuid.uuid4().hex

    # NOTE(review): defaulting ssl_verify to False disables certificate
    # checking. Fine for a demo; real deployments should set LLM_SSL_VERIFY
    # to a truthy value or a CA-bundle path instead.
    ssl_env = os.getenv("LLM_SSL_VERIFY")
    ssl_verify: bool | str = ssl_env if ssl_env is not None else False

    return LLM(
        model=os.getenv("LLM_MODEL", "gemini-2.5-flash"),
        base_url=os.getenv(
            "LLM_BASE_URL", "https://your-corporate-proxy.company.com/api/llm"
        ),
        # an api_key input is always required but is unused when api keys
        # are passed via extra headers
        api_key=SecretStr(os.getenv("LLM_API_KEY", "placeholder")),
        custom_llm_provider=os.getenv("LLM_CUSTOM_LLM_PROVIDER", "openai"),
        ssl_verify=ssl_verify,
        extra_headers={
            # Typical headers forwarded by gateways
            "Authorization": os.getenv("LLM_GATEWAY_TOKEN", "Bearer YOUR_TOKEN"),
            "Content-Type": "application/json",
            "x-correlation-id": correlation_id,
            "x-request-id": request_id,
            # %f yields microseconds; [:-3] trims to millisecond precision.
            "x-request-date": now.strftime("%Y-%m-%dT%H:%M:%S.%f")[:-3],
            "x-client-id": os.getenv("LLM_CLIENT_ID", "YOUR_CLIENT_ID"),
            "X-USECASE-ID": os.getenv("LLM_USECASE_ID", "YOUR_USECASE_ID"),
            "x-api-key": os.getenv("LLM_GATEWAY_API_KEY", "YOUR_API_KEY"),
        },
        # additional optional parameters
        timeout=30,
        num_retries=1,
    )


def main() -> None:
    """Run the enterprise-gateway example end to end."""
    print("=== Enterprise Gateway Configuration Example ===")

    # Build an LLM routed through the corporate gateway.
    gateway_llm = build_gateway_llm()

    # Wire the LLM into a default agent and a local conversation.
    demo_agent = get_default_agent(llm=gateway_llm, cli_mode=True)
    demo_conversation = Conversation(
        agent=demo_agent,
        workspace=os.getcwd(),
    )

    # Exercise the gateway configuration with a simple task.
    demo_conversation.send_message(
        "Analyze this codebase and create 3 facts about the current "
        "project into FACTS.txt. Do not write code."
    )
    demo_conversation.run()


if __name__ == "__main__":
    main()
48 changes: 37 additions & 11 deletions openhands-sdk/openhands/sdk/llm/llm.py
Original file line number Diff line number Diff line change
Expand Up @@ -189,6 +189,14 @@ class LLM(BaseModel, RetryMixin, NonNativeToolCallingMixin):
)
ollama_base_url: str | None = Field(default=None)

# Forwarded as-is to litellm.completion/responses. True/False toggles TLS
# verification; a str value is presumably a CA-bundle path (LiteLLM/httpx
# semantics — confirm against LiteLLM docs); None defers to LiteLLM's default.
ssl_verify: bool | str | None = Field(
    default=None,
    description=(
        "TLS verification forwarded to LiteLLM; "
        "set to False when corporate proxies break certificate chains."
    ),
)

drop_params: bool = Field(default=True)
modify_params: bool = Field(
default=True,
Expand Down Expand Up @@ -512,7 +520,7 @@ def completion(
log_ctx = {
"messages": formatted_messages[:], # already simple dicts
"tools": tools,
"kwargs": {k: v for k, v in call_kwargs.items()},
"kwargs": dict(call_kwargs),
"context_window": self.max_input_tokens or 0,
}
if tools and not use_native_fc:
Expand Down Expand Up @@ -629,7 +637,7 @@ def responses(
"llm_path": "responses",
"input": input_items[:],
"tools": tools,
"kwargs": {k: v for k, v in call_kwargs.items()},
"kwargs": dict(call_kwargs),
"context_window": self.max_input_tokens or 0,
}
self._telemetry.on_request(log_ctx=log_ctx)
Expand Down Expand Up @@ -665,7 +673,9 @@ def _one_attempt(**retry_kwargs) -> ResponsesAPIResponse:
api_key=api_key_value,
api_base=self.base_url,
api_version=self.api_version,
custom_llm_provider=self.custom_llm_provider,
timeout=self.timeout,
ssl_verify=self.ssl_verify,
drop_params=self.drop_params,
seed=self.seed,
**final_kwargs,
Expand Down Expand Up @@ -742,7 +752,9 @@ def _transport_call(
api_key=api_key_value,
api_base=self.base_url,
api_version=self.api_version,
custom_llm_provider=self.custom_llm_provider,
timeout=self.timeout,
ssl_verify=self.ssl_verify,
drop_params=self.drop_params,
seed=self.seed,
messages=messages,
Expand Down Expand Up @@ -1027,6 +1039,7 @@ def load_from_json(cls, json_path: str) -> LLM:
@classmethod
def load_from_env(cls, prefix: str = "LLM_") -> LLM:
TRUTHY = {"true", "1", "yes", "on"}
FALSY = {"false", "0", "no", "off"}

def _unwrap_type(t: Any) -> Any:
origin = get_origin(t)
Expand All @@ -1035,31 +1048,44 @@ def _unwrap_type(t: Any) -> Any:
args = [a for a in get_args(t) if a is not type(None)]
return args[0] if args else t

def _cast_value(raw: str, t: Any) -> Any:
t = _unwrap_type(t)
def _cast_value(field_name: str, raw: str, annotation: Any) -> Any:
stripped = raw.strip()
lowered = stripped.lower()
if field_name == "ssl_verify":
if lowered in TRUTHY:
return True
if lowered in FALSY:
return False
return stripped

t = _unwrap_type(annotation)
if t is SecretStr:
return SecretStr(raw)
return SecretStr(stripped)
if t is bool:
return raw.lower() in TRUTHY
if lowered in TRUTHY:
return True
if lowered in FALSY:
return False
return stripped.lower() in TRUTHY
if t is int:
try:
return int(raw)
return int(stripped)
except ValueError:
return None
if t is float:
try:
return float(raw)
return float(stripped)
except ValueError:
return None
origin = get_origin(t)
if (origin in (list, dict, tuple)) or (
isinstance(t, type) and issubclass(t, BaseModel)
):
try:
return json.loads(raw)
return json.loads(stripped)
except Exception:
pass
return raw
return stripped

data: dict[str, Any] = {}
fields: dict[str, Any] = {
Expand All @@ -1074,7 +1100,7 @@ def _cast_value(raw: str, t: Any) -> Any:
field_name = key[len(prefix) :].lower()
if field_name not in fields:
continue
v = _cast_value(value, fields[field_name])
v = _cast_value(field_name, value, fields[field_name])
if v is not None:
data[field_name] = v
return cls(**data)
Expand Down
72 changes: 71 additions & 1 deletion openhands-sdk/openhands/sdk/llm/utils/telemetry.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,28 @@

logger = get_logger(__name__)

# Header names that are always redacted from telemetry logs. Matched exactly
# against the lowercased header name; both dash and underscore spellings are
# listed so either form is caught.
_SENSITIVE_HEADER_NAMES = {
    "authorization",
    "proxy-authorization",
    "proxy_authorization",
    "cookie",
    "set-cookie",
    "set_cookie",
}
# Substrings that mark a header as sensitive wherever they occur in the
# lowercased name (e.g. "x-gateway-api-key" matches "api-key").
_SENSITIVE_HEADER_KEYWORDS = (
    "api-key",
    "api_key",
    "access-token",
    "access_token",
    "auth-token",
    "auth_token",
    "secret",
    "x-api-key",
    "x-api-token",
    "x-auth-token",
)
# Replacement string written in place of any redacted header value.
_MASK = "***"


class Telemetry(BaseModel):
"""
Expand Down Expand Up @@ -234,7 +256,7 @@ def log_llm_call(
f"{uuid.uuid4().hex[:4]}.json"
),
)
data = self._req_ctx.copy()
data = _sanitize_log_ctx(self._req_ctx)
data["response"] = (
resp # ModelResponse | ResponsesAPIResponse;
# serialized via _safe_json
Expand Down Expand Up @@ -303,6 +325,54 @@ def log_llm_call(
warnings.warn(f"Telemetry logging failed: {e}")


def _sanitize_log_ctx(ctx: dict[str, Any] | None) -> dict[str, Any]:
if not isinstance(ctx, dict):
return {}
sanitized: dict[str, Any] = {}
for key, value in ctx.items():
if key == "kwargs" and isinstance(value, dict):
sanitized["kwargs"] = _sanitize_kwargs(value)
elif key == "extra_headers" and isinstance(value, dict):
sanitized["extra_headers"] = _sanitize_headers(value)
else:
sanitized[key] = value
return sanitized


def _sanitize_kwargs(kwargs: dict[str, Any]) -> dict[str, Any]:
sanitized = dict(kwargs)
extra_headers = sanitized.get("extra_headers")
if isinstance(extra_headers, dict):
sanitized["extra_headers"] = _sanitize_headers(extra_headers)
return sanitized


def _sanitize_headers(headers: dict[str, Any]) -> dict[str, Any]:
sanitized: dict[str, Any] = {}
for key, value in headers.items():
sanitized[key] = _mask_header_value(key, value)
return sanitized


def _mask_header_value(key: Any, value: Any) -> Any:
if not isinstance(key, str):
return value
if _is_sensitive_header(key):
return _mask_value(value)
return value


def _is_sensitive_header(name: str) -> bool:
    """True when the (case-insensitive) header name should be redacted."""
    folded = name.lower()
    return folded in _SENSITIVE_HEADER_NAMES or any(
        marker in folded for marker in _SENSITIVE_HEADER_KEYWORDS
    )


def _mask_value(_value: Any) -> str:
    # Always return the fixed mask; the original value is deliberately
    # discarded so that not even its length leaks into the logs.
    return _MASK


def _safe_json(obj: Any) -> Any:
# Centralized serializer for telemetry logs.
# Prefer robust serialization for Pydantic models first to avoid cycles.
Expand Down
Loading
Loading