diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index fd0ccba..000572e 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
{
- ".": "0.1.0-alpha.12"
+ ".": "0.1.0-alpha.13"
}
\ No newline at end of file
diff --git a/.stats.yml b/.stats.yml
index 1eef7bb..5aeba9e 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1,4 +1,4 @@
-configured_endpoints: 20
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/opencode%2Fopencode-15eeb028f79b9a065b4e54a6ea6a58631e9bd5004f97820f0c79d18e3f8bac84.yml
-openapi_spec_hash: 38c8bacb6c8e4c46852a3e81e3fb9fda
-config_hash: 348a85e725de595ca05a61f4333794ac
+configured_endpoints: 22
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/opencode%2Fopencode-05150c78e0e6e97b0ce97ed685ebcf1cb01dc839beccb99e9d3ead5b783cfd47.yml
+openapi_spec_hash: 833a5b6d53d98dc2beac2c4c394b20d5
+config_hash: 3695cfc829cfaae14490850b4a1ed282
diff --git a/CHANGELOG.md b/CHANGELOG.md
index b5ab3d2..0b7184c 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,13 @@
# Changelog
+## 0.1.0-alpha.13 (2025-07-15)
+
+Full Changelog: [v0.1.0-alpha.12...v0.1.0-alpha.13](https://github.com/sst/opencode-sdk-python/compare/v0.1.0-alpha.12...v0.1.0-alpha.13)
+
+### Features
+
+* **api:** api update ([a51d627](https://github.com/sst/opencode-sdk-python/commit/a51d627f3a39324ca769a688b63c95dc8f5eba35))
+
## 0.1.0-alpha.12 (2025-07-12)
Full Changelog: [v0.1.0-alpha.11...v0.1.0-alpha.12](https://github.com/sst/opencode-sdk-python/compare/v0.1.0-alpha.11...v0.1.0-alpha.12)
diff --git a/api.md b/api.md
index 5d726d0..c96e445 100644
--- a/api.md
+++ b/api.md
@@ -1,7 +1,7 @@
# Shared Types
```python
-from opencode_ai.types import ProviderAuthError, UnknownError
+from opencode_ai.types import MessageAbortedError, ProviderAuthError, UnknownError
```
# Event
@@ -21,20 +21,28 @@ Methods:
Types:
```python
-from opencode_ai.types import App, AppInitResponse
+from opencode_ai.types import App, LogLevel, Mode, AppInitResponse, AppLogResponse, AppModesResponse
```
Methods:
- client.app.get() -> App
- client.app.init() -> AppInitResponse
+- client.app.log(\*\*params) -> AppLogResponse
+- client.app.modes() -> AppModesResponse
# Find
Types:
```python
-from opencode_ai.types import FindFilesResponse, FindSymbolsResponse, FindTextResponse
+from opencode_ai.types import (
+ Match,
+ Symbol,
+ FindFilesResponse,
+ FindSymbolsResponse,
+ FindTextResponse,
+)
```
Methods:
@@ -48,7 +56,7 @@ Methods:
Types:
```python
-from opencode_ai.types import FileReadResponse, FileStatusResponse
+from opencode_ai.types import File, FileReadResponse, FileStatusResponse
```
Methods:
@@ -84,10 +92,11 @@ Types:
```python
from opencode_ai.types import (
AssistantMessage,
- AssistantMessagePart,
FilePart,
Message,
+ Part,
Session,
+ StepFinishPart,
StepStartPart,
TextPart,
ToolPart,
@@ -95,7 +104,7 @@ from opencode_ai.types import (
ToolStateError,
ToolStatePending,
ToolStateRunning,
- UserMessagePart,
+ UserMessage,
SessionListResponse,
SessionDeleteResponse,
SessionAbortResponse,
diff --git a/pyproject.toml b/pyproject.toml
index 30dbe57..3f60e89 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "opencode-ai"
-version = "0.1.0-alpha.12"
+version = "0.1.0-alpha.13"
description = "The official Python library for the opencode API"
dynamic = ["readme"]
license = "Apache-2.0"
diff --git a/src/opencode_ai/_version.py b/src/opencode_ai/_version.py
index 5f662ba..974d48d 100644
--- a/src/opencode_ai/_version.py
+++ b/src/opencode_ai/_version.py
@@ -1,4 +1,4 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
__title__ = "opencode_ai"
-__version__ = "0.1.0-alpha.12" # x-release-please-version
+__version__ = "0.1.0-alpha.13" # x-release-please-version
diff --git a/src/opencode_ai/resources/app.py b/src/opencode_ai/resources/app.py
index 819cb8c..bbe2789 100644
--- a/src/opencode_ai/resources/app.py
+++ b/src/opencode_ai/resources/app.py
@@ -2,9 +2,14 @@
from __future__ import annotations
+from typing import Dict
+from typing_extensions import Literal
+
import httpx
+from ..types import app_log_params
from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from .._utils import maybe_transform, async_maybe_transform
from .._compat import cached_property
from .._resource import SyncAPIResource, AsyncAPIResource
from .._response import (
@@ -15,7 +20,9 @@
)
from ..types.app import App
from .._base_client import make_request_options
+from ..types.app_log_response import AppLogResponse
from ..types.app_init_response import AppInitResponse
+from ..types.app_modes_response import AppModesResponse
__all__ = ["AppResource", "AsyncAppResource"]
@@ -78,6 +85,76 @@ def init(
cast_to=AppInitResponse,
)
+ def log(
+ self,
+ *,
+ level: Literal["debug", "info", "error", "warn"],
+ message: str,
+ service: str,
+ extra: Dict[str, object] | NotGiven = NOT_GIVEN,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ ) -> AppLogResponse:
+ """
+ Write a log entry to the server logs
+
+ Args:
+ level: Log level
+
+ message: Log message
+
+ service: Service name for the log entry
+
+ extra: Additional metadata for the log entry
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ return self._post(
+ "/log",
+ body=maybe_transform(
+ {
+ "level": level,
+ "message": message,
+ "service": service,
+ "extra": extra,
+ },
+ app_log_params.AppLogParams,
+ ),
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=AppLogResponse,
+ )
+
+ def modes(
+ self,
+ *,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ ) -> AppModesResponse:
+ """List all modes"""
+ return self._get(
+ "/mode",
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=AppModesResponse,
+ )
+
class AsyncAppResource(AsyncAPIResource):
@cached_property
@@ -137,6 +214,76 @@ async def init(
cast_to=AppInitResponse,
)
+ async def log(
+ self,
+ *,
+ level: Literal["debug", "info", "error", "warn"],
+ message: str,
+ service: str,
+ extra: Dict[str, object] | NotGiven = NOT_GIVEN,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ ) -> AppLogResponse:
+ """
+ Write a log entry to the server logs
+
+ Args:
+ level: Log level
+
+ message: Log message
+
+ service: Service name for the log entry
+
+ extra: Additional metadata for the log entry
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ return await self._post(
+ "/log",
+ body=await async_maybe_transform(
+ {
+ "level": level,
+ "message": message,
+ "service": service,
+ "extra": extra,
+ },
+ app_log_params.AppLogParams,
+ ),
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=AppLogResponse,
+ )
+
+ async def modes(
+ self,
+ *,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ ) -> AppModesResponse:
+ """List all modes"""
+ return await self._get(
+ "/mode",
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=AppModesResponse,
+ )
+
class AppResourceWithRawResponse:
def __init__(self, app: AppResource) -> None:
@@ -148,6 +295,12 @@ def __init__(self, app: AppResource) -> None:
self.init = to_raw_response_wrapper(
app.init,
)
+ self.log = to_raw_response_wrapper(
+ app.log,
+ )
+ self.modes = to_raw_response_wrapper(
+ app.modes,
+ )
class AsyncAppResourceWithRawResponse:
@@ -160,6 +313,12 @@ def __init__(self, app: AsyncAppResource) -> None:
self.init = async_to_raw_response_wrapper(
app.init,
)
+ self.log = async_to_raw_response_wrapper(
+ app.log,
+ )
+ self.modes = async_to_raw_response_wrapper(
+ app.modes,
+ )
class AppResourceWithStreamingResponse:
@@ -172,6 +331,12 @@ def __init__(self, app: AppResource) -> None:
self.init = to_streamed_response_wrapper(
app.init,
)
+ self.log = to_streamed_response_wrapper(
+ app.log,
+ )
+ self.modes = to_streamed_response_wrapper(
+ app.modes,
+ )
class AsyncAppResourceWithStreamingResponse:
@@ -184,3 +349,9 @@ def __init__(self, app: AsyncAppResource) -> None:
self.init = async_to_streamed_response_wrapper(
app.init,
)
+ self.log = async_to_streamed_response_wrapper(
+ app.log,
+ )
+ self.modes = async_to_streamed_response_wrapper(
+ app.modes,
+ )
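The new `app.log` and `app.modes` methods above map to `POST /log` and `GET /mode`. A minimal usage sketch, assuming a running opencode server (the `base_url` below is a placeholder, not a documented default):

```python
from opencode_ai import Opencode

# Placeholder URL; point this at a running opencode server.
client = Opencode(base_url="http://localhost:4096")

# POST /log returns a bare boolean (AppLogResponse is an alias for bool).
ok = client.app.log(
    level="info",
    message="index rebuilt",
    service="my-tool",
    extra={"files": 120},  # optional free-form metadata
)
print(ok)

# GET /mode returns a list of Mode models.
for mode in client.app.modes():
    print(mode.name, sorted(mode.tools))
```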
diff --git a/src/opencode_ai/resources/session.py b/src/opencode_ai/resources/session.py
index 5c6286d..087f3b4 100644
--- a/src/opencode_ai/resources/session.py
+++ b/src/opencode_ai/resources/session.py
@@ -24,7 +24,6 @@
from ..types.session_list_response import SessionListResponse
from ..types.session_abort_response import SessionAbortResponse
from ..types.session_delete_response import SessionDeleteResponse
-from ..types.user_message_part_param import UserMessagePartParam
from ..types.session_messages_response import SessionMessagesResponse
from ..types.session_summarize_response import SessionSummarizeResponse
@@ -159,8 +158,10 @@ def chat(
self,
id: str,
*,
+ message_id: str,
+ mode: str,
model_id: str,
- parts: Iterable[UserMessagePartParam],
+ parts: Iterable[session_chat_params.Part],
provider_id: str,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
@@ -189,6 +190,8 @@ def chat(
f"/session/{id}/message",
body=maybe_transform(
{
+ "message_id": message_id,
+ "mode": mode,
"model_id": model_id,
"parts": parts,
"provider_id": provider_id,
@@ -205,6 +208,7 @@ def init(
self,
id: str,
*,
+ message_id: str,
model_id: str,
provider_id: str,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
@@ -234,6 +238,7 @@ def init(
f"/session/{id}/init",
body=maybe_transform(
{
+ "message_id": message_id,
"model_id": model_id,
"provider_id": provider_id,
},
@@ -519,8 +524,10 @@ async def chat(
self,
id: str,
*,
+ message_id: str,
+ mode: str,
model_id: str,
- parts: Iterable[UserMessagePartParam],
+ parts: Iterable[session_chat_params.Part],
provider_id: str,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
@@ -549,6 +556,8 @@ async def chat(
f"/session/{id}/message",
body=await async_maybe_transform(
{
+ "message_id": message_id,
+ "mode": mode,
"model_id": model_id,
"parts": parts,
"provider_id": provider_id,
@@ -565,6 +574,7 @@ async def init(
self,
id: str,
*,
+ message_id: str,
model_id: str,
provider_id: str,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
@@ -594,6 +604,7 @@ async def init(
f"/session/{id}/init",
body=await async_maybe_transform(
{
+ "message_id": message_id,
"model_id": model_id,
"provider_id": provider_id,
},
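`session.chat` and `session.init` now require a caller-supplied `message_id`, and `chat` additionally takes a `mode` name; parts use the flattened `session_chat_params.Part` union of file/text part params. A hedged sketch with placeholder IDs and URL:

```python
from opencode_ai import Opencode

client = Opencode(base_url="http://localhost:4096")  # placeholder URL

result = client.session.chat(
    id="ses_123",          # placeholder session id
    message_id="msg_123",  # new: caller-provided message id
    mode="build",          # new: mode name, e.g. one returned by client.app.modes()
    model_id="modelID",
    provider_id="providerID",
    parts=[
        {
            # TextPartParam now requires its own ids alongside the text.
            "id": "prt_123",
            "message_id": "msg_123",
            "session_id": "ses_123",
            "type": "text",
            "text": "Summarize the repository layout.",
        }
    ],
)
print(result)
```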
diff --git a/src/opencode_ai/types/__init__.py b/src/opencode_ai/types/__init__.py
index 6c2ca47..4d6528d 100644
--- a/src/opencode_ai/types/__init__.py
+++ b/src/opencode_ai/types/__init__.py
@@ -3,28 +3,42 @@
from __future__ import annotations
from .app import App as App
+from .file import File as File
+from .mode import Mode as Mode
+from .part import Part as Part
+from .match import Match as Match
from .model import Model as Model
from .config import Config as Config
-from .shared import UnknownError as UnknownError, ProviderAuthError as ProviderAuthError
+from .shared import (
+ UnknownError as UnknownError,
+ ProviderAuthError as ProviderAuthError,
+ MessageAbortedError as MessageAbortedError,
+)
+from .symbol import Symbol as Symbol
from .message import Message as Message
from .session import Session as Session
from .keybinds import Keybinds as Keybinds
from .provider import Provider as Provider
from .file_part import FilePart as FilePart
+from .log_level import LogLevel as LogLevel
from .mcp_local import McpLocal as McpLocal
from .text_part import TextPart as TextPart
from .tool_part import ToolPart as ToolPart
from .mcp_remote import McpRemote as McpRemote
+from .user_message import UserMessage as UserMessage
+from .app_log_params import AppLogParams as AppLogParams
from .file_part_param import FilePartParam as FilePartParam
from .step_start_part import StepStartPart as StepStartPart
from .text_part_param import TextPartParam as TextPartParam
+from .app_log_response import AppLogResponse as AppLogResponse
from .file_read_params import FileReadParams as FileReadParams
from .find_text_params import FindTextParams as FindTextParams
+from .step_finish_part import StepFinishPart as StepFinishPart
from .tool_state_error import ToolStateError as ToolStateError
from .app_init_response import AppInitResponse as AppInitResponse
from .assistant_message import AssistantMessage as AssistantMessage
from .find_files_params import FindFilesParams as FindFilesParams
-from .user_message_part import UserMessagePart as UserMessagePart
+from .app_modes_response import AppModesResponse as AppModesResponse
from .file_read_response import FileReadResponse as FileReadResponse
from .find_text_response import FindTextResponse as FindTextResponse
from .tool_state_pending import ToolStatePending as ToolStatePending
@@ -39,10 +53,8 @@
from .find_symbols_response import FindSymbolsResponse as FindSymbolsResponse
from .session_init_response import SessionInitResponse as SessionInitResponse
from .session_list_response import SessionListResponse as SessionListResponse
-from .assistant_message_part import AssistantMessagePart as AssistantMessagePart
from .session_abort_response import SessionAbortResponse as SessionAbortResponse
from .session_delete_response import SessionDeleteResponse as SessionDeleteResponse
-from .user_message_part_param import UserMessagePartParam as UserMessagePartParam
from .session_summarize_params import SessionSummarizeParams as SessionSummarizeParams
from .config_providers_response import ConfigProvidersResponse as ConfigProvidersResponse
from .session_messages_response import SessionMessagesResponse as SessionMessagesResponse
diff --git a/src/opencode_ai/types/app.py b/src/opencode_ai/types/app.py
index de3df28..d60c600 100644
--- a/src/opencode_ai/types/app.py
+++ b/src/opencode_ai/types/app.py
@@ -31,5 +31,3 @@ class App(BaseModel):
path: Path
time: Time
-
- user: str
diff --git a/src/opencode_ai/types/app_log_params.py b/src/opencode_ai/types/app_log_params.py
new file mode 100644
index 0000000..8b24c11
--- /dev/null
+++ b/src/opencode_ai/types/app_log_params.py
@@ -0,0 +1,22 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+from typing import Dict
+from typing_extensions import Literal, Required, TypedDict
+
+__all__ = ["AppLogParams"]
+
+
+class AppLogParams(TypedDict, total=False):
+ level: Required[Literal["debug", "info", "error", "warn"]]
+ """Log level"""
+
+ message: Required[str]
+ """Log message"""
+
+ service: Required[str]
+ """Service name for the log entry"""
+
+ extra: Dict[str, object]
+ """Additional metadata for the log entry"""
diff --git a/src/opencode_ai/types/app_log_response.py b/src/opencode_ai/types/app_log_response.py
new file mode 100644
index 0000000..f56ed8c
--- /dev/null
+++ b/src/opencode_ai/types/app_log_response.py
@@ -0,0 +1,7 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing_extensions import TypeAlias
+
+__all__ = ["AppLogResponse"]
+
+AppLogResponse: TypeAlias = bool
diff --git a/src/opencode_ai/types/app_modes_response.py b/src/opencode_ai/types/app_modes_response.py
new file mode 100644
index 0000000..8d76f89
--- /dev/null
+++ b/src/opencode_ai/types/app_modes_response.py
@@ -0,0 +1,10 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import List
+from typing_extensions import TypeAlias
+
+from .mode import Mode
+
+__all__ = ["AppModesResponse"]
+
+AppModesResponse: TypeAlias = List[Mode]
diff --git a/src/opencode_ai/types/assistant_message.py b/src/opencode_ai/types/assistant_message.py
index a391913..c6b906e 100644
--- a/src/opencode_ai/types/assistant_message.py
+++ b/src/opencode_ai/types/assistant_message.py
@@ -8,8 +8,8 @@
from .._utils import PropertyInfo
from .._models import BaseModel
from .shared.unknown_error import UnknownError
-from .assistant_message_part import AssistantMessagePart
from .shared.provider_auth_error import ProviderAuthError
+from .shared.message_aborted_error import MessageAbortedError
__all__ = ["AssistantMessage", "Path", "Time", "Tokens", "TokensCache", "Error", "ErrorMessageOutputLengthError"]
@@ -49,7 +49,8 @@ class ErrorMessageOutputLengthError(BaseModel):
Error: TypeAlias = Annotated[
- Union[ProviderAuthError, UnknownError, ErrorMessageOutputLengthError], PropertyInfo(discriminator="name")
+ Union[ProviderAuthError, UnknownError, ErrorMessageOutputLengthError, MessageAbortedError],
+ PropertyInfo(discriminator="name"),
]
@@ -60,8 +61,6 @@ class AssistantMessage(BaseModel):
api_model_id: str = FieldInfo(alias="modelID")
- parts: List[AssistantMessagePart]
-
path: Path
provider_id: str = FieldInfo(alias="providerID")
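With `MessageAbortedError` joining the discriminated error union, callers can branch on the shared `name` field. A sketch, assuming the optional `error` attribute on `AssistantMessage` (not shown in this hunk):

```python
from opencode_ai.types import AssistantMessage

def describe_error(msg: AssistantMessage) -> str:
    # All union members carry the `name` discriminator.
    if msg.error is None:
        return "ok"
    if msg.error.name == "MessageAbortedError":
        return "message was aborted"
    return f"failed with {msg.error.name}"
```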
diff --git a/src/opencode_ai/types/assistant_message_part.py b/src/opencode_ai/types/assistant_message_part.py
deleted file mode 100644
index 5ebccf4..0000000
--- a/src/opencode_ai/types/assistant_message_part.py
+++ /dev/null
@@ -1,15 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from typing import Union
-from typing_extensions import Annotated, TypeAlias
-
-from .._utils import PropertyInfo
-from .text_part import TextPart
-from .tool_part import ToolPart
-from .step_start_part import StepStartPart
-
-__all__ = ["AssistantMessagePart"]
-
-AssistantMessagePart: TypeAlias = Annotated[
- Union[TextPart, ToolPart, StepStartPart], PropertyInfo(discriminator="type")
-]
diff --git a/src/opencode_ai/types/config.py b/src/opencode_ai/types/config.py
index 02aa810..51faa46 100644
--- a/src/opencode_ai/types/config.py
+++ b/src/opencode_ai/types/config.py
@@ -1,13 +1,15 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-from typing import Dict, List, Union, Optional
-from typing_extensions import Annotated, TypeAlias
+from typing import TYPE_CHECKING, Dict, List, Union, Optional
+from typing_extensions import Literal, Annotated, TypeAlias
from pydantic import Field as FieldInfo
+from .config import ModeUnnamedTypeWithobjectParent0ModeUnnamedTypeWithobjectParent0Item
from .._utils import PropertyInfo
from .._models import BaseModel
from .keybinds import Keybinds
+from .log_level import LogLevel
from .mcp_local import McpLocal
from .mcp_remote import McpRemote
@@ -18,6 +20,9 @@
"ExperimentalHookFileEdited",
"ExperimentalHookSessionCompleted",
"Mcp",
+ "Mode",
+ "ModeBuild",
+ "ModePlan",
"Provider",
"ProviderModels",
"ProviderModelsCost",
@@ -50,6 +55,34 @@ class Experimental(BaseModel):
Mcp: TypeAlias = Annotated[Union[McpLocal, McpRemote], PropertyInfo(discriminator="type")]
+class ModeBuild(BaseModel):
+ model: Optional[str] = None
+
+ prompt: Optional[str] = None
+
+ tools: Optional[Dict[str, bool]] = None
+
+
+class ModePlan(BaseModel):
+ model: Optional[str] = None
+
+ prompt: Optional[str] = None
+
+ tools: Optional[Dict[str, bool]] = None
+
+
+class Mode(BaseModel):
+ build: Optional[ModeBuild] = None
+
+ plan: Optional[ModePlan] = None
+
+ if TYPE_CHECKING:
+ # Stub to indicate that arbitrary properties are accepted.
+ # To access properties that are not valid identifiers you can use `getattr`, e.g.
+ # `getattr(obj, '$type')`
+ def __getattr__(self, attr: str) -> ModeUnnamedTypeWithobjectParent0ModeUnnamedTypeWithobjectParent0Item: ...
+
+
class ProviderModelsCost(BaseModel):
input: float
@@ -109,7 +142,10 @@ class Config(BaseModel):
"""JSON schema reference for configuration validation"""
autoshare: Optional[bool] = None
- """Share newly created sessions automatically"""
+ """@deprecated Use 'share' field instead.
+
+ Share newly created sessions automatically
+ """
autoupdate: Optional[bool] = None
"""Automatically update to the latest version"""
@@ -125,14 +161,28 @@ class Config(BaseModel):
keybinds: Optional[Keybinds] = None
"""Custom keybind configurations"""
+ log_level: Optional[LogLevel] = None
+ """Minimum log level to write to log files"""
+
mcp: Optional[Dict[str, Mcp]] = None
"""MCP (Model Context Protocol) server configurations"""
+ mode: Optional[Mode] = None
+
model: Optional[str] = None
"""Model to use in the format of provider/model, eg anthropic/claude-2"""
provider: Optional[Dict[str, Provider]] = None
"""Custom provider configurations and model overrides"""
+ share: Optional[Literal["auto", "disabled"]] = None
+ """
+ Control sharing behavior: 'auto' enables automatic sharing, 'disabled' disables
+ all sharing
+ """
+
theme: Optional[str] = None
"""Theme name to use for the interface"""
+
+ username: Optional[str] = None
+ """Custom username to display in conversations instead of system username"""
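`Config` gains `share` (superseding the deprecated `autoshare` boolean), plus `log_level`, `mode`, and `username`. A small helper sketch over an already-loaded `Config` value:

```python
from opencode_ai.types import Config

def sharing_enabled(cfg: Config) -> bool:
    # Prefer the new `share` enum; fall back to the deprecated boolean.
    if cfg.share is not None:
        return cfg.share == "auto"
    return bool(cfg.autoshare)
```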
diff --git a/src/opencode_ai/types/event_list_response.py b/src/opencode_ai/types/event_list_response.py
index 9ddd786..fd8832b 100644
--- a/src/opencode_ai/types/event_list_response.py
+++ b/src/opencode_ai/types/event_list_response.py
@@ -5,13 +5,14 @@
from pydantic import Field as FieldInfo
+from .part import Part
from .._utils import PropertyInfo
from .message import Message
from .session import Session
from .._models import BaseModel
from .shared.unknown_error import UnknownError
-from .assistant_message_part import AssistantMessagePart
from .shared.provider_auth_error import ProviderAuthError
+from .shared.message_aborted_error import MessageAbortedError
__all__ = [
"EventListResponse",
@@ -24,14 +25,14 @@
"EventFileEditedProperties",
"EventInstallationUpdated",
"EventInstallationUpdatedProperties",
- "EventStorageWrite",
- "EventStorageWriteProperties",
"EventMessageUpdated",
"EventMessageUpdatedProperties",
"EventMessageRemoved",
"EventMessageRemovedProperties",
"EventMessagePartUpdated",
"EventMessagePartUpdatedProperties",
+ "EventStorageWrite",
+ "EventStorageWriteProperties",
"EventSessionUpdated",
"EventSessionUpdatedProperties",
"EventSessionDeleted",
@@ -101,18 +102,6 @@ class EventInstallationUpdated(BaseModel):
type: Literal["installation.updated"]
-class EventStorageWriteProperties(BaseModel):
- key: str
-
- content: Optional[object] = None
-
-
-class EventStorageWrite(BaseModel):
- properties: EventStorageWriteProperties
-
- type: Literal["storage.write"]
-
-
class EventMessageUpdatedProperties(BaseModel):
info: Message
@@ -136,11 +125,7 @@ class EventMessageRemoved(BaseModel):
class EventMessagePartUpdatedProperties(BaseModel):
- message_id: str = FieldInfo(alias="messageID")
-
- part: AssistantMessagePart
-
- session_id: str = FieldInfo(alias="sessionID")
+ part: Part
class EventMessagePartUpdated(BaseModel):
@@ -149,6 +134,18 @@ class EventMessagePartUpdated(BaseModel):
type: Literal["message.part.updated"]
+class EventStorageWriteProperties(BaseModel):
+ key: str
+
+ content: Optional[object] = None
+
+
+class EventStorageWrite(BaseModel):
+ properties: EventStorageWriteProperties
+
+ type: Literal["storage.write"]
+
+
class EventSessionUpdatedProperties(BaseModel):
info: Session
@@ -186,7 +183,9 @@ class EventSessionErrorPropertiesErrorMessageOutputLengthError(BaseModel):
EventSessionErrorPropertiesError: TypeAlias = Annotated[
- Union[ProviderAuthError, UnknownError, EventSessionErrorPropertiesErrorMessageOutputLengthError],
+ Union[
+ ProviderAuthError, UnknownError, EventSessionErrorPropertiesErrorMessageOutputLengthError, MessageAbortedError
+ ],
PropertyInfo(discriminator="name"),
]
@@ -194,6 +193,8 @@ class EventSessionErrorPropertiesErrorMessageOutputLengthError(BaseModel):
class EventSessionErrorProperties(BaseModel):
error: Optional[EventSessionErrorPropertiesError] = None
+ session_id: Optional[str] = FieldInfo(alias="sessionID", default=None)
+
class EventSessionError(BaseModel):
properties: EventSessionErrorProperties
@@ -219,10 +220,10 @@ class EventFileWatcherUpdated(BaseModel):
EventPermissionUpdated,
EventFileEdited,
EventInstallationUpdated,
- EventStorageWrite,
EventMessageUpdated,
EventMessageRemoved,
EventMessagePartUpdated,
+ EventStorageWrite,
EventSessionUpdated,
EventSessionDeleted,
EventSessionIdle,
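`message.part.updated` events now carry only the `part`; the message and session IDs live on the part models themselves. A dispatch sketch over an already-received event:

```python
from opencode_ai.types import EventListResponse

def handle(event: EventListResponse) -> None:
    if event.type == "message.part.updated":
        part = event.properties.part
        # Every Part variant now carries its own IDs.
        print(part.session_id, part.message_id, part.type)
```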
diff --git a/src/opencode_ai/types/file.py b/src/opencode_ai/types/file.py
new file mode 100644
index 0000000..f156d68
--- /dev/null
+++ b/src/opencode_ai/types/file.py
@@ -0,0 +1,17 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing_extensions import Literal
+
+from .._models import BaseModel
+
+__all__ = ["File"]
+
+
+class File(BaseModel):
+ added: int
+
+ path: str
+
+ removed: int
+
+ status: Literal["added", "deleted", "modified"]
diff --git a/src/opencode_ai/types/file_part.py b/src/opencode_ai/types/file_part.py
index af7bfa2..a4b9934 100644
--- a/src/opencode_ai/types/file_part.py
+++ b/src/opencode_ai/types/file_part.py
@@ -3,14 +3,22 @@
from typing import Optional
from typing_extensions import Literal
+from pydantic import Field as FieldInfo
+
from .._models import BaseModel
__all__ = ["FilePart"]
class FilePart(BaseModel):
+ id: str
+
+ message_id: str = FieldInfo(alias="messageID")
+
mime: str
+ session_id: str = FieldInfo(alias="sessionID")
+
type: Literal["file"]
url: str
diff --git a/src/opencode_ai/types/file_part_param.py b/src/opencode_ai/types/file_part_param.py
index ac4229e..bd219ec 100644
--- a/src/opencode_ai/types/file_part_param.py
+++ b/src/opencode_ai/types/file_part_param.py
@@ -2,14 +2,22 @@
from __future__ import annotations
-from typing_extensions import Literal, Required, TypedDict
+from typing_extensions import Literal, Required, Annotated, TypedDict
+
+from .._utils import PropertyInfo
__all__ = ["FilePartParam"]
class FilePartParam(TypedDict, total=False):
+ id: Required[str]
+
+ message_id: Required[Annotated[str, PropertyInfo(alias="messageID")]]
+
mime: Required[str]
+ session_id: Required[Annotated[str, PropertyInfo(alias="sessionID")]]
+
type: Required[Literal["file"]]
url: Required[str]
diff --git a/src/opencode_ai/types/file_status_response.py b/src/opencode_ai/types/file_status_response.py
index 2141366..34a602b 100644
--- a/src/opencode_ai/types/file_status_response.py
+++ b/src/opencode_ai/types/file_status_response.py
@@ -1,21 +1,10 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import List
-from typing_extensions import Literal, TypeAlias
+from typing_extensions import TypeAlias
-from .._models import BaseModel
+from .file import File
-__all__ = ["FileStatusResponse", "FileStatusResponseItem"]
+__all__ = ["FileStatusResponse"]
-
-class FileStatusResponseItem(BaseModel):
- added: int
-
- file: str
-
- removed: int
-
- status: Literal["added", "deleted", "modified"]
-
-
-FileStatusResponse: TypeAlias = List[FileStatusResponseItem]
+FileStatusResponse: TypeAlias = List[File]
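File status items are now the shared `File` model, with `path` replacing the old `file` field. A sketch, assuming the pre-existing `client.file.status()` method (not shown in this diff) and a placeholder server URL:

```python
from opencode_ai import Opencode

client = Opencode(base_url="http://localhost:4096")  # placeholder URL

for f in client.file.status():
    print(f"{f.status:<9} {f.path} (+{f.added} -{f.removed})")
```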
diff --git a/src/opencode_ai/types/find_symbols_response.py b/src/opencode_ai/types/find_symbols_response.py
index 3f25235..a0bc12a 100644
--- a/src/opencode_ai/types/find_symbols_response.py
+++ b/src/opencode_ai/types/find_symbols_response.py
@@ -3,6 +3,8 @@
from typing import List
from typing_extensions import TypeAlias
+from .symbol import Symbol
+
__all__ = ["FindSymbolsResponse"]
-FindSymbolsResponse: TypeAlias = List[object]
+FindSymbolsResponse: TypeAlias = List[Symbol]
diff --git a/src/opencode_ai/types/find_text_response.py b/src/opencode_ai/types/find_text_response.py
index 4557834..8f47178 100644
--- a/src/opencode_ai/types/find_text_response.py
+++ b/src/opencode_ai/types/find_text_response.py
@@ -3,48 +3,8 @@
from typing import List
from typing_extensions import TypeAlias
-from .._models import BaseModel
+from .match import Match
-__all__ = [
- "FindTextResponse",
- "FindTextResponseItem",
- "FindTextResponseItemLines",
- "FindTextResponseItemPath",
- "FindTextResponseItemSubmatch",
- "FindTextResponseItemSubmatchMatch",
-]
+__all__ = ["FindTextResponse"]
-
-class FindTextResponseItemLines(BaseModel):
- text: str
-
-
-class FindTextResponseItemPath(BaseModel):
- text: str
-
-
-class FindTextResponseItemSubmatchMatch(BaseModel):
- text: str
-
-
-class FindTextResponseItemSubmatch(BaseModel):
- end: float
-
- match: FindTextResponseItemSubmatchMatch
-
- start: float
-
-
-class FindTextResponseItem(BaseModel):
- absolute_offset: float
-
- line_number: float
-
- lines: FindTextResponseItemLines
-
- path: FindTextResponseItemPath
-
- submatches: List[FindTextResponseItemSubmatch]
-
-
-FindTextResponse: TypeAlias = List[FindTextResponseItem]
+FindTextResponse: TypeAlias = List[Match]
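`FindTextResponse` is now a list of the shared `Match` model rather than an inline item type; the fields themselves are unchanged. A consumer sketch:

```python
from opencode_ai.types import FindTextResponse

def print_matches(matches: FindTextResponse) -> None:
    for m in matches:
        print(f"{m.path.text}:{int(m.line_number)}: {m.lines.text.rstrip()}")
        for sub in m.submatches:
            print(f"  '{sub.match.text}' at columns [{int(sub.start)}, {int(sub.end)})")
```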
diff --git a/src/opencode_ai/types/keybinds.py b/src/opencode_ai/types/keybinds.py
index 28219ef..a0b376f 100644
--- a/src/opencode_ai/types/keybinds.py
+++ b/src/opencode_ai/types/keybinds.py
@@ -1,7 +1,5 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-from typing import Optional
-
from pydantic import Field as FieldInfo
from .._models import BaseModel
@@ -10,83 +8,104 @@
class Keybinds(BaseModel):
- app_exit: Optional[str] = None
+ app_exit: str
"""Exit the application"""
- editor_open: Optional[str] = None
+ app_help: str
+ """Show help dialog"""
+
+ editor_open: str
"""Open external editor"""
- help: Optional[str] = None
- """Show help dialog"""
+ file_close: str
+ """Close file"""
+
+ file_diff_toggle: str
+ """Split/unified diff"""
- history_next: Optional[str] = None
- """Navigate to next history item"""
+ file_list: str
+ """List files"""
- history_previous: Optional[str] = None
- """Navigate to previous history item"""
+ file_search: str
+ """Search file"""
- input_clear: Optional[str] = None
+ input_clear: str
"""Clear input field"""
- input_newline: Optional[str] = None
+ input_newline: str
"""Insert newline in input"""
- input_paste: Optional[str] = None
+ input_paste: str
"""Paste from clipboard"""
- input_submit: Optional[str] = None
+ input_submit: str
"""Submit input"""
- leader: Optional[str] = None
+ leader: str
"""Leader key for keybind combinations"""
- messages_first: Optional[str] = None
+ messages_copy: str
+ """Copy message"""
+
+ messages_first: str
"""Navigate to first message"""
- messages_half_page_down: Optional[str] = None
+ messages_half_page_down: str
"""Scroll messages down by half page"""
- messages_half_page_up: Optional[str] = None
+ messages_half_page_up: str
"""Scroll messages up by half page"""
- messages_last: Optional[str] = None
+ messages_last: str
"""Navigate to last message"""
- messages_next: Optional[str] = None
+ messages_layout_toggle: str
+ """Toggle layout"""
+
+ messages_next: str
"""Navigate to next message"""
- messages_page_down: Optional[str] = None
+ messages_page_down: str
"""Scroll messages down by one page"""
- messages_page_up: Optional[str] = None
+ messages_page_up: str
"""Scroll messages up by one page"""
- messages_previous: Optional[str] = None
+ messages_previous: str
"""Navigate to previous message"""
- api_model_list: Optional[str] = FieldInfo(alias="model_list", default=None)
+ messages_revert: str
+ """Revert message"""
+
+ api_model_list: str = FieldInfo(alias="model_list")
"""List available models"""
- project_init: Optional[str] = None
- """Initialize project configuration"""
+ project_init: str
+ """Create/update AGENTS.md"""
- session_compact: Optional[str] = None
- """Toggle compact mode for session"""
+ session_compact: str
+ """Compact the session"""
- session_interrupt: Optional[str] = None
+ session_interrupt: str
"""Interrupt current session"""
- session_list: Optional[str] = None
+ session_list: str
"""List all sessions"""
- session_new: Optional[str] = None
+ session_new: str
"""Create a new session"""
- session_share: Optional[str] = None
+ session_share: str
"""Share current session"""
- theme_list: Optional[str] = None
+ session_unshare: str
+ """Unshare current session"""
+
+ switch_mode: str
+ """Switch mode"""
+
+ theme_list: str
"""List available themes"""
- tool_details: Optional[str] = None
- """Show tool details"""
+ tool_details: str
+ """Toggle tool details"""
diff --git a/src/opencode_ai/types/log_level.py b/src/opencode_ai/types/log_level.py
new file mode 100644
index 0000000..7d35a2e
--- /dev/null
+++ b/src/opencode_ai/types/log_level.py
@@ -0,0 +1,7 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing_extensions import Literal, TypeAlias
+
+__all__ = ["LogLevel"]
+
+LogLevel: TypeAlias = Literal["DEBUG", "INFO", "WARN", "ERROR"]
diff --git a/src/opencode_ai/types/match.py b/src/opencode_ai/types/match.py
new file mode 100644
index 0000000..2f1ca3c
--- /dev/null
+++ b/src/opencode_ai/types/match.py
@@ -0,0 +1,39 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import List
+
+from .._models import BaseModel
+
+__all__ = ["Match", "Lines", "Path", "Submatch", "SubmatchMatch"]
+
+
+class Lines(BaseModel):
+ text: str
+
+
+class Path(BaseModel):
+ text: str
+
+
+class SubmatchMatch(BaseModel):
+ text: str
+
+
+class Submatch(BaseModel):
+ end: float
+
+ match: SubmatchMatch
+
+ start: float
+
+
+class Match(BaseModel):
+ absolute_offset: float
+
+ line_number: float
+
+ lines: Lines
+
+ path: Path
+
+ submatches: List[Submatch]
diff --git a/src/opencode_ai/types/message.py b/src/opencode_ai/types/message.py
index 30be928..6e27c8d 100644
--- a/src/opencode_ai/types/message.py
+++ b/src/opencode_ai/types/message.py
@@ -1,32 +1,12 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-from typing import List, Union
-from typing_extensions import Literal, Annotated, TypeAlias
-
-from pydantic import Field as FieldInfo
+from typing import Union
+from typing_extensions import Annotated, TypeAlias
from .._utils import PropertyInfo
-from .._models import BaseModel
+from .user_message import UserMessage
from .assistant_message import AssistantMessage
-from .user_message_part import UserMessagePart
-
-__all__ = ["Message", "UserMessage", "UserMessageTime"]
-
-
-class UserMessageTime(BaseModel):
- created: float
-
-
-class UserMessage(BaseModel):
- id: str
-
- parts: List[UserMessagePart]
-
- role: Literal["user"]
-
- session_id: str = FieldInfo(alias="sessionID")
-
- time: UserMessageTime
+__all__ = ["Message"]
Message: TypeAlias = Annotated[Union[UserMessage, AssistantMessage], PropertyInfo(discriminator="role")]
diff --git a/src/opencode_ai/types/mode.py b/src/opencode_ai/types/mode.py
new file mode 100644
index 0000000..5e04502
--- /dev/null
+++ b/src/opencode_ai/types/mode.py
@@ -0,0 +1,25 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import Dict, Optional
+
+from pydantic import Field as FieldInfo
+
+from .._models import BaseModel
+
+__all__ = ["Mode", "Model"]
+
+
+class Model(BaseModel):
+ api_model_id: str = FieldInfo(alias="modelID")
+
+ provider_id: str = FieldInfo(alias="providerID")
+
+
+class Mode(BaseModel):
+ name: str
+
+ tools: Dict[str, bool]
+
+ model: Optional[Model] = None
+
+ prompt: Optional[str] = None
diff --git a/src/opencode_ai/types/part.py b/src/opencode_ai/types/part.py
new file mode 100644
index 0000000..7b4ddee
--- /dev/null
+++ b/src/opencode_ai/types/part.py
@@ -0,0 +1,30 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import Union
+from typing_extensions import Literal, TypeAlias
+
+from pydantic import Field as FieldInfo
+
+from .._models import BaseModel
+from .file_part import FilePart
+from .text_part import TextPart
+from .tool_part import ToolPart
+from .step_start_part import StepStartPart
+from .step_finish_part import StepFinishPart
+
+__all__ = ["Part", "UnionMember5"]
+
+
+class UnionMember5(BaseModel):
+ id: str
+
+ message_id: str = FieldInfo(alias="messageID")
+
+ session_id: str = FieldInfo(alias="sessionID")
+
+ snapshot: str
+
+ type: Literal["snapshot"]
+
+
+Part: TypeAlias = Union[TextPart, FilePart, ToolPart, StepStartPart, StepFinishPart, UnionMember5]
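The new `Part` union replaces the separate user/assistant part aliases; the `type` literal acts as the discriminator. A rendering sketch (the tool-state `status` field is assumed present on all state variants, as it is for the ones shown in this diff):

```python
from opencode_ai.types import Part

def render(part: Part) -> str:
    if part.type == "text":
        return part.text
    if part.type == "tool":
        return f"[tool {part.tool}: {part.state.status}]"
    if part.type == "step-finish":
        return f"[step finished, cost {part.cost}]"
    return f"[{part.type}]"
```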
diff --git a/src/opencode_ai/types/session_chat_params.py b/src/opencode_ai/types/session_chat_params.py
index e1d6780..51d2e2e 100644
--- a/src/opencode_ai/types/session_chat_params.py
+++ b/src/opencode_ai/types/session_chat_params.py
@@ -2,18 +2,26 @@
from __future__ import annotations
-from typing import Iterable
-from typing_extensions import Required, Annotated, TypedDict
+from typing import Union, Iterable
+from typing_extensions import Required, Annotated, TypeAlias, TypedDict
from .._utils import PropertyInfo
-from .user_message_part_param import UserMessagePartParam
+from .file_part_param import FilePartParam
+from .text_part_param import TextPartParam
-__all__ = ["SessionChatParams"]
+__all__ = ["SessionChatParams", "Part"]
class SessionChatParams(TypedDict, total=False):
+ message_id: Required[Annotated[str, PropertyInfo(alias="messageID")]]
+
+ mode: Required[str]
+
model_id: Required[Annotated[str, PropertyInfo(alias="modelID")]]
- parts: Required[Iterable[UserMessagePartParam]]
+ parts: Required[Iterable[Part]]
provider_id: Required[Annotated[str, PropertyInfo(alias="providerID")]]
+
+
+Part: TypeAlias = Union[FilePartParam, TextPartParam]
diff --git a/src/opencode_ai/types/session_init_params.py b/src/opencode_ai/types/session_init_params.py
index ce35585..8d6d0d8 100644
--- a/src/opencode_ai/types/session_init_params.py
+++ b/src/opencode_ai/types/session_init_params.py
@@ -10,6 +10,8 @@
class SessionInitParams(TypedDict, total=False):
+ message_id: Required[Annotated[str, PropertyInfo(alias="messageID")]]
+
model_id: Required[Annotated[str, PropertyInfo(alias="modelID")]]
provider_id: Required[Annotated[str, PropertyInfo(alias="providerID")]]
diff --git a/src/opencode_ai/types/session_messages_response.py b/src/opencode_ai/types/session_messages_response.py
index 1de44a9..6eb8692 100644
--- a/src/opencode_ai/types/session_messages_response.py
+++ b/src/opencode_ai/types/session_messages_response.py
@@ -3,8 +3,17 @@
from typing import List
from typing_extensions import TypeAlias
+from .part import Part
from .message import Message
+from .._models import BaseModel
-__all__ = ["SessionMessagesResponse"]
+__all__ = ["SessionMessagesResponse", "SessionMessagesResponseItem"]
-SessionMessagesResponse: TypeAlias = List[Message]
+
+class SessionMessagesResponseItem(BaseModel):
+ info: Message
+
+ parts: List[Part]
+
+
+SessionMessagesResponse: TypeAlias = List[SessionMessagesResponseItem]
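`session.messages()` now returns `{info, parts}` items instead of messages with embedded parts. An iteration sketch, assuming the pre-existing `client.session.messages()` method, a placeholder session id, and a placeholder server URL:

```python
from opencode_ai import Opencode

client = Opencode(base_url="http://localhost:4096")  # placeholder URL

for item in client.session.messages(id="ses_123"):
    print(item.info.role, item.info.id)
    for part in item.parts:
        if part.type == "text":
            print("  ", part.text)
```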
diff --git a/src/opencode_ai/types/shared/__init__.py b/src/opencode_ai/types/shared/__init__.py
index 589fe81..bc579a8 100644
--- a/src/opencode_ai/types/shared/__init__.py
+++ b/src/opencode_ai/types/shared/__init__.py
@@ -2,3 +2,4 @@
from .unknown_error import UnknownError as UnknownError
from .provider_auth_error import ProviderAuthError as ProviderAuthError
+from .message_aborted_error import MessageAbortedError as MessageAbortedError
diff --git a/src/opencode_ai/types/shared/message_aborted_error.py b/src/opencode_ai/types/shared/message_aborted_error.py
new file mode 100644
index 0000000..9ffdcaa
--- /dev/null
+++ b/src/opencode_ai/types/shared/message_aborted_error.py
@@ -0,0 +1,13 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing_extensions import Literal
+
+from ..._models import BaseModel
+
+__all__ = ["MessageAbortedError"]
+
+
+class MessageAbortedError(BaseModel):
+ data: object
+
+ name: Literal["MessageAbortedError"]
diff --git a/src/opencode_ai/types/step_finish_part.py b/src/opencode_ai/types/step_finish_part.py
new file mode 100644
index 0000000..b9f5b4b
--- /dev/null
+++ b/src/opencode_ai/types/step_finish_part.py
@@ -0,0 +1,39 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing_extensions import Literal
+
+from pydantic import Field as FieldInfo
+
+from .._models import BaseModel
+
+__all__ = ["StepFinishPart", "Tokens", "TokensCache"]
+
+
+class TokensCache(BaseModel):
+ read: float
+
+ write: float
+
+
+class Tokens(BaseModel):
+ cache: TokensCache
+
+ input: float
+
+ output: float
+
+ reasoning: float
+
+
+class StepFinishPart(BaseModel):
+ id: str
+
+ cost: float
+
+ message_id: str = FieldInfo(alias="messageID")
+
+ session_id: str = FieldInfo(alias="sessionID")
+
+ tokens: Tokens
+
+ type: Literal["step-finish"]
diff --git a/src/opencode_ai/types/step_start_part.py b/src/opencode_ai/types/step_start_part.py
index d28f4df..6c9e0df 100644
--- a/src/opencode_ai/types/step_start_part.py
+++ b/src/opencode_ai/types/step_start_part.py
@@ -2,10 +2,18 @@
from typing_extensions import Literal
+from pydantic import Field as FieldInfo
+
from .._models import BaseModel
__all__ = ["StepStartPart"]
class StepStartPart(BaseModel):
+ id: str
+
+ message_id: str = FieldInfo(alias="messageID")
+
+ session_id: str = FieldInfo(alias="sessionID")
+
type: Literal["step-start"]
diff --git a/src/opencode_ai/types/symbol.py b/src/opencode_ai/types/symbol.py
new file mode 100644
index 0000000..c7d1f99
--- /dev/null
+++ b/src/opencode_ai/types/symbol.py
@@ -0,0 +1,37 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from .._models import BaseModel
+
+__all__ = ["Symbol", "Location", "LocationRange", "LocationRangeEnd", "LocationRangeStart"]
+
+
+class LocationRangeEnd(BaseModel):
+ character: float
+
+ line: float
+
+
+class LocationRangeStart(BaseModel):
+ character: float
+
+ line: float
+
+
+class LocationRange(BaseModel):
+ end: LocationRangeEnd
+
+ start: LocationRangeStart
+
+
+class Location(BaseModel):
+ range: LocationRange
+
+ uri: str
+
+
+class Symbol(BaseModel):
+ kind: float
+
+ location: Location
+
+ name: str
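`FindSymbolsResponse` items are now typed `Symbol` models with LSP-style locations instead of plain objects. A formatting sketch:

```python
from opencode_ai.types import Symbol

def format_symbol(sym: Symbol) -> str:
    start = sym.location.range.start
    # LSP positions are zero-based; display a one-based line number.
    return f"{sym.name} (kind {int(sym.kind)}) at {sym.location.uri}:{int(start.line) + 1}"
```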
diff --git a/src/opencode_ai/types/text_part.py b/src/opencode_ai/types/text_part.py
index cd32fc6..514f409 100644
--- a/src/opencode_ai/types/text_part.py
+++ b/src/opencode_ai/types/text_part.py
@@ -1,13 +1,32 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+from typing import Optional
from typing_extensions import Literal
+from pydantic import Field as FieldInfo
+
from .._models import BaseModel
-__all__ = ["TextPart"]
+__all__ = ["TextPart", "Time"]
+
+
+class Time(BaseModel):
+ start: float
+
+ end: Optional[float] = None
class TextPart(BaseModel):
+ id: str
+
+ message_id: str = FieldInfo(alias="messageID")
+
+ session_id: str = FieldInfo(alias="sessionID")
+
text: str
type: Literal["text"]
+
+ synthetic: Optional[bool] = None
+
+ time: Optional[Time] = None
diff --git a/src/opencode_ai/types/text_part_param.py b/src/opencode_ai/types/text_part_param.py
index 5c41c9f..3129256 100644
--- a/src/opencode_ai/types/text_part_param.py
+++ b/src/opencode_ai/types/text_part_param.py
@@ -2,12 +2,30 @@
from __future__ import annotations
-from typing_extensions import Literal, Required, TypedDict
+from typing_extensions import Literal, Required, Annotated, TypedDict
-__all__ = ["TextPartParam"]
+from .._utils import PropertyInfo
+
+__all__ = ["TextPartParam", "Time"]
+
+
+class Time(TypedDict, total=False):
+ start: Required[float]
+
+ end: float
class TextPartParam(TypedDict, total=False):
+ id: Required[str]
+
+ message_id: Required[Annotated[str, PropertyInfo(alias="messageID")]]
+
+ session_id: Required[Annotated[str, PropertyInfo(alias="sessionID")]]
+
text: Required[str]
type: Required[Literal["text"]]
+
+ synthetic: bool
+
+ time: Time
diff --git a/src/opencode_ai/types/tool_part.py b/src/opencode_ai/types/tool_part.py
index cf5015d..2de8ed9 100644
--- a/src/opencode_ai/types/tool_part.py
+++ b/src/opencode_ai/types/tool_part.py
@@ -3,6 +3,8 @@
from typing import Union
from typing_extensions import Literal, Annotated, TypeAlias
+from pydantic import Field as FieldInfo
+
from .._utils import PropertyInfo
from .._models import BaseModel
from .tool_state_error import ToolStateError
@@ -20,6 +22,12 @@
class ToolPart(BaseModel):
id: str
+ call_id: str = FieldInfo(alias="callID")
+
+ message_id: str = FieldInfo(alias="messageID")
+
+ session_id: str = FieldInfo(alias="sessionID")
+
state: State
tool: str
diff --git a/src/opencode_ai/types/tool_state_completed.py b/src/opencode_ai/types/tool_state_completed.py
index 6c9eb63..5129842 100644
--- a/src/opencode_ai/types/tool_state_completed.py
+++ b/src/opencode_ai/types/tool_state_completed.py
@@ -1,6 +1,6 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-from typing import Dict, Optional
+from typing import Dict
from typing_extensions import Literal
from .._models import BaseModel
@@ -15,6 +15,8 @@ class Time(BaseModel):
class ToolStateCompleted(BaseModel):
+ input: Dict[str, object]
+
metadata: Dict[str, object]
output: str
@@ -24,5 +26,3 @@ class ToolStateCompleted(BaseModel):
time: Time
title: str
-
- input: Optional[object] = None
diff --git a/src/opencode_ai/types/tool_state_error.py b/src/opencode_ai/types/tool_state_error.py
index 1e8eced..141a4cd 100644
--- a/src/opencode_ai/types/tool_state_error.py
+++ b/src/opencode_ai/types/tool_state_error.py
@@ -1,6 +1,6 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-from typing import Optional
+from typing import Dict
from typing_extensions import Literal
from .._models import BaseModel
@@ -17,8 +17,8 @@ class Time(BaseModel):
class ToolStateError(BaseModel):
error: str
+ input: Dict[str, object]
+
status: Literal["error"]
time: Time
-
- input: Optional[object] = None
diff --git a/src/opencode_ai/types/user_message.py b/src/opencode_ai/types/user_message.py
new file mode 100644
index 0000000..64c44bf
--- /dev/null
+++ b/src/opencode_ai/types/user_message.py
@@ -0,0 +1,23 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing_extensions import Literal
+
+from pydantic import Field as FieldInfo
+
+from .._models import BaseModel
+
+__all__ = ["UserMessage", "Time"]
+
+
+class Time(BaseModel):
+ created: float
+
+
+class UserMessage(BaseModel):
+ id: str
+
+ role: Literal["user"]
+
+ session_id: str = FieldInfo(alias="sessionID")
+
+ time: Time
diff --git a/src/opencode_ai/types/user_message_part.py b/src/opencode_ai/types/user_message_part.py
deleted file mode 100644
index 000d6c4..0000000
--- a/src/opencode_ai/types/user_message_part.py
+++ /dev/null
@@ -1,12 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from typing import Union
-from typing_extensions import Annotated, TypeAlias
-
-from .._utils import PropertyInfo
-from .file_part import FilePart
-from .text_part import TextPart
-
-__all__ = ["UserMessagePart"]
-
-UserMessagePart: TypeAlias = Annotated[Union[TextPart, FilePart], PropertyInfo(discriminator="type")]
diff --git a/src/opencode_ai/types/user_message_part_param.py b/src/opencode_ai/types/user_message_part_param.py
deleted file mode 100644
index f516c35..0000000
--- a/src/opencode_ai/types/user_message_part_param.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from __future__ import annotations
-
-from typing import Union
-from typing_extensions import TypeAlias
-
-from .file_part_param import FilePartParam
-from .text_part_param import TextPartParam
-
-__all__ = ["UserMessagePartParam"]
-
-UserMessagePartParam: TypeAlias = Union[TextPartParam, FilePartParam]
diff --git a/tests/api_resources/test_app.py b/tests/api_resources/test_app.py
index ab04f6a..87123dc 100644
--- a/tests/api_resources/test_app.py
+++ b/tests/api_resources/test_app.py
@@ -9,7 +9,7 @@
from opencode_ai import Opencode, AsyncOpencode
from tests.utils import assert_matches_type
-from opencode_ai.types import App, AppInitResponse
+from opencode_ai.types import App, AppLogResponse, AppInitResponse, AppModesResponse
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
@@ -73,6 +73,85 @@ def test_streaming_response_init(self, client: Opencode) -> None:
assert cast(Any, response.is_closed) is True
+ @pytest.mark.skip()
+ @parametrize
+ def test_method_log(self, client: Opencode) -> None:
+ app = client.app.log(
+ level="debug",
+ message="message",
+ service="service",
+ )
+ assert_matches_type(AppLogResponse, app, path=["response"])
+
+ @pytest.mark.skip()
+ @parametrize
+ def test_method_log_with_all_params(self, client: Opencode) -> None:
+ app = client.app.log(
+ level="debug",
+ message="message",
+ service="service",
+ extra={"foo": "bar"},
+ )
+ assert_matches_type(AppLogResponse, app, path=["response"])
+
+ @pytest.mark.skip()
+ @parametrize
+ def test_raw_response_log(self, client: Opencode) -> None:
+ response = client.app.with_raw_response.log(
+ level="debug",
+ message="message",
+ service="service",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ app = response.parse()
+ assert_matches_type(AppLogResponse, app, path=["response"])
+
+ @pytest.mark.skip()
+ @parametrize
+ def test_streaming_response_log(self, client: Opencode) -> None:
+ with client.app.with_streaming_response.log(
+ level="debug",
+ message="message",
+ service="service",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ app = response.parse()
+ assert_matches_type(AppLogResponse, app, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @pytest.mark.skip()
+ @parametrize
+ def test_method_modes(self, client: Opencode) -> None:
+ app = client.app.modes()
+ assert_matches_type(AppModesResponse, app, path=["response"])
+
+ @pytest.mark.skip()
+ @parametrize
+ def test_raw_response_modes(self, client: Opencode) -> None:
+ response = client.app.with_raw_response.modes()
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ app = response.parse()
+ assert_matches_type(AppModesResponse, app, path=["response"])
+
+ @pytest.mark.skip()
+ @parametrize
+ def test_streaming_response_modes(self, client: Opencode) -> None:
+ with client.app.with_streaming_response.modes() as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ app = response.parse()
+ assert_matches_type(AppModesResponse, app, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
class TestAsyncApp:
parametrize = pytest.mark.parametrize(
@@ -134,3 +213,82 @@ async def test_streaming_response_init(self, async_client: AsyncOpencode) -> Non
assert_matches_type(AppInitResponse, app, path=["response"])
assert cast(Any, response.is_closed) is True
+
+ @pytest.mark.skip()
+ @parametrize
+ async def test_method_log(self, async_client: AsyncOpencode) -> None:
+ app = await async_client.app.log(
+ level="debug",
+ message="message",
+ service="service",
+ )
+ assert_matches_type(AppLogResponse, app, path=["response"])
+
+ @pytest.mark.skip()
+ @parametrize
+ async def test_method_log_with_all_params(self, async_client: AsyncOpencode) -> None:
+ app = await async_client.app.log(
+ level="debug",
+ message="message",
+ service="service",
+ extra={"foo": "bar"},
+ )
+ assert_matches_type(AppLogResponse, app, path=["response"])
+
+ @pytest.mark.skip()
+ @parametrize
+ async def test_raw_response_log(self, async_client: AsyncOpencode) -> None:
+ response = await async_client.app.with_raw_response.log(
+ level="debug",
+ message="message",
+ service="service",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ app = await response.parse()
+ assert_matches_type(AppLogResponse, app, path=["response"])
+
+ @pytest.mark.skip()
+ @parametrize
+ async def test_streaming_response_log(self, async_client: AsyncOpencode) -> None:
+ async with async_client.app.with_streaming_response.log(
+ level="debug",
+ message="message",
+ service="service",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ app = await response.parse()
+ assert_matches_type(AppLogResponse, app, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @pytest.mark.skip()
+ @parametrize
+ async def test_method_modes(self, async_client: AsyncOpencode) -> None:
+ app = await async_client.app.modes()
+ assert_matches_type(AppModesResponse, app, path=["response"])
+
+ @pytest.mark.skip()
+ @parametrize
+ async def test_raw_response_modes(self, async_client: AsyncOpencode) -> None:
+ response = await async_client.app.with_raw_response.modes()
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ app = await response.parse()
+ assert_matches_type(AppModesResponse, app, path=["response"])
+
+ @pytest.mark.skip()
+ @parametrize
+ async def test_streaming_response_modes(self, async_client: AsyncOpencode) -> None:
+ async with async_client.app.with_streaming_response.modes() as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ app = await response.parse()
+ assert_matches_type(AppModesResponse, app, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
diff --git a/tests/api_resources/test_session.py b/tests/api_resources/test_session.py
index 0993082..379cde4 100644
--- a/tests/api_resources/test_session.py
+++ b/tests/api_resources/test_session.py
@@ -171,11 +171,17 @@ def test_path_params_abort(self, client: Opencode) -> None:
def test_method_chat(self, client: Opencode) -> None:
session = client.session.chat(
id="id",
+ message_id="messageID",
+ mode="mode",
model_id="modelID",
parts=[
{
- "text": "text",
- "type": "text",
+ "id": "id",
+ "message_id": "messageID",
+ "mime": "mime",
+ "session_id": "sessionID",
+ "type": "file",
+ "url": "url",
}
],
provider_id="providerID",
@@ -187,11 +193,17 @@ def test_method_chat(self, client: Opencode) -> None:
def test_raw_response_chat(self, client: Opencode) -> None:
response = client.session.with_raw_response.chat(
id="id",
+ message_id="messageID",
+ mode="mode",
model_id="modelID",
parts=[
{
- "text": "text",
- "type": "text",
+ "id": "id",
+ "message_id": "messageID",
+ "mime": "mime",
+ "session_id": "sessionID",
+ "type": "file",
+ "url": "url",
}
],
provider_id="providerID",
@@ -207,11 +219,17 @@ def test_raw_response_chat(self, client: Opencode) -> None:
def test_streaming_response_chat(self, client: Opencode) -> None:
with client.session.with_streaming_response.chat(
id="id",
+ message_id="messageID",
+ mode="mode",
model_id="modelID",
parts=[
{
- "text": "text",
- "type": "text",
+ "id": "id",
+ "message_id": "messageID",
+ "mime": "mime",
+ "session_id": "sessionID",
+ "type": "file",
+ "url": "url",
}
],
provider_id="providerID",
@@ -230,11 +248,17 @@ def test_path_params_chat(self, client: Opencode) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"):
client.session.with_raw_response.chat(
id="",
+ message_id="messageID",
+ mode="mode",
model_id="modelID",
parts=[
{
- "text": "text",
- "type": "text",
+ "id": "id",
+ "message_id": "messageID",
+ "mime": "mime",
+ "session_id": "sessionID",
+ "type": "file",
+ "url": "url",
}
],
provider_id="providerID",
@@ -245,6 +269,7 @@ def test_path_params_chat(self, client: Opencode) -> None:
def test_method_init(self, client: Opencode) -> None:
session = client.session.init(
id="id",
+ message_id="messageID",
model_id="modelID",
provider_id="providerID",
)
@@ -255,6 +280,7 @@ def test_method_init(self, client: Opencode) -> None:
def test_raw_response_init(self, client: Opencode) -> None:
response = client.session.with_raw_response.init(
id="id",
+ message_id="messageID",
model_id="modelID",
provider_id="providerID",
)
@@ -269,6 +295,7 @@ def test_raw_response_init(self, client: Opencode) -> None:
def test_streaming_response_init(self, client: Opencode) -> None:
with client.session.with_streaming_response.init(
id="id",
+ message_id="messageID",
model_id="modelID",
provider_id="providerID",
) as response:
@@ -286,6 +313,7 @@ def test_path_params_init(self, client: Opencode) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"):
client.session.with_raw_response.init(
id="",
+ message_id="messageID",
model_id="modelID",
provider_id="providerID",
)
@@ -617,11 +645,17 @@ async def test_path_params_abort(self, async_client: AsyncOpencode) -> None:
async def test_method_chat(self, async_client: AsyncOpencode) -> None:
session = await async_client.session.chat(
id="id",
+ message_id="messageID",
+ mode="mode",
model_id="modelID",
parts=[
{
- "text": "text",
- "type": "text",
+ "id": "id",
+ "message_id": "messageID",
+ "mime": "mime",
+ "session_id": "sessionID",
+ "type": "file",
+ "url": "url",
}
],
provider_id="providerID",
@@ -633,11 +667,17 @@ async def test_method_chat(self, async_client: AsyncOpencode) -> None:
async def test_raw_response_chat(self, async_client: AsyncOpencode) -> None:
response = await async_client.session.with_raw_response.chat(
id="id",
+ message_id="messageID",
+ mode="mode",
model_id="modelID",
parts=[
{
- "text": "text",
- "type": "text",
+ "id": "id",
+ "message_id": "messageID",
+ "mime": "mime",
+ "session_id": "sessionID",
+ "type": "file",
+ "url": "url",
}
],
provider_id="providerID",
@@ -653,11 +693,17 @@ async def test_raw_response_chat(self, async_client: AsyncOpencode) -> None:
async def test_streaming_response_chat(self, async_client: AsyncOpencode) -> None:
async with async_client.session.with_streaming_response.chat(
id="id",
+ message_id="messageID",
+ mode="mode",
model_id="modelID",
parts=[
{
- "text": "text",
- "type": "text",
+ "id": "id",
+ "message_id": "messageID",
+ "mime": "mime",
+ "session_id": "sessionID",
+ "type": "file",
+ "url": "url",
}
],
provider_id="providerID",
@@ -676,11 +722,17 @@ async def test_path_params_chat(self, async_client: AsyncOpencode) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"):
await async_client.session.with_raw_response.chat(
id="",
+ message_id="messageID",
+ mode="mode",
model_id="modelID",
parts=[
{
- "text": "text",
- "type": "text",
+ "id": "id",
+ "message_id": "messageID",
+ "mime": "mime",
+ "session_id": "sessionID",
+ "type": "file",
+ "url": "url",
}
],
provider_id="providerID",
@@ -691,6 +743,7 @@ async def test_path_params_chat(self, async_client: AsyncOpencode) -> None:
async def test_method_init(self, async_client: AsyncOpencode) -> None:
session = await async_client.session.init(
id="id",
+ message_id="messageID",
model_id="modelID",
provider_id="providerID",
)
@@ -701,6 +754,7 @@ async def test_method_init(self, async_client: AsyncOpencode) -> None:
async def test_raw_response_init(self, async_client: AsyncOpencode) -> None:
response = await async_client.session.with_raw_response.init(
id="id",
+ message_id="messageID",
model_id="modelID",
provider_id="providerID",
)
@@ -715,6 +769,7 @@ async def test_raw_response_init(self, async_client: AsyncOpencode) -> None:
async def test_streaming_response_init(self, async_client: AsyncOpencode) -> None:
async with async_client.session.with_streaming_response.init(
id="id",
+ message_id="messageID",
model_id="modelID",
provider_id="providerID",
) as response:
@@ -732,6 +787,7 @@ async def test_path_params_init(self, async_client: AsyncOpencode) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"):
await async_client.session.with_raw_response.init(
id="",
+ message_id="messageID",
model_id="modelID",
provider_id="providerID",
)