diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index b386bef..fac1407 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
 {
-  ".": "0.1.0-alpha.19"
+  ".": "0.1.0-alpha.20"
 }
\ No newline at end of file
diff --git a/.stats.yml b/.stats.yml
index 22b27ac..bab7aeb 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1,4 +1,4 @@
 configured_endpoints: 22
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/opencode%2Fopencode-8792f91dd070f7b4ee671fc86e8a03976dc7fb6ee49f8c99ad989e1597003774.yml
-openapi_spec_hash: fe9dc3a074be560de0b97df9b5af2c1b
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/opencode%2Fopencode-f9a2ce0fd0addc365614ef8c4bb1746f5ba14bc92f3d01f4cd85fb3b33a8d975.yml
+openapi_spec_hash: c58dec8e5250d41f216cded6afb763af
 config_hash: b7f3d9742335715c458494988498b183
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 36c737a..9420e1a 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,13 @@
 # Changelog
 
+## 0.1.0-alpha.20 (2025-07-17)
+
+Full Changelog: [v0.1.0-alpha.19...v0.1.0-alpha.20](https://github.com/sst/opencode-sdk-python/compare/v0.1.0-alpha.19...v0.1.0-alpha.20)
+
+### Features
+
+* **api:** api update ([f48c0d6](https://github.com/sst/opencode-sdk-python/commit/f48c0d6bb1943df3e3758d19b83c70fd1c15e2c2))
+
 ## 0.1.0-alpha.19 (2025-07-16)
 
 Full Changelog: [v0.1.0-alpha.18...v0.1.0-alpha.19](https://github.com/sst/opencode-sdk-python/compare/v0.1.0-alpha.18...v0.1.0-alpha.19)
diff --git a/pyproject.toml b/pyproject.toml
index a0d2553..420f74f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "opencode-ai"
-version = "0.1.0-alpha.19"
+version = "0.1.0-alpha.20"
 description = "The official Python library for the opencode API"
 dynamic = ["readme"]
 license = "Apache-2.0"
diff --git a/src/opencode_ai/_version.py b/src/opencode_ai/_version.py
index 0e1e5de..ac3c99b 100644
--- a/src/opencode_ai/_version.py
+++ b/src/opencode_ai/_version.py
@@ -1,4 +1,4 @@
 # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
 
 __title__ = "opencode_ai"
-__version__ = "0.1.0-alpha.19"  # x-release-please-version
+__version__ = "0.1.0-alpha.20"  # x-release-please-version
diff --git a/src/opencode_ai/resources/session.py b/src/opencode_ai/resources/session.py
index 087f3b4..8a3cb46 100644
--- a/src/opencode_ai/resources/session.py
+++ b/src/opencode_ai/resources/session.py
@@ -158,11 +158,12 @@ def chat(
         self,
         id: str,
         *,
-        message_id: str,
-        mode: str,
         model_id: str,
         parts: Iterable[session_chat_params.Part],
         provider_id: str,
+        session_id: str,
+        message_id: str | NotGiven = NOT_GIVEN,
+        mode: str | NotGiven = NOT_GIVEN,
         # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
         # The extra values given here take precedence over values defined on the client or passed to this method.
         extra_headers: Headers | None = None,
@@ -190,11 +191,12 @@ def chat(
             f"/session/{id}/message",
             body=maybe_transform(
                 {
-                    "message_id": message_id,
-                    "mode": mode,
                     "model_id": model_id,
                     "parts": parts,
                     "provider_id": provider_id,
+                    "session_id": session_id,
+                    "message_id": message_id,
+                    "mode": mode,
                 },
                 session_chat_params.SessionChatParams,
             ),
@@ -524,11 +526,12 @@ async def chat(
         self,
         id: str,
         *,
-        message_id: str,
-        mode: str,
         model_id: str,
         parts: Iterable[session_chat_params.Part],
         provider_id: str,
+        session_id: str,
+        message_id: str | NotGiven = NOT_GIVEN,
+        mode: str | NotGiven = NOT_GIVEN,
         # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
         # The extra values given here take precedence over values defined on the client or passed to this method.
         extra_headers: Headers | None = None,
@@ -556,11 +559,12 @@ async def chat(
             f"/session/{id}/message",
             body=await async_maybe_transform(
                 {
-                    "message_id": message_id,
-                    "mode": mode,
                     "model_id": model_id,
                     "parts": parts,
                     "provider_id": provider_id,
+                    "session_id": session_id,
+                    "message_id": message_id,
+                    "mode": mode,
                 },
                 session_chat_params.SessionChatParams,
             ),
diff --git a/src/opencode_ai/types/__init__.py b/src/opencode_ai/types/__init__.py
index f07c40b..801de7d 100644
--- a/src/opencode_ai/types/__init__.py
+++ b/src/opencode_ai/types/__init__.py
@@ -27,10 +27,8 @@
 from .layout_config import LayoutConfig as LayoutConfig
 from .snapshot_part import SnapshotPart as SnapshotPart
 from .app_log_params import AppLogParams as AppLogParams
-from .file_part_param import FilePartParam as FilePartParam
 from .keybinds_config import KeybindsConfig as KeybindsConfig
 from .step_start_part import StepStartPart as StepStartPart
-from .text_part_param import TextPartParam as TextPartParam
 from .app_log_response import AppLogResponse as AppLogResponse
 from .file_read_params import FileReadParams as FileReadParams
 from .find_text_params import FindTextParams as FindTextParams
diff --git a/src/opencode_ai/types/config.py b/src/opencode_ai/types/config.py
index 9f1490d..51a4792 100644
--- a/src/opencode_ai/types/config.py
+++ b/src/opencode_ai/types/config.py
@@ -162,10 +162,10 @@ class Config(BaseModel):
     provider: Optional[Dict[str, Provider]] = None
     """Custom provider configurations and model overrides"""
 
-    share: Optional[Literal["auto", "disabled"]] = None
+    share: Optional[Literal["manual", "auto", "disabled"]] = None
     """
-    Control sharing behavior: 'auto' enables automatic sharing, 'disabled' disables
-    all sharing
+    Control sharing behavior:'manual' allows manual sharing via commands, 'auto'
+    enables automatic sharing, 'disabled' disables all sharing
     """
 
     theme: Optional[str] = None
diff --git a/src/opencode_ai/types/file_part_param.py b/src/opencode_ai/types/file_part_param.py
deleted file mode 100644
index bd219ec..0000000
--- a/src/opencode_ai/types/file_part_param.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from __future__ import annotations
-
-from typing_extensions import Literal, Required, Annotated, TypedDict
-
-from .._utils import PropertyInfo
-
-__all__ = ["FilePartParam"]
-
-
-class FilePartParam(TypedDict, total=False):
-    id: Required[str]
-
-    message_id: Required[Annotated[str, PropertyInfo(alias="messageID")]]
-
-    mime: Required[str]
-
-    session_id: Required[Annotated[str, PropertyInfo(alias="sessionID")]]
-
-    type: Required[Literal["file"]]
-
-    url: Required[str]
-
-    filename: str
diff --git a/src/opencode_ai/types/keybinds_config.py b/src/opencode_ai/types/keybinds_config.py
index 0513d8f..ca53231 100644
--- a/src/opencode_ai/types/keybinds_config.py
+++ b/src/opencode_ai/types/keybinds_config.py
@@ -105,7 +105,10 @@ class KeybindsConfig(BaseModel):
     """Unshare current session"""
 
     switch_mode: str
-    """Switch mode"""
+    """Next mode"""
+
+    switch_mode_reverse: str
+    """Previous Mode"""
 
     theme_list: str
     """List available themes"""
diff --git a/src/opencode_ai/types/mcp_remote_config.py b/src/opencode_ai/types/mcp_remote_config.py
index ddbb7d4..6863ec7 100644
--- a/src/opencode_ai/types/mcp_remote_config.py
+++ b/src/opencode_ai/types/mcp_remote_config.py
@@ -1,6 +1,6 @@
 # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
 
-from typing import Optional
+from typing import Dict, Optional
 from typing_extensions import Literal
 
 from .._models import BaseModel
@@ -17,3 +17,6 @@ class McpRemoteConfig(BaseModel):
 
     enabled: Optional[bool] = None
     """Enable or disable the MCP server on startup"""
+
+    headers: Optional[Dict[str, str]] = None
+    """Headers to send with the request"""
diff --git a/src/opencode_ai/types/session_chat_params.py b/src/opencode_ai/types/session_chat_params.py
index 51d2e2e..7475317 100644
--- a/src/opencode_ai/types/session_chat_params.py
+++ b/src/opencode_ai/types/session_chat_params.py
@@ -3,25 +3,55 @@
 from __future__ import annotations
 
 from typing import Union, Iterable
-from typing_extensions import Required, Annotated, TypeAlias, TypedDict
+from typing_extensions import Literal, Required, Annotated, TypeAlias, TypedDict
 
 from .._utils import PropertyInfo
-from .file_part_param import FilePartParam
-from .text_part_param import TextPartParam
 
-__all__ = ["SessionChatParams", "Part"]
+__all__ = ["SessionChatParams", "Part", "PartUnionMember0", "PartUnionMember0Time", "PartUnionMember1"]
 
 
 class SessionChatParams(TypedDict, total=False):
-    message_id: Required[Annotated[str, PropertyInfo(alias="messageID")]]
-
-    mode: Required[str]
-
     model_id: Required[Annotated[str, PropertyInfo(alias="modelID")]]
 
     parts: Required[Iterable[Part]]
 
     provider_id: Required[Annotated[str, PropertyInfo(alias="providerID")]]
 
+    session_id: Required[Annotated[str, PropertyInfo(alias="sessionID")]]
+
+    message_id: Annotated[str, PropertyInfo(alias="messageID")]
+
+    mode: str
+
+
+class PartUnionMember0Time(TypedDict, total=False):
+    start: Required[float]
+
+    end: float
+
+
+class PartUnionMember0(TypedDict, total=False):
+    text: Required[str]
+
+    type: Required[Literal["text"]]
+
+    id: str
+
+    synthetic: bool
+
+    time: PartUnionMember0Time
+
+
+class PartUnionMember1(TypedDict, total=False):
+    mime: Required[str]
+
+    type: Required[Literal["file"]]
+
+    url: Required[str]
+
+    id: str
+
+    filename: str
+
 
-Part: TypeAlias = Union[FilePartParam, TextPartParam]
+Part: TypeAlias = Union[PartUnionMember0, PartUnionMember1]
diff --git a/src/opencode_ai/types/text_part_param.py b/src/opencode_ai/types/text_part_param.py
deleted file mode 100644
index 3129256..0000000
--- a/src/opencode_ai/types/text_part_param.py
+++ /dev/null
@@ -1,31 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from __future__ import annotations
-
-from typing_extensions import Literal, Required, Annotated, TypedDict
-
-from .._utils import PropertyInfo
-
-__all__ = ["TextPartParam", "Time"]
-
-
-class Time(TypedDict, total=False):
-    start: Required[float]
-
-    end: float
-
-
-class TextPartParam(TypedDict, total=False):
-    id: Required[str]
-
-    message_id: Required[Annotated[str, PropertyInfo(alias="messageID")]]
-
-    session_id: Required[Annotated[str, PropertyInfo(alias="sessionID")]]
-
-    text: Required[str]
-
-    type: Required[Literal["text"]]
-
-    synthetic: bool
-
-    time: Time
diff --git a/tests/api_resources/test_session.py b/tests/api_resources/test_session.py
index 379cde4..670c501 100644
--- a/tests/api_resources/test_session.py
+++ b/tests/api_resources/test_session.py
@@ -171,20 +171,40 @@ def test_path_params_abort(self, client: Opencode) -> None:
     def test_method_chat(self, client: Opencode) -> None:
         session = client.session.chat(
             id="id",
-            message_id="messageID",
-            mode="mode",
             model_id="modelID",
             parts=[
                 {
+                    "text": "text",
+                    "type": "text",
+                }
+            ],
+            provider_id="providerID",
+            session_id="ses",
+        )
+        assert_matches_type(AssistantMessage, session, path=["response"])
+
+    @pytest.mark.skip()
+    @parametrize
+    def test_method_chat_with_all_params(self, client: Opencode) -> None:
+        session = client.session.chat(
+            id="id",
+            model_id="modelID",
+            parts=[
+                {
+                    "text": "text",
+                    "type": "text",
                     "id": "id",
-                    "message_id": "messageID",
-                    "mime": "mime",
-                    "session_id": "sessionID",
-                    "type": "file",
-                    "url": "url",
+                    "synthetic": True,
+                    "time": {
+                        "start": 0,
+                        "end": 0,
+                    },
                 }
             ],
             provider_id="providerID",
+            session_id="ses",
+            message_id="msg",
+            mode="mode",
         )
         assert_matches_type(AssistantMessage, session, path=["response"])
 
@@ -193,20 +213,15 @@
     def test_raw_response_chat(self, client: Opencode) -> None:
         response = client.session.with_raw_response.chat(
             id="id",
-            message_id="messageID",
-            mode="mode",
             model_id="modelID",
             parts=[
                 {
-                    "id": "id",
-                    "message_id": "messageID",
-                    "mime": "mime",
-                    "session_id": "sessionID",
-                    "type": "file",
-                    "url": "url",
+                    "text": "text",
+                    "type": "text",
                 }
             ],
             provider_id="providerID",
+            session_id="ses",
         )
 
         assert response.is_closed is True
@@ -219,20 +234,15 @@
     def test_streaming_response_chat(self, client: Opencode) -> None:
         with client.session.with_streaming_response.chat(
             id="id",
-            message_id="messageID",
-            mode="mode",
             model_id="modelID",
             parts=[
                 {
-                    "id": "id",
-                    "message_id": "messageID",
-                    "mime": "mime",
-                    "session_id": "sessionID",
-                    "type": "file",
-                    "url": "url",
+                    "text": "text",
+                    "type": "text",
                 }
             ],
             provider_id="providerID",
+            session_id="ses",
         ) as response:
             assert not response.is_closed
             assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -248,20 +258,15 @@ def test_path_params_chat(self, client: Opencode) -> None:
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"):
             client.session.with_raw_response.chat(
                 id="",
-                message_id="messageID",
-                mode="mode",
                 model_id="modelID",
                 parts=[
                     {
-                        "id": "id",
-                        "message_id": "messageID",
-                        "mime": "mime",
-                        "session_id": "sessionID",
-                        "type": "file",
-                        "url": "url",
+                        "text": "text",
+                        "type": "text",
                     }
                 ],
                 provider_id="providerID",
+                session_id="ses",
             )
 
     @pytest.mark.skip()
@@ -645,20 +650,40 @@ async def test_path_params_abort(self, async_client: AsyncOpencode) -> None:
     async def test_method_chat(self, async_client: AsyncOpencode) -> None:
         session = await async_client.session.chat(
             id="id",
-            message_id="messageID",
-            mode="mode",
             model_id="modelID",
             parts=[
                 {
+                    "text": "text",
+                    "type": "text",
+                }
+            ],
+            provider_id="providerID",
+            session_id="ses",
+        )
+        assert_matches_type(AssistantMessage, session, path=["response"])
+
+    @pytest.mark.skip()
+    @parametrize
+    async def test_method_chat_with_all_params(self, async_client: AsyncOpencode) -> None:
+        session = await async_client.session.chat(
+            id="id",
+            model_id="modelID",
+            parts=[
+                {
+                    "text": "text",
+                    "type": "text",
                     "id": "id",
-                    "message_id": "messageID",
-                    "mime": "mime",
-                    "session_id": "sessionID",
-                    "type": "file",
-                    "url": "url",
+                    "synthetic": True,
+                    "time": {
+                        "start": 0,
+                        "end": 0,
+                    },
                 }
             ],
             provider_id="providerID",
+            session_id="ses",
+            message_id="msg",
+            mode="mode",
         )
         assert_matches_type(AssistantMessage, session, path=["response"])
 
@@ -667,20 +692,15 @@
     async def test_raw_response_chat(self, async_client: AsyncOpencode) -> None:
         response = await async_client.session.with_raw_response.chat(
             id="id",
-            message_id="messageID",
-            mode="mode",
             model_id="modelID",
             parts=[
                 {
-                    "id": "id",
-                    "message_id": "messageID",
-                    "mime": "mime",
-                    "session_id": "sessionID",
-                    "type": "file",
-                    "url": "url",
+                    "text": "text",
+                    "type": "text",
                 }
             ],
             provider_id="providerID",
+            session_id="ses",
        )
 
         assert response.is_closed is True
@@ -693,20 +713,15 @@
     async def test_streaming_response_chat(self, async_client: AsyncOpencode) -> None:
         async with async_client.session.with_streaming_response.chat(
             id="id",
-            message_id="messageID",
-            mode="mode",
             model_id="modelID",
             parts=[
                 {
-                    "id": "id",
-                    "message_id": "messageID",
-                    "mime": "mime",
-                    "session_id": "sessionID",
-                    "type": "file",
-                    "url": "url",
+                    "text": "text",
+                    "type": "text",
                 }
             ],
             provider_id="providerID",
+            session_id="ses",
         ) as response:
             assert not response.is_closed
             assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -722,20 +737,15 @@ async def test_path_params_chat(self, async_client: AsyncOpencode) -> None:
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"):
             await async_client.session.with_raw_response.chat(
                 id="",
-                message_id="messageID",
-                mode="mode",
                 model_id="modelID",
                 parts=[
                     {
-                        "id": "id",
-                        "message_id": "messageID",
-                        "mime": "mime",
-                        "session_id": "sessionID",
-                        "type": "file",
-                        "url": "url",
+                        "text": "text",
+                        "type": "text",
                     }
                 ],
                 provider_id="providerID",
+                session_id="ses",
             )
 
     @pytest.mark.skip()
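
Usage sketch for the updated session.chat call shape shown above: session_id is now a required body field alongside provider_id and model_id, message_id and mode become optional, and message parts are passed as inline text/file dicts instead of the removed TextPartParam/FilePartParam types. The base URL, session id, provider and model values below are illustrative placeholders, not taken from this diff.

    from opencode_ai import Opencode

    # Placeholder values: point base_url at your running opencode server and use a real session id.
    client = Opencode(base_url="http://localhost:4096")

    message = client.session.chat(
        id="ses_123",          # path parameter: the session receiving the message
        session_id="ses_123",  # now required in the request body (sent as sessionID)
        provider_id="anthropic",     # sent as providerID
        model_id="claude-sonnet-4",  # sent as modelID
        parts=[{"type": "text", "text": "Hello!"}],  # file parts use {"type": "file", "mime": ..., "url": ...}
        # message_id and mode are optional now (NOT_GIVEN by default)
    )
    print(message)  # an AssistantMessage, per the updated tests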