diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index 380b6f9..3188ced 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
 {
-  ".": "0.1.0-alpha.24"
+  ".": "0.1.0-alpha.25"
 }
\ No newline at end of file
diff --git a/.stats.yml b/.stats.yml
index 32b1351..9110eeb 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1,4 +1,4 @@
-configured_endpoints: 22
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/opencode%2Fopencode-5e3b108daef12bb43f728ee42261cd1322e8909e6225db58d6fe0617f3b5904c.yml
-openapi_spec_hash: 1f9d2853ba0c1179e45426560e082ef4
-config_hash: 6d56a7ca0d6ed899ecdb5c053a8278ae
+configured_endpoints: 23
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/opencode%2Fopencode-69b99aaffe10dd5247638b6a34d6c0b3c1cf5300853d12c947151fd946e7fcdb.yml
+openapi_spec_hash: e2c746cf689d71f04c6e9b1bd92e6356
+config_hash: d779331eb3dabf2d99f2a20be154d1c9
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 42c2638..84ac353 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,13 @@
 # Changelog
 
+## 0.1.0-alpha.25 (2025-07-21)
+
+Full Changelog: [v0.1.0-alpha.24...v0.1.0-alpha.25](https://github.com/sst/opencode-sdk-python/compare/v0.1.0-alpha.24...v0.1.0-alpha.25)
+
+### Features
+
+* **api:** api update ([a85f832](https://github.com/sst/opencode-sdk-python/commit/a85f832a942075091b9ca3f7e6399ba00239f354))
+
 ## 0.1.0-alpha.24 (2025-07-21)
 
 Full Changelog: [v0.1.0-alpha.23...v0.1.0-alpha.24](https://github.com/sst/opencode-sdk-python/compare/v0.1.0-alpha.23...v0.1.0-alpha.24)
diff --git a/api.md b/api.md
index e8f80d9..aa2f3b1 100644
--- a/api.md
+++ b/api.md
@@ -134,3 +134,15 @@ Methods:
 - client.session.share(id) -> Session
 - client.session.summarize(id, \*\*params) -> SessionSummarizeResponse
 - client.session.unshare(id) -> Session
+
+# Tui
+
+Types:
+
+```python
+from opencode_ai.types import TuiPromptResponse
+```
+
+Methods:
+
+- client.tui.prompt() -> TuiPromptResponse
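A minimal sketch of calling the endpoint documented in api.md above, assuming a running opencode server; the base URL is illustrative and not part of this diff:

```python
from opencode_ai import Opencode

# Illustrative: point the client at wherever your opencode server is listening.
client = Opencode(base_url="http://localhost:4096")

# POST /tui/prompt -- the parsed result is a plain bool (TuiPromptResponse).
accepted = client.tui.prompt()
print(accepted)
```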
diff --git a/pyproject.toml b/pyproject.toml
index cc7a6a2..74c0396 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "opencode-ai"
-version = "0.1.0-alpha.24"
+version = "0.1.0-alpha.25"
 description = "The official Python library for the opencode API"
 dynamic = ["readme"]
 license = "Apache-2.0"
diff --git a/src/opencode_ai/_client.py b/src/opencode_ai/_client.py
index b162cc3..cabe5b7 100644
--- a/src/opencode_ai/_client.py
+++ b/src/opencode_ai/_client.py
@@ -21,7 +21,7 @@
 )
 from ._utils import is_given, get_async_library
 from ._version import __version__
-from .resources import app, file, find, event, config, session
+from .resources import app, tui, file, find, event, config, session
 from ._streaming import Stream as Stream, AsyncStream as AsyncStream
 from ._exceptions import APIStatusError
 from ._base_client import (
@@ -49,6 +49,7 @@ class Opencode(SyncAPIClient):
     file: file.FileResource
     config: config.ConfigResource
     session: session.SessionResource
+    tui: tui.TuiResource
     with_raw_response: OpencodeWithRawResponse
     with_streaming_response: OpencodeWithStreamedResponse
 
@@ -101,6 +102,7 @@ def __init__(
         self.file = file.FileResource(self)
         self.config = config.ConfigResource(self)
         self.session = session.SessionResource(self)
+        self.tui = tui.TuiResource(self)
         self.with_raw_response = OpencodeWithRawResponse(self)
         self.with_streaming_response = OpencodeWithStreamedResponse(self)
 
@@ -208,6 +210,7 @@ class AsyncOpencode(AsyncAPIClient):
     file: file.AsyncFileResource
     config: config.AsyncConfigResource
     session: session.AsyncSessionResource
+    tui: tui.AsyncTuiResource
     with_raw_response: AsyncOpencodeWithRawResponse
     with_streaming_response: AsyncOpencodeWithStreamedResponse
 
@@ -260,6 +263,7 @@ def __init__(
         self.file = file.AsyncFileResource(self)
         self.config = config.AsyncConfigResource(self)
         self.session = session.AsyncSessionResource(self)
+        self.tui = tui.AsyncTuiResource(self)
         self.with_raw_response = AsyncOpencodeWithRawResponse(self)
         self.with_streaming_response = AsyncOpencodeWithStreamedResponse(self)
 
@@ -368,6 +372,7 @@ def __init__(self, client: Opencode) -> None:
         self.file = file.FileResourceWithRawResponse(client.file)
         self.config = config.ConfigResourceWithRawResponse(client.config)
         self.session = session.SessionResourceWithRawResponse(client.session)
+        self.tui = tui.TuiResourceWithRawResponse(client.tui)
 
 
 class AsyncOpencodeWithRawResponse:
@@ -378,6 +383,7 @@ def __init__(self, client: AsyncOpencode) -> None:
         self.file = file.AsyncFileResourceWithRawResponse(client.file)
         self.config = config.AsyncConfigResourceWithRawResponse(client.config)
         self.session = session.AsyncSessionResourceWithRawResponse(client.session)
+        self.tui = tui.AsyncTuiResourceWithRawResponse(client.tui)
 
 
 class OpencodeWithStreamedResponse:
@@ -388,6 +394,7 @@ def __init__(self, client: Opencode) -> None:
         self.file = file.FileResourceWithStreamingResponse(client.file)
         self.config = config.ConfigResourceWithStreamingResponse(client.config)
         self.session = session.SessionResourceWithStreamingResponse(client.session)
+        self.tui = tui.TuiResourceWithStreamingResponse(client.tui)
 
 
 class AsyncOpencodeWithStreamedResponse:
@@ -398,6 +405,7 @@ def __init__(self, client: AsyncOpencode) -> None:
         self.file = file.AsyncFileResourceWithStreamingResponse(client.file)
         self.config = config.AsyncConfigResourceWithStreamingResponse(client.config)
         self.session = session.AsyncSessionResourceWithStreamingResponse(client.session)
+        self.tui = tui.AsyncTuiResourceWithStreamingResponse(client.tui)
 
 
 Client = Opencode
diff --git a/src/opencode_ai/_version.py b/src/opencode_ai/_version.py
index 87c4570..20e5605 100644
--- a/src/opencode_ai/_version.py
+++ b/src/opencode_ai/_version.py
@@ -1,4 +1,4 @@
 # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
 
 __title__ = "opencode_ai"
-__version__ = "0.1.0-alpha.24"  # x-release-please-version
+__version__ = "0.1.0-alpha.25"  # x-release-please-version
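The client wiring above also registers `tui` on the raw-response view, so header-level access works the same as for the other resources. A sketch, again with an illustrative base URL (`.headers` and `.parse()` are the standard Stainless raw-response accessors):

```python
from opencode_ai import Opencode

client = Opencode(base_url="http://localhost:4096")  # illustrative base URL

# The raw-response view returns the HTTP response before parsing.
response = client.tui.with_raw_response.prompt()
print(response.headers.get("content-type"))
accepted = response.parse()  # -> TuiPromptResponse (a bool)
```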
diff --git a/src/opencode_ai/resources/__init__.py b/src/opencode_ai/resources/__init__.py
index 7b0815f..ee6a647 100644
--- a/src/opencode_ai/resources/__init__.py
+++ b/src/opencode_ai/resources/__init__.py
@@ -8,6 +8,14 @@
     AppResourceWithStreamingResponse,
     AsyncAppResourceWithStreamingResponse,
 )
+from .tui import (
+    TuiResource,
+    AsyncTuiResource,
+    TuiResourceWithRawResponse,
+    AsyncTuiResourceWithRawResponse,
+    TuiResourceWithStreamingResponse,
+    AsyncTuiResourceWithStreamingResponse,
+)
 from .file import (
     FileResource,
     AsyncFileResource,
@@ -86,4 +94,10 @@
     "AsyncSessionResourceWithRawResponse",
     "SessionResourceWithStreamingResponse",
     "AsyncSessionResourceWithStreamingResponse",
+    "TuiResource",
+    "AsyncTuiResource",
+    "TuiResourceWithRawResponse",
+    "AsyncTuiResourceWithRawResponse",
+    "TuiResourceWithStreamingResponse",
+    "AsyncTuiResourceWithStreamingResponse",
 ]
diff --git a/src/opencode_ai/resources/tui.py b/src/opencode_ai/resources/tui.py
new file mode 100644
index 0000000..5a8e2e9
--- /dev/null
+++ b/src/opencode_ai/resources/tui.py
@@ -0,0 +1,135 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+import httpx
+
+from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from .._compat import cached_property
+from .._resource import SyncAPIResource, AsyncAPIResource
+from .._response import (
+    to_raw_response_wrapper,
+    to_streamed_response_wrapper,
+    async_to_raw_response_wrapper,
+    async_to_streamed_response_wrapper,
+)
+from .._base_client import make_request_options
+from ..types.tui_prompt_response import TuiPromptResponse
+
+__all__ = ["TuiResource", "AsyncTuiResource"]
+
+
+class TuiResource(SyncAPIResource):
+    @cached_property
+    def with_raw_response(self) -> TuiResourceWithRawResponse:
+        """
+        This property can be used as a prefix for any HTTP method call to return
+        the raw response object instead of the parsed content.
+
+        For more information, see https://www.github.com/sst/opencode-sdk-python#accessing-raw-response-data-eg-headers
+        """
+        return TuiResourceWithRawResponse(self)
+
+    @cached_property
+    def with_streaming_response(self) -> TuiResourceWithStreamingResponse:
+        """
+        An alternative to `.with_raw_response` that doesn't eagerly read the response body.
+
+        For more information, see https://www.github.com/sst/opencode-sdk-python#with_streaming_response
+        """
+        return TuiResourceWithStreamingResponse(self)
+
+    def prompt(
+        self,
+        *,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+    ) -> TuiPromptResponse:
+        """Send a prompt to the TUI"""
+        return self._post(
+            "/tui/prompt",
+            options=make_request_options(
+                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+            ),
+            cast_to=TuiPromptResponse,
+        )
+
+
+class AsyncTuiResource(AsyncAPIResource):
+    @cached_property
+    def with_raw_response(self) -> AsyncTuiResourceWithRawResponse:
+        """
+        This property can be used as a prefix for any HTTP method call to return
+        the raw response object instead of the parsed content.
+
+        For more information, see https://www.github.com/sst/opencode-sdk-python#accessing-raw-response-data-eg-headers
+        """
+        return AsyncTuiResourceWithRawResponse(self)
+
+    @cached_property
+    def with_streaming_response(self) -> AsyncTuiResourceWithStreamingResponse:
+        """
+        An alternative to `.with_raw_response` that doesn't eagerly read the response body.
+
+        For more information, see https://www.github.com/sst/opencode-sdk-python#with_streaming_response
+        """
+        return AsyncTuiResourceWithStreamingResponse(self)
+
+    async def prompt(
+        self,
+        *,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+    ) -> TuiPromptResponse:
+        """Send a prompt to the TUI"""
+        return await self._post(
+            "/tui/prompt",
+            options=make_request_options(
+                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+            ),
+            cast_to=TuiPromptResponse,
+        )
+
+
+class TuiResourceWithRawResponse:
+    def __init__(self, tui: TuiResource) -> None:
+        self._tui = tui
+
+        self.prompt = to_raw_response_wrapper(
+            tui.prompt,
+        )
+
+
+class AsyncTuiResourceWithRawResponse:
+    def __init__(self, tui: AsyncTuiResource) -> None:
+        self._tui = tui
+
+        self.prompt = async_to_raw_response_wrapper(
+            tui.prompt,
+        )
+
+
+class TuiResourceWithStreamingResponse:
+    def __init__(self, tui: TuiResource) -> None:
+        self._tui = tui
+
+        self.prompt = to_streamed_response_wrapper(
+            tui.prompt,
+        )
+
+
+class AsyncTuiResourceWithStreamingResponse:
+    def __init__(self, tui: AsyncTuiResource) -> None:
+        self._tui = tui
+
+        self.prompt = async_to_streamed_response_wrapper(
+            tui.prompt,
+        )
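`AsyncTuiResource` mirrors the sync resource above; a sketch of the async call path, under the same illustrative base-URL assumption:

```python
import asyncio

from opencode_ai import AsyncOpencode


async def main() -> None:
    client = AsyncOpencode(base_url="http://localhost:4096")  # illustrative base URL
    # Same POST /tui/prompt endpoint, awaited instead of blocking.
    accepted = await client.tui.prompt()
    print(accepted)


asyncio.run(main())
```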
diff --git a/src/opencode_ai/types/__init__.py b/src/opencode_ai/types/__init__.py
index 5151b92..db18d1b 100644
--- a/src/opencode_ai/types/__init__.py
+++ b/src/opencode_ai/types/__init__.py
@@ -52,6 +52,7 @@
 from .session_chat_params import SessionChatParams as SessionChatParams
 from .session_init_params import SessionInitParams as SessionInitParams
 from .symbol_source_param import SymbolSourceParam as SymbolSourceParam
+from .tui_prompt_response import TuiPromptResponse as TuiPromptResponse
 from .file_status_response import FileStatusResponse as FileStatusResponse
 from .tool_state_completed import ToolStateCompleted as ToolStateCompleted
 from .file_part_input_param import FilePartInputParam as FilePartInputParam
diff --git a/src/opencode_ai/types/tui_prompt_response.py b/src/opencode_ai/types/tui_prompt_response.py
new file mode 100644
index 0000000..95dde9e
--- /dev/null
+++ b/src/opencode_ai/types/tui_prompt_response.py
@@ -0,0 +1,7 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing_extensions import TypeAlias
+
+__all__ = ["TuiPromptResponse"]
+
+TuiPromptResponse: TypeAlias = bool
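Note that `TuiPromptResponse` is a plain `bool` alias rather than a Pydantic model, so the return value works directly in truthiness checks. A sketch; `notify` is a hypothetical helper, not part of the SDK:

```python
from opencode_ai.types import TuiPromptResponse


def notify(accepted: TuiPromptResponse) -> None:
    # TuiPromptResponse is just `bool`; no .model_dump() or attribute access involved.
    print("prompt accepted" if accepted else "prompt rejected")
```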
diff --git a/tests/api_resources/test_tui.py b/tests/api_resources/test_tui.py
new file mode 100644
index 0000000..92fde65
--- /dev/null
+++ b/tests/api_resources/test_tui.py
@@ -0,0 +1,80 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+import os
+from typing import Any, cast
+
+import pytest
+
+from opencode_ai import Opencode, AsyncOpencode
+from tests.utils import assert_matches_type
+from opencode_ai.types import TuiPromptResponse
+
+base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
+
+
+class TestTui:
+    parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
+
+    @pytest.mark.skip()
+    @parametrize
+    def test_method_prompt(self, client: Opencode) -> None:
+        tui = client.tui.prompt()
+        assert_matches_type(TuiPromptResponse, tui, path=["response"])
+
+    @pytest.mark.skip()
+    @parametrize
+    def test_raw_response_prompt(self, client: Opencode) -> None:
+        response = client.tui.with_raw_response.prompt()
+
+        assert response.is_closed is True
+        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+        tui = response.parse()
+        assert_matches_type(TuiPromptResponse, tui, path=["response"])
+
+    @pytest.mark.skip()
+    @parametrize
+    def test_streaming_response_prompt(self, client: Opencode) -> None:
+        with client.tui.with_streaming_response.prompt() as response:
+            assert not response.is_closed
+            assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+            tui = response.parse()
+            assert_matches_type(TuiPromptResponse, tui, path=["response"])
+
+        assert cast(Any, response.is_closed) is True
+
+
+class TestAsyncTui:
+    parametrize = pytest.mark.parametrize(
+        "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
+    )
+
+    @pytest.mark.skip()
+    @parametrize
+    async def test_method_prompt(self, async_client: AsyncOpencode) -> None:
+        tui = await async_client.tui.prompt()
+        assert_matches_type(TuiPromptResponse, tui, path=["response"])
+
+    @pytest.mark.skip()
+    @parametrize
+    async def test_raw_response_prompt(self, async_client: AsyncOpencode) -> None:
+        response = await async_client.tui.with_raw_response.prompt()
+
+        assert response.is_closed is True
+        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+        tui = await response.parse()
+        assert_matches_type(TuiPromptResponse, tui, path=["response"])
+
+    @pytest.mark.skip()
+    @parametrize
+    async def test_streaming_response_prompt(self, async_client: AsyncOpencode) -> None:
+        async with async_client.tui.with_streaming_response.prompt() as response:
+            assert not response.is_closed
+            assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+            tui = await response.parse()
+            assert_matches_type(TuiPromptResponse, tui, path=["response"])
+
+        assert cast(Any, response.is_closed) is True
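The generated tests above are skipped by default and target `TEST_API_BASE_URL` (default `http://127.0.0.1:4010`), conventionally a local mock server in Stainless-generated SDKs. In application code, the streaming-response pattern they exercise looks like this; a sketch, with the base URL again illustrative:

```python
from opencode_ai import Opencode

client = Opencode(base_url="http://localhost:4096")  # illustrative base URL

with client.tui.with_streaming_response.prompt() as response:
    # The body is not read until .parse() (or manual iteration) is called,
    # and the context manager guarantees the connection is released.
    accepted = response.parse()
    print(accepted)
```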