Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .release-please-manifest.json
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
{
".": "0.1.0-alpha.24"
".": "0.1.0-alpha.25"
}
8 changes: 4 additions & 4 deletions .stats.yml
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
configured_endpoints: 22
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/opencode%2Fopencode-5e3b108daef12bb43f728ee42261cd1322e8909e6225db58d6fe0617f3b5904c.yml
openapi_spec_hash: 1f9d2853ba0c1179e45426560e082ef4
config_hash: 6d56a7ca0d6ed899ecdb5c053a8278ae
configured_endpoints: 23
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/opencode%2Fopencode-69b99aaffe10dd5247638b6a34d6c0b3c1cf5300853d12c947151fd946e7fcdb.yml
openapi_spec_hash: e2c746cf689d71f04c6e9b1bd92e6356
config_hash: d779331eb3dabf2d99f2a20be154d1c9
8 changes: 8 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,13 @@
# Changelog

## 0.1.0-alpha.25 (2025-07-21)

Full Changelog: [v0.1.0-alpha.24...v0.1.0-alpha.25](https://github.com/sst/opencode-sdk-python/compare/v0.1.0-alpha.24...v0.1.0-alpha.25)

### Features

* **api:** api update ([a85f832](https://github.com/sst/opencode-sdk-python/commit/a85f832a942075091b9ca3f7e6399ba00239f354))

## 0.1.0-alpha.24 (2025-07-21)

Full Changelog: [v0.1.0-alpha.23...v0.1.0-alpha.24](https://github.com/sst/opencode-sdk-python/compare/v0.1.0-alpha.23...v0.1.0-alpha.24)
Expand Down
12 changes: 12 additions & 0 deletions api.md
Original file line number Diff line number Diff line change
Expand Up @@ -134,3 +134,15 @@ Methods:
- <code title="post /session/{id}/share">client.session.<a href="./src/opencode_ai/resources/session.py">share</a>(id) -> <a href="./src/opencode_ai/types/session.py">Session</a></code>
- <code title="post /session/{id}/summarize">client.session.<a href="./src/opencode_ai/resources/session.py">summarize</a>(id, \*\*<a href="src/opencode_ai/types/session_summarize_params.py">params</a>) -> <a href="./src/opencode_ai/types/session_summarize_response.py">SessionSummarizeResponse</a></code>
- <code title="delete /session/{id}/share">client.session.<a href="./src/opencode_ai/resources/session.py">unshare</a>(id) -> <a href="./src/opencode_ai/types/session.py">Session</a></code>

# Tui

Types:

```python
from opencode_ai.types import TuiPromptResponse
```

Methods:

- <code title="post /tui/prompt">client.tui.<a href="./src/opencode_ai/resources/tui.py">prompt</a>() -> <a href="./src/opencode_ai/types/tui_prompt_response.py">TuiPromptResponse</a></code>
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[project]
name = "opencode-ai"
version = "0.1.0-alpha.24"
version = "0.1.0-alpha.25"
description = "The official Python library for the opencode API"
dynamic = ["readme"]
license = "Apache-2.0"
Expand Down
10 changes: 9 additions & 1 deletion src/opencode_ai/_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
)
from ._utils import is_given, get_async_library
from ._version import __version__
from .resources import app, file, find, event, config, session
from .resources import app, tui, file, find, event, config, session
from ._streaming import Stream as Stream, AsyncStream as AsyncStream
from ._exceptions import APIStatusError
from ._base_client import (
Expand Down Expand Up @@ -49,6 +49,7 @@ class Opencode(SyncAPIClient):
file: file.FileResource
config: config.ConfigResource
session: session.SessionResource
tui: tui.TuiResource
with_raw_response: OpencodeWithRawResponse
with_streaming_response: OpencodeWithStreamedResponse

Expand Down Expand Up @@ -101,6 +102,7 @@ def __init__(
self.file = file.FileResource(self)
self.config = config.ConfigResource(self)
self.session = session.SessionResource(self)
self.tui = tui.TuiResource(self)
self.with_raw_response = OpencodeWithRawResponse(self)
self.with_streaming_response = OpencodeWithStreamedResponse(self)

Expand Down Expand Up @@ -208,6 +210,7 @@ class AsyncOpencode(AsyncAPIClient):
file: file.AsyncFileResource
config: config.AsyncConfigResource
session: session.AsyncSessionResource
tui: tui.AsyncTuiResource
with_raw_response: AsyncOpencodeWithRawResponse
with_streaming_response: AsyncOpencodeWithStreamedResponse

Expand Down Expand Up @@ -260,6 +263,7 @@ def __init__(
self.file = file.AsyncFileResource(self)
self.config = config.AsyncConfigResource(self)
self.session = session.AsyncSessionResource(self)
self.tui = tui.AsyncTuiResource(self)
self.with_raw_response = AsyncOpencodeWithRawResponse(self)
self.with_streaming_response = AsyncOpencodeWithStreamedResponse(self)

Expand Down Expand Up @@ -368,6 +372,7 @@ def __init__(self, client: Opencode) -> None:
self.file = file.FileResourceWithRawResponse(client.file)
self.config = config.ConfigResourceWithRawResponse(client.config)
self.session = session.SessionResourceWithRawResponse(client.session)
self.tui = tui.TuiResourceWithRawResponse(client.tui)


class AsyncOpencodeWithRawResponse:
Expand All @@ -378,6 +383,7 @@ def __init__(self, client: AsyncOpencode) -> None:
self.file = file.AsyncFileResourceWithRawResponse(client.file)
self.config = config.AsyncConfigResourceWithRawResponse(client.config)
self.session = session.AsyncSessionResourceWithRawResponse(client.session)
self.tui = tui.AsyncTuiResourceWithRawResponse(client.tui)


class OpencodeWithStreamedResponse:
Expand All @@ -388,6 +394,7 @@ def __init__(self, client: Opencode) -> None:
self.file = file.FileResourceWithStreamingResponse(client.file)
self.config = config.ConfigResourceWithStreamingResponse(client.config)
self.session = session.SessionResourceWithStreamingResponse(client.session)
self.tui = tui.TuiResourceWithStreamingResponse(client.tui)


class AsyncOpencodeWithStreamedResponse:
Expand All @@ -398,6 +405,7 @@ def __init__(self, client: AsyncOpencode) -> None:
self.file = file.AsyncFileResourceWithStreamingResponse(client.file)
self.config = config.AsyncConfigResourceWithStreamingResponse(client.config)
self.session = session.AsyncSessionResourceWithStreamingResponse(client.session)
self.tui = tui.AsyncTuiResourceWithStreamingResponse(client.tui)


Client = Opencode
Expand Down
2 changes: 1 addition & 1 deletion src/opencode_ai/_version.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

__title__ = "opencode_ai"
__version__ = "0.1.0-alpha.24" # x-release-please-version
__version__ = "0.1.0-alpha.25" # x-release-please-version
14 changes: 14 additions & 0 deletions src/opencode_ai/resources/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,14 @@
AppResourceWithStreamingResponse,
AsyncAppResourceWithStreamingResponse,
)
from .tui import (
TuiResource,
AsyncTuiResource,
TuiResourceWithRawResponse,
AsyncTuiResourceWithRawResponse,
TuiResourceWithStreamingResponse,
AsyncTuiResourceWithStreamingResponse,
)
from .file import (
FileResource,
AsyncFileResource,
Expand Down Expand Up @@ -86,4 +94,10 @@
"AsyncSessionResourceWithRawResponse",
"SessionResourceWithStreamingResponse",
"AsyncSessionResourceWithStreamingResponse",
"TuiResource",
"AsyncTuiResource",
"TuiResourceWithRawResponse",
"AsyncTuiResourceWithRawResponse",
"TuiResourceWithStreamingResponse",
"AsyncTuiResourceWithStreamingResponse",
]
135 changes: 135 additions & 0 deletions src/opencode_ai/resources/tui.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,135 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from __future__ import annotations

import httpx

from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven
from .._compat import cached_property
from .._resource import SyncAPIResource, AsyncAPIResource
from .._response import (
to_raw_response_wrapper,
to_streamed_response_wrapper,
async_to_raw_response_wrapper,
async_to_streamed_response_wrapper,
)
from .._base_client import make_request_options
from ..types.tui_prompt_response import TuiPromptResponse

__all__ = ["TuiResource", "AsyncTuiResource"]


class TuiResource(SyncAPIResource):
    """Synchronous access to the `/tui` endpoints."""

    @cached_property
    def with_raw_response(self) -> TuiResourceWithRawResponse:
        """
        Prefix any method call with this property to receive the raw HTTP
        response object instead of the parsed content.

        For more information, see https://www.github.com/sst/opencode-sdk-python#accessing-raw-response-data-eg-headers
        """
        return TuiResourceWithRawResponse(self)

    @cached_property
    def with_streaming_response(self) -> TuiResourceWithStreamingResponse:
        """
        Like `.with_raw_response`, except the response body is not read eagerly.

        For more information, see https://www.github.com/sst/opencode-sdk-python#with_streaming_response
        """
        return TuiResourceWithStreamingResponse(self)

    def prompt(
        self,
        *,
        # Extra request parameters not otherwise exposed by this method.
        # Values supplied here take precedence over those defined on the
        # client or passed in elsewhere.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> TuiPromptResponse:
        """Send a prompt to the TUI"""
        request_options = make_request_options(
            extra_headers=extra_headers,
            extra_query=extra_query,
            extra_body=extra_body,
            timeout=timeout,
        )
        return self._post(
            "/tui/prompt",
            options=request_options,
            cast_to=TuiPromptResponse,
        )


class AsyncTuiResource(AsyncAPIResource):
    """Asynchronous access to the `/tui` endpoints."""

    @cached_property
    def with_raw_response(self) -> AsyncTuiResourceWithRawResponse:
        """
        Prefix any method call with this property to receive the raw HTTP
        response object instead of the parsed content.

        For more information, see https://www.github.com/sst/opencode-sdk-python#accessing-raw-response-data-eg-headers
        """
        return AsyncTuiResourceWithRawResponse(self)

    @cached_property
    def with_streaming_response(self) -> AsyncTuiResourceWithStreamingResponse:
        """
        Like `.with_raw_response`, except the response body is not read eagerly.

        For more information, see https://www.github.com/sst/opencode-sdk-python#with_streaming_response
        """
        return AsyncTuiResourceWithStreamingResponse(self)

    async def prompt(
        self,
        *,
        # Extra request parameters not otherwise exposed by this method.
        # Values supplied here take precedence over those defined on the
        # client or passed in elsewhere.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> TuiPromptResponse:
        """Send a prompt to the TUI"""
        request_options = make_request_options(
            extra_headers=extra_headers,
            extra_query=extra_query,
            extra_body=extra_body,
            timeout=timeout,
        )
        return await self._post(
            "/tui/prompt",
            options=request_options,
            cast_to=TuiPromptResponse,
        )


class TuiResourceWithRawResponse:
    """Mirror of `TuiResource` whose methods return the raw response."""

    def __init__(self, tui: TuiResource) -> None:
        self._tui = tui
        self.prompt = to_raw_response_wrapper(tui.prompt)


class AsyncTuiResourceWithRawResponse:
    """Mirror of `AsyncTuiResource` whose methods return the raw response."""

    def __init__(self, tui: AsyncTuiResource) -> None:
        self._tui = tui
        self.prompt = async_to_raw_response_wrapper(tui.prompt)


class TuiResourceWithStreamingResponse:
    """Mirror of `TuiResource` whose methods return streamed responses."""

    def __init__(self, tui: TuiResource) -> None:
        self._tui = tui
        self.prompt = to_streamed_response_wrapper(tui.prompt)


class AsyncTuiResourceWithStreamingResponse:
    """Mirror of `AsyncTuiResource` whose methods return streamed responses."""

    def __init__(self, tui: AsyncTuiResource) -> None:
        self._tui = tui
        self.prompt = async_to_streamed_response_wrapper(tui.prompt)
1 change: 1 addition & 0 deletions src/opencode_ai/types/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,7 @@
from .session_chat_params import SessionChatParams as SessionChatParams
from .session_init_params import SessionInitParams as SessionInitParams
from .symbol_source_param import SymbolSourceParam as SymbolSourceParam
from .tui_prompt_response import TuiPromptResponse as TuiPromptResponse
from .file_status_response import FileStatusResponse as FileStatusResponse
from .tool_state_completed import ToolStateCompleted as ToolStateCompleted
from .file_part_input_param import FilePartInputParam as FilePartInputParam
Expand Down
7 changes: 7 additions & 0 deletions src/opencode_ai/types/tui_prompt_response.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from typing_extensions import TypeAlias

__all__ = ["TuiPromptResponse"]

# Response body of `POST /tui/prompt` is a bare JSON boolean — presumably
# True when the TUI accepted the prompt; confirm against the server spec.
TuiPromptResponse: TypeAlias = bool
80 changes: 80 additions & 0 deletions tests/api_resources/test_tui.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,80 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from __future__ import annotations

import os
from typing import Any, cast

import pytest

from opencode_ai import Opencode, AsyncOpencode
from tests.utils import assert_matches_type
from opencode_ai.types import TuiPromptResponse

# Default to a local mock server unless TEST_API_BASE_URL overrides it.
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")


class TestTui:
    # Generated tests for the sync client; skipped by default, runnable
    # against a mock server at `base_url`. `client` is parametrized over
    # loose/strict response-validation modes via the fixture.
    parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])

    @pytest.mark.skip()
    @parametrize
    def test_method_prompt(self, client: Opencode) -> None:
        # Plain call: parsed content should match the declared response type.
        tui = client.tui.prompt()
        assert_matches_type(TuiPromptResponse, tui, path=["response"])

    @pytest.mark.skip()
    @parametrize
    def test_raw_response_prompt(self, client: Opencode) -> None:
        # Raw-response variant: body is read eagerly, so the response is
        # already closed, and `.parse()` still yields the typed content.
        response = client.tui.with_raw_response.prompt()

        assert response.is_closed is True
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        tui = response.parse()
        assert_matches_type(TuiPromptResponse, tui, path=["response"])

    @pytest.mark.skip()
    @parametrize
    def test_streaming_response_prompt(self, client: Opencode) -> None:
        # Streaming variant: body stays open inside the context manager and
        # is closed once the `with` block exits.
        with client.tui.with_streaming_response.prompt() as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"

            tui = response.parse()
            assert_matches_type(TuiPromptResponse, tui, path=["response"])

        assert cast(Any, response.is_closed) is True


class TestAsyncTui:
    # Generated tests for the async client; skipped by default. The fixture
    # is additionally parametrized over the aiohttp transport.
    parametrize = pytest.mark.parametrize(
        "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
    )

    @pytest.mark.skip()
    @parametrize
    async def test_method_prompt(self, async_client: AsyncOpencode) -> None:
        # Plain call: parsed content should match the declared response type.
        tui = await async_client.tui.prompt()
        assert_matches_type(TuiPromptResponse, tui, path=["response"])

    @pytest.mark.skip()
    @parametrize
    async def test_raw_response_prompt(self, async_client: AsyncOpencode) -> None:
        # Raw-response variant: body is read eagerly, so the response is
        # already closed, and `.parse()` still yields the typed content.
        response = await async_client.tui.with_raw_response.prompt()

        assert response.is_closed is True
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        tui = await response.parse()
        assert_matches_type(TuiPromptResponse, tui, path=["response"])

    @pytest.mark.skip()
    @parametrize
    async def test_streaming_response_prompt(self, async_client: AsyncOpencode) -> None:
        # Streaming variant: body stays open inside the context manager and
        # is closed once the `async with` block exits.
        async with async_client.tui.with_streaming_response.prompt() as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"

            tui = await response.parse()
            assert_matches_type(TuiPromptResponse, tui, path=["response"])

        assert cast(Any, response.is_closed) is True