diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index 7657c56..f471069 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
 {
-  ".": "0.1.0-alpha.27"
+  ".": "0.1.0-alpha.28"
 }
\ No newline at end of file
diff --git a/.stats.yml b/.stats.yml
index 56337c0..f188454 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1,4 +1,4 @@
 configured_endpoints: 24
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/opencode%2Fopencode-d10809ab68e48a338167e5504d69db2a0a80739adf6ecd3f065644a4139bc374.yml
-openapi_spec_hash: 4875565ef8df3446dbab11f450e04c51
-config_hash: 0032a76356d31c6b4c218b39fff635bb
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/opencode%2Fopencode-9574184bd9e916aa69eae8e26e0679556038d3fcfb4009a445c97c6cc3e4f3ee.yml
+openapi_spec_hash: 93ba1215ab0dc853a1691b049cc47d75
+config_hash: 6d92d798d44906c9e43c6dee06615360
diff --git a/CHANGELOG.md b/CHANGELOG.md
index de31979..d516cad 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,13 @@
 # Changelog
 
+## 0.1.0-alpha.28 (2025-07-22)
+
+Full Changelog: [v0.1.0-alpha.27...v0.1.0-alpha.28](https://github.com/sst/opencode-sdk-python/compare/v0.1.0-alpha.27...v0.1.0-alpha.28)
+
+### Features
+
+* **api:** api update ([e8022cd](https://github.com/sst/opencode-sdk-python/commit/e8022cd6d313c1c710dc2721f7e962285d48b02e))
+
 ## 0.1.0-alpha.27 (2025-07-22)
 
 Full Changelog: [v0.1.0-alpha.26...v0.1.0-alpha.27](https://github.com/sst/opencode-sdk-python/compare/v0.1.0-alpha.26...v0.1.0-alpha.27)
diff --git a/api.md b/api.md
index efcd86b..67407e9 100644
--- a/api.md
+++ b/api.md
@@ -140,10 +140,10 @@ Methods:
 Types:
 
 ```python
-from opencode_ai.types import TuiOpenHelpResponse, TuiPromptResponse
+from opencode_ai.types import TuiAppendPromptResponse, TuiOpenHelpResponse
 ```
 
 Methods:
 
+- client.tui.append_prompt(\*\*params) -> TuiAppendPromptResponse
 - client.tui.open_help() -> TuiOpenHelpResponse
-- client.tui.prompt(\*\*params) -> TuiPromptResponse
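The `tui.prompt` method is gone; its replacement takes a single `text` field. A minimal sketch of calling the renamed method against a locally running opencode server (the `base_url` value mirrors the test-suite default and is illustrative):

```python
from opencode_ai import Opencode

# Illustrative URL; point the client at your running opencode server.
client = Opencode(base_url="http://127.0.0.1:4010")

# Previously: client.tui.prompt(parts=[...], text="...")
# Now a plain string is appended to the TUI's prompt input.
ok = client.tui.append_prompt(text="summarize the current session")
print(ok)  # TuiAppendPromptResponse is an alias for bool
```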
diff --git a/pyproject.toml b/pyproject.toml
index 5ab2607..9d9158e 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "opencode-ai"
-version = "0.1.0-alpha.27"
+version = "0.1.0-alpha.28"
 description = "The official Python library for the opencode API"
 dynamic = ["readme"]
 license = "Apache-2.0"
diff --git a/src/opencode_ai/_version.py b/src/opencode_ai/_version.py
index f753b1d..0c9d183 100644
--- a/src/opencode_ai/_version.py
+++ b/src/opencode_ai/_version.py
@@ -1,4 +1,4 @@
 # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
 
 __title__ = "opencode_ai"
-__version__ = "0.1.0-alpha.27"  # x-release-please-version
+__version__ = "0.1.0-alpha.28"  # x-release-please-version
diff --git a/src/opencode_ai/resources/tui.py b/src/opencode_ai/resources/tui.py
index 9a37ba8..7194e7a 100644
--- a/src/opencode_ai/resources/tui.py
+++ b/src/opencode_ai/resources/tui.py
@@ -2,11 +2,9 @@
 
 from __future__ import annotations
 
-from typing import Iterable
-
 import httpx
 
-from ..types import tui_prompt_params
+from ..types import tui_append_prompt_params
 from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven
 from .._utils import maybe_transform, async_maybe_transform
 from .._compat import cached_property
@@ -18,9 +16,8 @@
     async_to_streamed_response_wrapper,
 )
 from .._base_client import make_request_options
-from ..types.part_param import PartParam
-from ..types.tui_prompt_response import TuiPromptResponse
 from ..types.tui_open_help_response import TuiOpenHelpResponse
+from ..types.tui_append_prompt_response import TuiAppendPromptResponse
 
 
 __all__ = ["TuiResource", "AsyncTuiResource"]
@@ -45,62 +42,55 @@ def with_streaming_response(self) -> TuiResourceWithStreamingResponse:
         """
         return TuiResourceWithStreamingResponse(self)
 
-    def open_help(
+    def append_prompt(
         self,
         *,
+        text: str,
         # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
         # The extra values given here take precedence over values defined on the client or passed to this method.
         extra_headers: Headers | None = None,
         extra_query: Query | None = None,
         extra_body: Body | None = None,
         timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
-    ) -> TuiOpenHelpResponse:
-        """Open the help dialog"""
+    ) -> TuiAppendPromptResponse:
+        """
+        Append prompt to the TUI
+
+        Args:
+          extra_headers: Send extra headers
+
+          extra_query: Add additional query parameters to the request
+
+          extra_body: Add additional JSON properties to the request
+
+          timeout: Override the client-level default timeout for this request, in seconds
+        """
         return self._post(
-            "/tui/open-help",
+            "/tui/append-prompt",
+            body=maybe_transform({"text": text}, tui_append_prompt_params.TuiAppendPromptParams),
             options=make_request_options(
                 extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
             ),
-            cast_to=TuiOpenHelpResponse,
+            cast_to=TuiAppendPromptResponse,
         )
 
-    def prompt(
+    def open_help(
         self,
         *,
-        parts: Iterable[PartParam],
-        text: str,
         # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
         # The extra values given here take precedence over values defined on the client or passed to this method.
         extra_headers: Headers | None = None,
         extra_query: Query | None = None,
         extra_body: Body | None = None,
         timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
-    ) -> TuiPromptResponse:
-        """
-        Send a prompt to the TUI
-
-        Args:
-          extra_headers: Send extra headers
-
-          extra_query: Add additional query parameters to the request
-
-          extra_body: Add additional JSON properties to the request
-
-          timeout: Override the client-level default timeout for this request, in seconds
-        """
+    ) -> TuiOpenHelpResponse:
+        """Open the help dialog"""
         return self._post(
-            "/tui/prompt",
-            body=maybe_transform(
-                {
-                    "parts": parts,
-                    "text": text,
-                },
-                tui_prompt_params.TuiPromptParams,
-            ),
+            "/tui/open-help",
             options=make_request_options(
                 extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
             ),
-            cast_to=TuiPromptResponse,
+            cast_to=TuiOpenHelpResponse,
         )
 
 
@@ -124,62 +114,55 @@ def with_streaming_response(self) -> AsyncTuiResourceWithStreamingResponse:
         """
         return AsyncTuiResourceWithStreamingResponse(self)
 
-    async def open_help(
+    async def append_prompt(
         self,
         *,
+        text: str,
         # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
         # The extra values given here take precedence over values defined on the client or passed to this method.
         extra_headers: Headers | None = None,
         extra_query: Query | None = None,
         extra_body: Body | None = None,
         timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
-    ) -> TuiOpenHelpResponse:
-        """Open the help dialog"""
+    ) -> TuiAppendPromptResponse:
+        """
+        Append prompt to the TUI
+
+        Args:
+          extra_headers: Send extra headers
+
+          extra_query: Add additional query parameters to the request
+
+          extra_body: Add additional JSON properties to the request
+
+          timeout: Override the client-level default timeout for this request, in seconds
+        """
         return await self._post(
-            "/tui/open-help",
+            "/tui/append-prompt",
+            body=await async_maybe_transform({"text": text}, tui_append_prompt_params.TuiAppendPromptParams),
             options=make_request_options(
                 extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
-            cast_to=TuiOpenHelpResponse,
+            cast_to=TuiAppendPromptResponse,
        )
 
-    async def prompt(
+    async def open_help(
         self,
         *,
-        parts: Iterable[PartParam],
-        text: str,
         # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
         # The extra values given here take precedence over values defined on the client or passed to this method.
         extra_headers: Headers | None = None,
         extra_query: Query | None = None,
         extra_body: Body | None = None,
         timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
-    ) -> TuiPromptResponse:
-        """
-        Send a prompt to the TUI
-
-        Args:
-          extra_headers: Send extra headers
-
-          extra_query: Add additional query parameters to the request
-
-          extra_body: Add additional JSON properties to the request
-
-          timeout: Override the client-level default timeout for this request, in seconds
-        """
+    ) -> TuiOpenHelpResponse:
+        """Open the help dialog"""
         return await self._post(
-            "/tui/prompt",
-            body=await async_maybe_transform(
-                {
-                    "parts": parts,
-                    "text": text,
-                },
-                tui_prompt_params.TuiPromptParams,
-            ),
+            "/tui/open-help",
             options=make_request_options(
                 extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
             ),
-            cast_to=TuiPromptResponse,
+            cast_to=TuiOpenHelpResponse,
         )
 
 
@@ -187,45 +170,45 @@ class TuiResourceWithRawResponse:
     def __init__(self, tui: TuiResource) -> None:
         self._tui = tui
 
+        self.append_prompt = to_raw_response_wrapper(
+            tui.append_prompt,
+        )
         self.open_help = to_raw_response_wrapper(
             tui.open_help,
         )
-        self.prompt = to_raw_response_wrapper(
-            tui.prompt,
-        )
 
 
 class AsyncTuiResourceWithRawResponse:
     def __init__(self, tui: AsyncTuiResource) -> None:
         self._tui = tui
 
+        self.append_prompt = async_to_raw_response_wrapper(
+            tui.append_prompt,
+        )
         self.open_help = async_to_raw_response_wrapper(
             tui.open_help,
         )
-        self.prompt = async_to_raw_response_wrapper(
-            tui.prompt,
-        )
 
 
 class TuiResourceWithStreamingResponse:
     def __init__(self, tui: TuiResource) -> None:
         self._tui = tui
 
+        self.append_prompt = to_streamed_response_wrapper(
+            tui.append_prompt,
+        )
         self.open_help = to_streamed_response_wrapper(
             tui.open_help,
         )
-        self.prompt = to_streamed_response_wrapper(
-            tui.prompt,
-        )
 
 
 class AsyncTuiResourceWithStreamingResponse:
     def __init__(self, tui: AsyncTuiResource) -> None:
         self._tui = tui
 
+        self.append_prompt = async_to_streamed_response_wrapper(
+            tui.append_prompt,
+        )
         self.open_help = async_to_streamed_response_wrapper(
             tui.open_help,
         )
-        self.prompt = async_to_streamed_response_wrapper(
-            tui.prompt,
-        )
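The response-wrapper classes above route the renamed method the same way as any other; a short sketch exercising them, mirroring what the generated tests do (base URL illustrative):

```python
from opencode_ai import Opencode

client = Opencode(base_url="http://127.0.0.1:4010")  # illustrative URL

# Raw access keeps the underlying HTTP response available before parsing.
response = client.tui.with_raw_response.append_prompt(text="hello")
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
print(response.parse())  # bool

# The streaming wrapper is used as a context manager.
with client.tui.with_streaming_response.append_prompt(text="hello") as response:
    print(response.parse())
```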
diff --git a/src/opencode_ai/types/__init__.py b/src/opencode_ai/types/__init__.py
index 4a81598..0da9c61 100644
--- a/src/opencode_ai/types/__init__.py
+++ b/src/opencode_ai/types/__init__.py
@@ -21,18 +21,14 @@
 from .file_part import FilePart as FilePart
 from .text_part import TextPart as TextPart
 from .tool_part import ToolPart as ToolPart
-from .part_param import PartParam as PartParam
 from .file_source import FileSource as FileSource
 from .mode_config import ModeConfig as ModeConfig
 from .user_message import UserMessage as UserMessage
 from .snapshot_part import SnapshotPart as SnapshotPart
 from .symbol_source import SymbolSource as SymbolSource
 from .app_log_params import AppLogParams as AppLogParams
-from .file_part_param import FilePartParam as FilePartParam
 from .keybinds_config import KeybindsConfig as KeybindsConfig
 from .step_start_part import StepStartPart as StepStartPart
-from .text_part_param import TextPartParam as TextPartParam
-from .tool_part_param import ToolPartParam as ToolPartParam
 from .app_log_response import AppLogResponse as AppLogResponse
 from .file_part_source import FilePartSource as FilePartSource
 from .file_read_params import FileReadParams as FileReadParams
@@ -45,7 +41,6 @@
 from .file_source_param import FileSourceParam as FileSourceParam
 from .find_files_params import FindFilesParams as FindFilesParams
 from .mcp_remote_config import McpRemoteConfig as McpRemoteConfig
-from .tui_prompt_params import TuiPromptParams as TuiPromptParams
 from .app_modes_response import AppModesResponse as AppModesResponse
 from .file_read_response import FileReadResponse as FileReadResponse
 from .find_text_response import FindTextResponse as FindTextResponse
@@ -56,9 +51,7 @@
 from .find_symbols_params import FindSymbolsParams as FindSymbolsParams
 from .session_chat_params import SessionChatParams as SessionChatParams
 from .session_init_params import SessionInitParams as SessionInitParams
-from .snapshot_part_param import SnapshotPartParam as SnapshotPartParam
 from .symbol_source_param import SymbolSourceParam as SymbolSourceParam
-from .tui_prompt_response import TuiPromptResponse as TuiPromptResponse
 from .file_status_response import FileStatusResponse as FileStatusResponse
 from .tool_state_completed import ToolStateCompleted as ToolStateCompleted
 from .file_part_input_param import FilePartInputParam as FilePartInputParam
@@ -66,19 +59,15 @@
 from .find_symbols_response import FindSymbolsResponse as FindSymbolsResponse
 from .session_init_response import SessionInitResponse as SessionInitResponse
 from .session_list_response import SessionListResponse as SessionListResponse
-from .step_start_part_param import StepStartPartParam as StepStartPartParam
 from .text_part_input_param import TextPartInputParam as TextPartInputParam
 from .app_providers_response import AppProvidersResponse as AppProvidersResponse
 from .file_part_source_param import FilePartSourceParam as FilePartSourceParam
 from .session_abort_response import SessionAbortResponse as SessionAbortResponse
-from .step_finish_part_param import StepFinishPartParam as StepFinishPartParam
-from .tool_state_error_param import ToolStateErrorParam as ToolStateErrorParam
 from .tui_open_help_response import TuiOpenHelpResponse as TuiOpenHelpResponse
 from .session_delete_response import SessionDeleteResponse as SessionDeleteResponse
 from .session_summarize_params import SessionSummarizeParams as SessionSummarizeParams
-from .tool_state_pending_param import ToolStatePendingParam as ToolStatePendingParam
-from .tool_state_running_param import ToolStateRunningParam as ToolStateRunningParam
+from .tui_append_prompt_params import TuiAppendPromptParams as TuiAppendPromptParams
 from .session_messages_response import SessionMessagesResponse as SessionMessagesResponse
 from .session_summarize_response import SessionSummarizeResponse as SessionSummarizeResponse
-from .tool_state_completed_param import ToolStateCompletedParam as ToolStateCompletedParam
+from .tui_append_prompt_response import TuiAppendPromptResponse as TuiAppendPromptResponse
 from .file_part_source_text_param import FilePartSourceTextParam as FilePartSourceTextParam
diff --git a/src/opencode_ai/types/event_list_response.py b/src/opencode_ai/types/event_list_response.py
index fd8832b..c45c022 100644
--- a/src/opencode_ai/types/event_list_response.py
+++ b/src/opencode_ai/types/event_list_response.py
@@ -45,6 +45,8 @@
     "EventSessionErrorPropertiesErrorMessageOutputLengthError",
     "EventFileWatcherUpdated",
     "EventFileWatcherUpdatedProperties",
+    "EventIdeInstalled",
+    "EventIdeInstalledProperties",
 ]
 
 
@@ -214,6 +216,16 @@ class EventFileWatcherUpdated(BaseModel):
     type: Literal["file.watcher.updated"]
 
 
+class EventIdeInstalledProperties(BaseModel):
+    ide: str
+
+
+class EventIdeInstalled(BaseModel):
+    properties: EventIdeInstalledProperties
+
+    type: Literal["ide.installed"]
+
+
 EventListResponse: TypeAlias = Annotated[
     Union[
         EventLspClientDiagnostics,
@@ -229,6 +241,7 @@ class EventFileWatcherUpdated(BaseModel):
         EventSessionIdle,
         EventSessionError,
         EventFileWatcherUpdated,
+        EventIdeInstalled,
     ],
     PropertyInfo(discriminator="type"),
 ]
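The event union now carries an `ide.installed` variant, discriminated on the `type` field. A sketch of dispatching on it; note that the `client.event.list()` accessor is an assumption inferred from the type name `EventListResponse`, so adjust to however your code actually consumes events:

```python
from opencode_ai import Opencode

client = Opencode(base_url="http://127.0.0.1:4010")  # illustrative URL

# Assumption: events arrive via client.event.list(). Because the union is
# discriminated on `type`, narrowing is a plain string comparison.
for event in client.event.list():
    if event.type == "ide.installed":
        print(f"IDE installed: {event.properties.ide}")
```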
diff --git a/src/opencode_ai/types/file_part_param.py b/src/opencode_ai/types/file_part_param.py
deleted file mode 100644
index 9de2723..0000000
--- a/src/opencode_ai/types/file_part_param.py
+++ /dev/null
@@ -1,28 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from __future__ import annotations
-
-from typing_extensions import Literal, Required, Annotated, TypedDict
-
-from .._utils import PropertyInfo
-from .file_part_source_param import FilePartSourceParam
-
-__all__ = ["FilePartParam"]
-
-
-class FilePartParam(TypedDict, total=False):
-    id: Required[str]
-
-    message_id: Required[Annotated[str, PropertyInfo(alias="messageID")]]
-
-    mime: Required[str]
-
-    session_id: Required[Annotated[str, PropertyInfo(alias="sessionID")]]
-
-    type: Required[Literal["file"]]
-
-    url: Required[str]
-
-    filename: str
-
-    source: FilePartSourceParam
diff --git a/src/opencode_ai/types/part_param.py b/src/opencode_ai/types/part_param.py
deleted file mode 100644
index 8160b98..0000000
--- a/src/opencode_ai/types/part_param.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from __future__ import annotations
-
-from typing import Union
-from typing_extensions import TypeAlias
-
-from .file_part_param import FilePartParam
-from .text_part_param import TextPartParam
-from .tool_part_param import ToolPartParam
-from .snapshot_part_param import SnapshotPartParam
-from .step_start_part_param import StepStartPartParam
-from .step_finish_part_param import StepFinishPartParam
-
-__all__ = ["PartParam"]
-
-PartParam: TypeAlias = Union[
-    TextPartParam, FilePartParam, ToolPartParam, StepStartPartParam, StepFinishPartParam, SnapshotPartParam
-]
diff --git a/src/opencode_ai/types/snapshot_part_param.py b/src/opencode_ai/types/snapshot_part_param.py
deleted file mode 100644
index 847ba82..0000000
--- a/src/opencode_ai/types/snapshot_part_param.py
+++ /dev/null
@@ -1,21 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from __future__ import annotations
-
-from typing_extensions import Literal, Required, Annotated, TypedDict
-
-from .._utils import PropertyInfo
-
-__all__ = ["SnapshotPartParam"]
-
-
-class SnapshotPartParam(TypedDict, total=False):
-    id: Required[str]
-
-    message_id: Required[Annotated[str, PropertyInfo(alias="messageID")]]
-
-    session_id: Required[Annotated[str, PropertyInfo(alias="sessionID")]]
-
-    snapshot: Required[str]
-
-    type: Required[Literal["snapshot"]]
diff --git a/src/opencode_ai/types/step_finish_part_param.py b/src/opencode_ai/types/step_finish_part_param.py
deleted file mode 100644
index 4dabb28..0000000
--- a/src/opencode_ai/types/step_finish_part_param.py
+++ /dev/null
@@ -1,39 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from __future__ import annotations
-
-from typing_extensions import Literal, Required, Annotated, TypedDict
-
-from .._utils import PropertyInfo
-
-__all__ = ["StepFinishPartParam", "Tokens", "TokensCache"]
-
-
-class TokensCache(TypedDict, total=False):
-    read: Required[float]
-
-    write: Required[float]
-
-
-class Tokens(TypedDict, total=False):
-    cache: Required[TokensCache]
-
-    input: Required[float]
-
-    output: Required[float]
-
-    reasoning: Required[float]
-
-
-class StepFinishPartParam(TypedDict, total=False):
-    id: Required[str]
-
-    cost: Required[float]
-
-    message_id: Required[Annotated[str, PropertyInfo(alias="messageID")]]
-
-    session_id: Required[Annotated[str, PropertyInfo(alias="sessionID")]]
-
-    tokens: Required[Tokens]
-
-    type: Required[Literal["step-finish"]]
diff --git a/src/opencode_ai/types/step_start_part_param.py b/src/opencode_ai/types/step_start_part_param.py
deleted file mode 100644
index a7d5655..0000000
--- a/src/opencode_ai/types/step_start_part_param.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from __future__ import annotations
-
-from typing_extensions import Literal, Required, Annotated, TypedDict
-
-from .._utils import PropertyInfo
-
-__all__ = ["StepStartPartParam"]
-
-
-class StepStartPartParam(TypedDict, total=False):
-    id: Required[str]
-
-    message_id: Required[Annotated[str, PropertyInfo(alias="messageID")]]
-
-    session_id: Required[Annotated[str, PropertyInfo(alias="sessionID")]]
-
-    type: Required[Literal["step-start"]]
diff --git a/src/opencode_ai/types/text_part_param.py b/src/opencode_ai/types/text_part_param.py
deleted file mode 100644
index 3129256..0000000
--- a/src/opencode_ai/types/text_part_param.py
+++ /dev/null
@@ -1,31 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from __future__ import annotations
-
-from typing_extensions import Literal, Required, Annotated, TypedDict
-
-from .._utils import PropertyInfo
-
-__all__ = ["TextPartParam", "Time"]
-
-
-class Time(TypedDict, total=False):
-    start: Required[float]
-
-    end: float
-
-
-class TextPartParam(TypedDict, total=False):
-    id: Required[str]
-
-    message_id: Required[Annotated[str, PropertyInfo(alias="messageID")]]
-
-    session_id: Required[Annotated[str, PropertyInfo(alias="sessionID")]]
-
-    text: Required[str]
-
-    type: Required[Literal["text"]]
-
-    synthetic: bool
-
-    time: Time
diff --git a/src/opencode_ai/types/tool_part_param.py b/src/opencode_ai/types/tool_part_param.py
deleted file mode 100644
index 559c770..0000000
--- a/src/opencode_ai/types/tool_part_param.py
+++ /dev/null
@@ -1,32 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from __future__ import annotations
-
-from typing import Union
-from typing_extensions import Literal, Required, Annotated, TypeAlias, TypedDict
-
-from .._utils import PropertyInfo
-from .tool_state_error_param import ToolStateErrorParam
-from .tool_state_pending_param import ToolStatePendingParam
-from .tool_state_running_param import ToolStateRunningParam
-from .tool_state_completed_param import ToolStateCompletedParam
-
-__all__ = ["ToolPartParam", "State"]
-
-State: TypeAlias = Union[ToolStatePendingParam, ToolStateRunningParam, ToolStateCompletedParam, ToolStateErrorParam]
-
-
-class ToolPartParam(TypedDict, total=False):
-    id: Required[str]
-
-    call_id: Required[Annotated[str, PropertyInfo(alias="callID")]]
-
-    message_id: Required[Annotated[str, PropertyInfo(alias="messageID")]]
-
-    session_id: Required[Annotated[str, PropertyInfo(alias="sessionID")]]
-
-    state: Required[State]
-
-    tool: Required[str]
-
-    type: Required[Literal["tool"]]
diff --git a/src/opencode_ai/types/tool_state_completed_param.py b/src/opencode_ai/types/tool_state_completed_param.py
deleted file mode 100644
index cea3758..0000000
--- a/src/opencode_ai/types/tool_state_completed_param.py
+++ /dev/null
@@ -1,28 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from __future__ import annotations
-
-from typing import Dict
-from typing_extensions import Literal, Required, TypedDict
-
-__all__ = ["ToolStateCompletedParam", "Time"]
-
-
-class Time(TypedDict, total=False):
-    end: Required[float]
-
-    start: Required[float]
-
-
-class ToolStateCompletedParam(TypedDict, total=False):
-    input: Required[Dict[str, object]]
-
-    metadata: Required[Dict[str, object]]
-
-    output: Required[str]
-
-    status: Required[Literal["completed"]]
-
-    time: Required[Time]
-
-    title: Required[str]
diff --git a/src/opencode_ai/types/tool_state_error_param.py b/src/opencode_ai/types/tool_state_error_param.py
deleted file mode 100644
index 7622868..0000000
--- a/src/opencode_ai/types/tool_state_error_param.py
+++ /dev/null
@@ -1,24 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from __future__ import annotations
-
-from typing import Dict
-from typing_extensions import Literal, Required, TypedDict
-
-__all__ = ["ToolStateErrorParam", "Time"]
-
-
-class Time(TypedDict, total=False):
-    end: Required[float]
-
-    start: Required[float]
-
-
-class ToolStateErrorParam(TypedDict, total=False):
-    error: Required[str]
-
-    input: Required[Dict[str, object]]
-
-    status: Required[Literal["error"]]
-
-    time: Required[Time]
diff --git a/src/opencode_ai/types/tool_state_pending_param.py b/src/opencode_ai/types/tool_state_pending_param.py
deleted file mode 100644
index d375cd5..0000000
--- a/src/opencode_ai/types/tool_state_pending_param.py
+++ /dev/null
@@ -1,11 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from __future__ import annotations
-
-from typing_extensions import Literal, Required, TypedDict
-
-__all__ = ["ToolStatePendingParam"]
-
-
-class ToolStatePendingParam(TypedDict, total=False):
-    status: Required[Literal["pending"]]
diff --git a/src/opencode_ai/types/tool_state_running_param.py b/src/opencode_ai/types/tool_state_running_param.py
deleted file mode 100644
index 1814d43..0000000
--- a/src/opencode_ai/types/tool_state_running_param.py
+++ /dev/null
@@ -1,24 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from __future__ import annotations
-
-from typing import Dict
-from typing_extensions import Literal, Required, TypedDict
-
-__all__ = ["ToolStateRunningParam", "Time"]
-
-
-class Time(TypedDict, total=False):
-    start: Required[float]
-
-
-class ToolStateRunningParam(TypedDict, total=False):
-    status: Required[Literal["running"]]
-
-    time: Required[Time]
-
-    input: object
-
-    metadata: Dict[str, object]
-
-    title: str
diff --git a/src/opencode_ai/types/tui_prompt_params.py b/src/opencode_ai/types/tui_append_prompt_params.py
similarity index 52%
rename from src/opencode_ai/types/tui_prompt_params.py
rename to src/opencode_ai/types/tui_append_prompt_params.py
index be85887..431f731 100644
--- a/src/opencode_ai/types/tui_prompt_params.py
+++ b/src/opencode_ai/types/tui_append_prompt_params.py
@@ -2,15 +2,10 @@
 
 from __future__ import annotations
 
-from typing import Iterable
 from typing_extensions import Required, TypedDict
 
-from .part_param import PartParam
+__all__ = ["TuiAppendPromptParams"]
 
-__all__ = ["TuiPromptParams"]
-
-
-class TuiPromptParams(TypedDict, total=False):
-    parts: Required[Iterable[PartParam]]
 
+class TuiAppendPromptParams(TypedDict, total=False):
     text: Required[str]
diff --git a/src/opencode_ai/types/tui_prompt_response.py b/src/opencode_ai/types/tui_append_prompt_response.py
similarity index 61%
rename from src/opencode_ai/types/tui_prompt_response.py
rename to src/opencode_ai/types/tui_append_prompt_response.py
index 95dde9e..85b6813 100644
--- a/src/opencode_ai/types/tui_prompt_response.py
+++ b/src/opencode_ai/types/tui_append_prompt_response.py
@@ -2,6 +2,6 @@
 
 from typing_extensions import TypeAlias
 
-__all__ = ["TuiPromptResponse"]
+__all__ = ["TuiAppendPromptResponse"]
 
-TuiPromptResponse: TypeAlias = bool
+TuiAppendPromptResponse: TypeAlias = bool
diff --git a/tests/api_resources/test_tui.py b/tests/api_resources/test_tui.py
index 6478963..50f907a 100644
--- a/tests/api_resources/test_tui.py
+++ b/tests/api_resources/test_tui.py
@@ -9,7 +9,7 @@
 from opencode_ai import Opencode, AsyncOpencode
 from tests.utils import assert_matches_type
-from opencode_ai.types import TuiPromptResponse, TuiOpenHelpResponse
+from opencode_ai.types import TuiOpenHelpResponse, TuiAppendPromptResponse
 
 base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
 
@@ -19,90 +19,63 @@ class TestTui:
 
     @pytest.mark.skip()
     @parametrize
-    def test_method_open_help(self, client: Opencode) -> None:
-        tui = client.tui.open_help()
-        assert_matches_type(TuiOpenHelpResponse, tui, path=["response"])
+    def test_method_append_prompt(self, client: Opencode) -> None:
+        tui = client.tui.append_prompt(
+            text="text",
+        )
+        assert_matches_type(TuiAppendPromptResponse, tui, path=["response"])
 
     @pytest.mark.skip()
     @parametrize
-    def test_raw_response_open_help(self, client: Opencode) -> None:
-        response = client.tui.with_raw_response.open_help()
+    def test_raw_response_append_prompt(self, client: Opencode) -> None:
+        response = client.tui.with_raw_response.append_prompt(
+            text="text",
+        )
 
         assert response.is_closed is True
         assert response.http_request.headers.get("X-Stainless-Lang") == "python"
         tui = response.parse()
-        assert_matches_type(TuiOpenHelpResponse, tui, path=["response"])
+        assert_matches_type(TuiAppendPromptResponse, tui, path=["response"])
 
     @pytest.mark.skip()
     @parametrize
-    def test_streaming_response_open_help(self, client: Opencode) -> None:
-        with client.tui.with_streaming_response.open_help() as response:
+    def test_streaming_response_append_prompt(self, client: Opencode) -> None:
+        with client.tui.with_streaming_response.append_prompt(
+            text="text",
+        ) as response:
             assert not response.is_closed
             assert response.http_request.headers.get("X-Stainless-Lang") == "python"
 
             tui = response.parse()
-            assert_matches_type(TuiOpenHelpResponse, tui, path=["response"])
+            assert_matches_type(TuiAppendPromptResponse, tui, path=["response"])
 
         assert cast(Any, response.is_closed) is True
 
     @pytest.mark.skip()
     @parametrize
-    def test_method_prompt(self, client: Opencode) -> None:
-        tui = client.tui.prompt(
-            parts=[
-                {
-                    "id": "id",
-                    "message_id": "messageID",
-                    "session_id": "sessionID",
-                    "text": "text",
-                    "type": "text",
-                }
-            ],
-            text="text",
-        )
-        assert_matches_type(TuiPromptResponse, tui, path=["response"])
+    def test_method_open_help(self, client: Opencode) -> None:
+        tui = client.tui.open_help()
+        assert_matches_type(TuiOpenHelpResponse, tui, path=["response"])
 
     @pytest.mark.skip()
     @parametrize
-    def test_raw_response_prompt(self, client: Opencode) -> None:
-        response = client.tui.with_raw_response.prompt(
-            parts=[
-                {
-                    "id": "id",
-                    "message_id": "messageID",
-                    "session_id": "sessionID",
-                    "text": "text",
-                    "type": "text",
-                }
-            ],
-            text="text",
-        )
+    def test_raw_response_open_help(self, client: Opencode) -> None:
+        response = client.tui.with_raw_response.open_help()
 
         assert response.is_closed is True
         assert response.http_request.headers.get("X-Stainless-Lang") == "python"
         tui = response.parse()
-        assert_matches_type(TuiPromptResponse, tui, path=["response"])
+        assert_matches_type(TuiOpenHelpResponse, tui, path=["response"])
 
     @pytest.mark.skip()
     @parametrize
-    def test_streaming_response_prompt(self, client: Opencode) -> None:
-        with client.tui.with_streaming_response.prompt(
-            parts=[
-                {
-                    "id": "id",
-                    "message_id": "messageID",
-                    "session_id": "sessionID",
-                    "text": "text",
-                    "type": "text",
-                }
-            ],
-            text="text",
-        ) as response:
+    def test_streaming_response_open_help(self, client: Opencode) -> None:
+        with client.tui.with_streaming_response.open_help() as response:
             assert not response.is_closed
             assert response.http_request.headers.get("X-Stainless-Lang") == "python"
 
             tui = response.parse()
-            assert_matches_type(TuiPromptResponse, tui, path=["response"])
+            assert_matches_type(TuiOpenHelpResponse, tui, path=["response"])
 
         assert cast(Any, response.is_closed) is True
 
 
@@ -114,89 +87,62 @@ class TestAsyncTui:
 
     @pytest.mark.skip()
     @parametrize
-    async def test_method_open_help(self, async_client: AsyncOpencode) -> None:
-        tui = await async_client.tui.open_help()
-        assert_matches_type(TuiOpenHelpResponse, tui, path=["response"])
+    async def test_method_append_prompt(self, async_client: AsyncOpencode) -> None:
+        tui = await async_client.tui.append_prompt(
+            text="text",
+        )
+        assert_matches_type(TuiAppendPromptResponse, tui, path=["response"])
 
     @pytest.mark.skip()
     @parametrize
-    async def test_raw_response_open_help(self, async_client: AsyncOpencode) -> None:
-        response = await async_client.tui.with_raw_response.open_help()
+    async def test_raw_response_append_prompt(self, async_client: AsyncOpencode) -> None:
+        response = await async_client.tui.with_raw_response.append_prompt(
+            text="text",
+        )
 
         assert response.is_closed is True
         assert response.http_request.headers.get("X-Stainless-Lang") == "python"
         tui = await response.parse()
-        assert_matches_type(TuiOpenHelpResponse, tui, path=["response"])
+        assert_matches_type(TuiAppendPromptResponse, tui, path=["response"])
 
     @pytest.mark.skip()
     @parametrize
-    async def test_streaming_response_open_help(self, async_client: AsyncOpencode) -> None:
-        async with async_client.tui.with_streaming_response.open_help() as response:
+    async def test_streaming_response_append_prompt(self, async_client: AsyncOpencode) -> None:
+        async with async_client.tui.with_streaming_response.append_prompt(
+            text="text",
+        ) as response:
             assert not response.is_closed
             assert response.http_request.headers.get("X-Stainless-Lang") == "python"
 
             tui = await response.parse()
-            assert_matches_type(TuiOpenHelpResponse, tui, path=["response"])
+            assert_matches_type(TuiAppendPromptResponse, tui, path=["response"])
 
         assert cast(Any, response.is_closed) is True
 
     @pytest.mark.skip()
     @parametrize
-    async def test_method_prompt(self, async_client: AsyncOpencode) -> None:
-        tui = await async_client.tui.prompt(
-            parts=[
-                {
-                    "id": "id",
-                    "message_id": "messageID",
-                    "session_id": "sessionID",
-                    "text": "text",
-                    "type": "text",
-                }
-            ],
-            text="text",
-        )
-        assert_matches_type(TuiPromptResponse, tui, path=["response"])
+    async def test_method_open_help(self, async_client: AsyncOpencode) -> None:
+        tui = await async_client.tui.open_help()
+        assert_matches_type(TuiOpenHelpResponse, tui, path=["response"])
 
     @pytest.mark.skip()
     @parametrize
-    async def test_raw_response_prompt(self, async_client: AsyncOpencode) -> None:
-        response = await async_client.tui.with_raw_response.prompt(
-            parts=[
-                {
-                    "id": "id",
-                    "message_id": "messageID",
-                    "session_id": "sessionID",
-                    "text": "text",
-                    "type": "text",
-                }
-            ],
-            text="text",
-        )
+    async def test_raw_response_open_help(self, async_client: AsyncOpencode) -> None:
+        response = await async_client.tui.with_raw_response.open_help()
 
         assert response.is_closed is True
         assert response.http_request.headers.get("X-Stainless-Lang") == "python"
         tui = await response.parse()
-        assert_matches_type(TuiPromptResponse, tui, path=["response"])
+        assert_matches_type(TuiOpenHelpResponse, tui, path=["response"])
 
     @pytest.mark.skip()
     @parametrize
-    async def test_streaming_response_prompt(self, async_client: AsyncOpencode) -> None:
-        async with async_client.tui.with_streaming_response.prompt(
-            parts=[
-                {
-                    "id": "id",
-                    "message_id": "messageID",
-                    "session_id": "sessionID",
-                    "text": "text",
-                    "type": "text",
-                }
-            ],
-            text="text",
-        ) as response:
+    async def test_streaming_response_open_help(self, async_client: AsyncOpencode) -> None:
+        async with async_client.tui.with_streaming_response.open_help() as response:
             assert not response.is_closed
             assert response.http_request.headers.get("X-Stainless-Lang") == "python"
 
             tui = await response.parse()
-            assert_matches_type(TuiPromptResponse, tui, path=["response"])
+            assert_matches_type(TuiOpenHelpResponse, tui, path=["response"])
 
         assert cast(Any, response.is_closed) is True
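The async client mirrors the sync surface exercised in the tests above; a brief end-to-end sketch (the base URL matches the test-suite default and is illustrative):

```python
import asyncio

from opencode_ai import AsyncOpencode


async def main() -> None:
    # Same default base URL the test suite uses; adjust to your server.
    client = AsyncOpencode(base_url="http://127.0.0.1:4010")
    ok = await client.tui.append_prompt(text="hello from the async client")
    print(ok)  # bool (TuiAppendPromptResponse)


asyncio.run(main())
```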