4 files changed: +5 -21 lines changed

File 1 of 4 (pyproject.toml):
 [tool.poetry]
 name = "zai-sdk"
-version = "0.0.3.7"
+version = "0.0.4"
 description = "A SDK library for accessing big model apis from Z.ai"
 authors = ["Z.ai"]
 readme = "README.md"

File 2 of 4 (package version metadata):
 __title__ = 'Z.ai'
-__version__ = '0.0.3.7'
+__version__ = '0.0.4'

File 3 of 4 (async create(); temperature/top_p clamping removed):
@@ -88,25 +88,6 @@ def create(
             watermark_enabled (Optional[bool]): Whether to enable watermark on generated audio
         """
         _cast_type = AsyncTaskStatus
-        logger.debug(f'temperature:{temperature}, top_p:{top_p}')
-        if temperature is not None and temperature != NOT_GIVEN:
-            if temperature <= 0:
-                do_sample = False
-                temperature = 0.01
-                # logger.warning("temperature: value range is (0.0, 1.0) open interval,"
-                # "do_sample rewritten as false (parameters top_p temperature do not take effect)")
-            if temperature >= 1:
-                temperature = 0.99
-                # logger.warning("temperature: value range is (0.0, 1.0) open interval")
-        if top_p is not None and top_p != NOT_GIVEN:
-            if top_p >= 1:
-                top_p = 0.99
-                # logger.warning("top_p: value range is (0.0, 1.0) open interval, cannot equal 0 or 1")
-            if top_p <= 0:
-                top_p = 0.01
-                # logger.warning("top_p: value range is (0.0, 1.0) open interval, cannot equal 0 or 1")
-
-        logger.debug(f'temperature:{temperature}, top_p:{top_p}')
         if isinstance(messages, List):
             for item in messages:
                 if item.get('content'):

File 4 of 4 (chat completions create(); tool_stream parameter added):
@@ -66,6 +66,7 @@ def create(
         response_format: object | None = None,
         thinking: object | None = None,
         watermark_enabled: Optional[bool] | NotGiven = NOT_GIVEN,
+        tool_stream: bool | NotGiven = NOT_GIVEN,
     ) -> Completion | StreamResponse[ChatCompletionChunk]:
         """
         Create a chat completion
@@ -93,6 +94,7 @@ def create(
             response_format (object): Response format specification
             thinking (Optional[object]): Configuration parameters for model reasoning
             watermark_enabled (Optional[bool]): Whether to enable watermark on generated audio
+            tool_stream (Optional[bool]): Whether to enable tool streaming
         """
         logger.debug(f'temperature:{temperature}, top_p:{top_p}')
         if temperature is not None and temperature != NOT_GIVEN:
@@ -141,6 +143,7 @@ def create(
                 'response_format': response_format,
                 'thinking': thinking,
                 'watermark_enabled': watermark_enabled,
+                'tool_stream': tool_stream,
             }
         )
         return self._post(
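
Since tool_stream defaults to NOT_GIVEN, existing callers are unaffected; the flag only takes effect when passed explicitly and is simply forwarded in the request payload (third hunk above). A minimal usage sketch follows. It assumes the package exposes a ZaiClient entry point reachable as client.chat.completions.create(), and the model name and tool definition are placeholders; only the tool_stream keyword and the StreamResponse[ChatCompletionChunk] return type come from the diff itself.

# Hypothetical usage sketch for the new tool_stream flag (zai-sdk 0.0.4).
# Client class, method path, model name, and tool definition are assumptions;
# only the tool_stream parameter and the streamed-chunk return type are taken from the diff.
from zai import ZaiClient  # assumed entry point

client = ZaiClient(api_key='your-api-key')

response = client.chat.completions.create(
    model='glm-4',  # placeholder model name
    messages=[{'role': 'user', 'content': 'What is the weather in Beijing today?'}],
    tools=[{
        'type': 'function',
        'function': {
            'name': 'get_weather',  # hypothetical tool
            'parameters': {
                'type': 'object',
                'properties': {'city': {'type': 'string'}},
            },
        },
    }],
    stream=True,
    tool_stream=True,  # request streamed tool-call output in addition to streamed text
)

# With streaming enabled the method returns StreamResponse[ChatCompletionChunk];
# iterate to receive chunks, which may now also carry streamed tool-call deltas.
for chunk in response:
    print(chunk)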