2 files changed, +10 −6 lines changed

@@ -157,15 +157,15 @@ def llama_context_default_params() -> llama_context_params:
 _lib.llama_context_default_params.restype = llama_context_params


-def llama_mmap_supported() -> c_bool:
+def llama_mmap_supported() -> bool:
     return _lib.llama_mmap_supported()


 _lib.llama_mmap_supported.argtypes = []
 _lib.llama_mmap_supported.restype = c_bool


-def llama_mlock_supported() -> c_bool:
+def llama_mlock_supported() -> bool:
     return _lib.llama_mlock_supported()

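Context for the return-annotation change above, as a minimal sketch rather than part of the diff: when a ctypes callable's restype is c_bool, calling it from Python already yields a plain bool, so annotating the wrapper as -> bool describes what the call actually returns. The names BOOLFUNC and supported below are illustrative stand-ins, not symbols from the library.

import ctypes

# A callable whose restype is c_bool: ctypes converts the C result back into a
# native Python bool, mirroring how _lib.llama_mmap_supported() behaves once
# its restype is set to c_bool. The lambda is a stand-in for the real C code.
BOOLFUNC = ctypes.CFUNCTYPE(ctypes.c_bool)
supported = BOOLFUNC(lambda: True)

result = supported()
print(type(result), result)  # <class 'bool'> True
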
@@ -387,7 +387,9 @@ def llama_n_embd(ctx: llama_context_p) -> c_int:
 # Can be mutated in order to change the probabilities of the next token
 # Rows: n_tokens
 # Cols: n_vocab
-def llama_get_logits(ctx: llama_context_p):  # type: (...) -> Array[float] # type: ignore
+def llama_get_logits(
+    ctx: llama_context_p,
+):  # type: (...) -> Array[float] # type: ignore
     return _lib.llama_get_logits(ctx)

@@ -397,7 +399,9 @@ def llama_get_logits(ctx: llama_context_p): # type: (...) -> Array[float] # typ
 # Get the embeddings for the input
 # shape: [n_embd] (1-dimensional)
-def llama_get_embeddings(ctx: llama_context_p):  # type: (...) -> Array[float] # type: ignore
+def llama_get_embeddings(
+    ctx: llama_context_p,
+):  # type: (...) -> Array[float] # type: ignore
     return _lib.llama_get_embeddings(ctx)

@@ -27,11 +27,11 @@ class Settings(BaseSettings):
     )
     f16_kv: bool = Field(default=True, description="Whether to use f16 key/value.")
     use_mlock: bool = Field(
-        default=bool(llama_cpp.llama_mlock_supported().value),
+        default=llama_cpp.llama_mlock_supported(),
         description="Use mlock.",
     )
     use_mmap: bool = Field(
-        default=bool(llama_cpp.llama_mmap_supported().value),
+        default=llama_cpp.llama_mmap_supported(),
         description="Use mmap.",
     )
     embedding: bool = Field(default=True, description="Whether to use embeddings.")
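A minimal sketch of the settings change (assuming pydantic v1, where BaseSettings is importable from pydantic): because the binding now returns a plain Python bool, the value can be passed straight to Field() as a default, with no bool(...) or .value unwrapping. mmap_supported and ExampleSettings below are illustrative stand-ins, not names from the project.

from pydantic import BaseSettings, Field

def mmap_supported() -> bool:
    # Stand-in for llama_cpp.llama_mmap_supported(), which now returns bool.
    return True

class ExampleSettings(BaseSettings):
    use_mmap: bool = Field(default=mmap_supported(), description="Use mmap.")

print(ExampleSettings().use_mmap)  # True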