 import requests
 
 try:
-    from curl_cffi import requests as cf_reqs
+    from curl_cffi import Session
     has_curl_cffi = True
 except ImportError:
     has_curl_cffi = False
 from ..typing import CreateResult, Messages
 from ..errors import MissingRequirementsError
+from ..requests.raise_for_status import raise_for_status
 from .base_provider import ProviderModelMixin, AbstractProvider
 from .helper import format_prompt
 
@@ -18,7 +19,7 @@ class HuggingChat(AbstractProvider, ProviderModelMixin):
     working = True
     supports_stream = True
     default_model = "meta-llama/Meta-Llama-3.1-70B-Instruct"
-    
+
     models = [
         'meta-llama/Meta-Llama-3.1-70B-Instruct',
         'CohereForAI/c4ai-command-r-plus-08-2024',
@@ -30,7 +31,7 @@ class HuggingChat(AbstractProvider, ProviderModelMixin):
         'mistralai/Mistral-Nemo-Instruct-2407',
         'microsoft/Phi-3.5-mini-instruct',
     ]
-    
+
     model_aliases = {
         "llama-3.1-70b": "meta-llama/Meta-Llama-3.1-70B-Instruct",
         "command-r-plus": "CohereForAI/c4ai-command-r-plus-08-2024",
@@ -43,15 +44,6 @@ class HuggingChat(AbstractProvider, ProviderModelMixin):
         "phi-3.5-mini": "microsoft/Phi-3.5-mini-instruct",
     }
 
-    @classmethod
-    def get_model(cls, model: str) -> str:
-        if model in cls.models:
-            return model
-        elif model in cls.model_aliases:
-            return cls.model_aliases[model]
-        else:
-            return cls.default_model
-
     @classmethod
     def create_completion(
         cls,
@@ -65,7 +57,7 @@ def create_completion(
         model = cls.get_model(model)
 
         if model in cls.models:
-            session = cf_reqs.Session()
+            session = Session()
             session.headers = {
                 'accept': '*/*',
                 'accept-language': 'en',
@@ -82,20 +74,18 @@ def create_completion(
                 'sec-fetch-site': 'same-origin',
                 'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/127.0.0.0 Safari/537.36',
             }
-
             json_data = {
                 'model': model,
             }
-
             response = session.post('https://huggingface.co/chat/conversation', json=json_data)
-            if response.status_code != 200:
-                raise RuntimeError(f"Request failed with status code: {response.status_code}, response: {response.text}")
+            raise_for_status(response)
 
             conversationId = response.json().get('conversationId')
 
             # Get the data response and parse it properly
             response = session.get(f'https://huggingface.co/chat/conversation/{conversationId}/__data.json?x-sveltekit-invalidated=11')
-
+            raise_for_status(response)
+
             # Split the response content by newlines and parse each line as JSON
             try:
                 json_data = None
@@ -156,6 +146,7 @@ def create_completion(
                 headers=headers,
                 files=files,
             )
+            raise_for_status(response)
 
             full_response = ""
             for line in response.iter_lines():
@@ -182,9 +173,4 @@ def create_completion(
             full_response = full_response.replace('<|im_end|', '').replace('\u0000', '').strip()
 
             if not stream:
-                yield full_response
-
-    @classmethod
-    def supports_model(cls, model: str) -> bool:
-        """Check if the model is supported by the provider."""
-        return model in cls.models or model in cls.model_aliases
+                yield full_response