
Commit dd4fc3d

Clippy fixes. (#1818)
* Clippy fixes. * Fixing fmt. * Python fmt.
1 parent f339867 commit dd4fc3d

File tree

17 files changed (+77, -111 lines)

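Every hunk in this commit applies the same clippy suggestion (the `uninlined_format_args` lint): an identifier is captured directly inside the format string instead of being passed as a trailing argument. A minimal sketch of the before/after pattern, using a hypothetical `Error` type rather than the bindings' own napi/pyo3 error types:

    // Stand-in error type for illustration only; the real code uses the
    // napi and pyo3 error types from the bindings.
    use std::fmt;

    struct Error(String);

    impl fmt::Display for Error {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            write!(f, "{}", self.0)
        }
    }

    fn main() {
        let e = Error("something went wrong".to_string());
        // Before: the value is passed as a positional argument.
        let old = format!("{}", e);
        // After: the identifier is captured inline (Rust 1.58+); same output.
        let new = format!("{e}");
        assert_eq!(old, new);
    }

Both forms produce identical strings; the inlined form is what the clippy lint suggests, which is why the changes below are mechanical.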

bindings/node/src/decoders.rs

Lines changed: 1 addition & 1 deletion
@@ -29,7 +29,7 @@ impl Decoder {
       .read()
       .unwrap()
       .decode(tokens)
-      .map_err(|e| Error::from_reason(format!("{}", e)))
+      .map_err(|e| Error::from_reason(format!("{e}")))
   }
 }

bindings/node/src/normalizers.rs

Lines changed: 1 addition & 1 deletion
@@ -25,7 +25,7 @@ impl Normalizer {
 
     self
       .normalize(&mut normalized)
-      .map_err(|e| Error::from_reason(format!("{}", e)))?;
+      .map_err(|e| Error::from_reason(format!("{e}")))?;
 
     Ok(normalized.get().to_string())
   }

bindings/node/src/pre_tokenizers.rs

Lines changed: 1 addition & 1 deletion
@@ -80,7 +80,7 @@ impl PreTokenizer {
 
     self
       .pre_tokenize(&mut pretokenized)
-      .map_err(|e| Error::from_reason(format!("{}", e)))?;
+      .map_err(|e| Error::from_reason(format!("{e}")))?;
 
     pretokenized
       .get_splits(tk::OffsetReferential::Original, tk::OffsetType::Char)

bindings/node/src/tasks/models.rs

Lines changed: 3 additions & 3 deletions
@@ -21,7 +21,7 @@ impl Task for BPEFromFilesTask {
       .take()
       .ok_or(Error::from_reason("Empty builder".to_string()))?
       .build()
-      .map_err(|e| Error::from_reason(format!("{}", e)))
+      .map_err(|e| Error::from_reason(format!("{e}")))
   }
 
   fn resolve(&mut self, _env: Env, output: Self::Output) -> Result<Self::JsValue> {
@@ -45,7 +45,7 @@ impl Task for WordPieceFromFilesTask {
       .take()
       .ok_or(Error::from_reason("Empty builder".to_string()))?
       .build()
-      .map_err(|e| Error::from_reason(format!("{}", e)))
+      .map_err(|e| Error::from_reason(format!("{e}")))
   }
 
   fn resolve(&mut self, _env: Env, output: Self::Output) -> Result<Self::JsValue> {
@@ -68,7 +68,7 @@ impl Task for WordLevelFromFilesTask {
       .take()
       .ok_or(Error::from_reason("Empty builder".to_string()))?
       .build()
-      .map_err(|e| Error::from_reason(format!("{}", e)))
+      .map_err(|e| Error::from_reason(format!("{e}")))
   }
 
   fn resolve(&mut self, _env: Env, output: Self::Output) -> Result<Self::JsValue> {

bindings/node/src/tasks/tokenizer.rs

Lines changed: 4 additions & 4 deletions
@@ -28,7 +28,7 @@ impl Task for EncodeTask<'static> {
           .ok_or(Error::from_reason("No provided input"))?,
         self.add_special_tokens,
       )
-      .map_err(|e| Error::from_reason(format!("{}", e)))
+      .map_err(|e| Error::from_reason(format!("{e}")))
   }
 
   fn resolve(&mut self, _env: Env, output: Self::Output) -> Result<Self::JsValue> {
@@ -55,7 +55,7 @@ impl Task for DecodeTask {
       .read()
       .unwrap()
       .decode(&self.ids, self.skip_special_tokens)
-      .map_err(|e| Error::from_reason(format!("{}", e)))
+      .map_err(|e| Error::from_reason(format!("{e}")))
   }
 
   fn resolve(&mut self, _env: Env, output: Self::Output) -> Result<Self::JsValue> {
@@ -85,7 +85,7 @@ impl Task for EncodeBatchTask<'static> {
           .ok_or(Error::from_reason("No provided input"))?,
         self.add_special_tokens,
       )
-      .map_err(|e| Error::from_reason(format!("{}", e)))
+      .map_err(|e| Error::from_reason(format!("{e}")))
   }
 
   fn resolve(&mut self, _env: Env, output: Self::Output) -> Result<Self::JsValue> {
@@ -118,7 +118,7 @@ impl Task for DecodeBatchTask {
       .read()
       .unwrap()
       .decode_batch(&ids, self.skip_special_tokens)
-      .map_err(|e| Error::from_reason(format!("{}", e)))
+      .map_err(|e| Error::from_reason(format!("{e}")))
   }
 
   fn resolve(&mut self, _env: Env, output: Self::Output) -> Result<Self::JsValue> {

bindings/node/src/tokenizer.rs

Lines changed: 5 additions & 7 deletions
@@ -251,7 +251,7 @@ impl Tokenizer {
       .read()
       .unwrap()
       .save(path, pretty)
-      .map_err(|e| Error::from_reason(format!("{}", e)))
+      .map_err(|e| Error::from_reason(format!("{e}")))
   }
 
   #[napi]
@@ -341,9 +341,7 @@ impl Tokenizer {
       PreTokenizer,
       Processor,
       Decoder,
-    > = s
-      .parse()
-      .map_err(|e| Error::from_reason(format!("{}", e)))?;
+    > = s.parse().map_err(|e| Error::from_reason(format!("{e}")))?;
     Ok(Self {
       tokenizer: Arc::new(RwLock::new(tokenizer)),
     })
@@ -352,7 +350,7 @@ impl Tokenizer {
   #[napi(factory)]
   pub fn from_file(file: String) -> Result<Self> {
     let tokenizer = tk::tokenizer::TokenizerImpl::from_file(file)
-      .map_err(|e| Error::from_reason(format!("Error loading from file{}", e)))?;
+      .map_err(|e| Error::from_reason(format!("Error loading from file{e}")))?;
     Ok(Self {
       tokenizer: Arc::new(RwLock::new(tokenizer)),
     })
@@ -472,7 +470,7 @@ impl Tokenizer {
       .write()
       .unwrap()
       .train_from_files(&mut trainer, files)
-      .map_err(|e| Error::from_reason(format!("{}", e)))?;
+      .map_err(|e| Error::from_reason(format!("{e}")))?;
     Ok(())
   }
 
@@ -504,7 +502,7 @@ impl Tokenizer {
         },
         add_special_tokens,
       )
-      .map_err(|e| Error::from_reason(format!("{}", e)))?
+      .map_err(|e| Error::from_reason(format!("{e}")))?
       .into(),
     )
   }

bindings/python/src/decoders.rs

Lines changed: 2 additions & 4 deletions
@@ -107,8 +107,7 @@ impl PyDecoder {
     fn __getstate__(&self, py: Python) -> PyResult<PyObject> {
         let data = serde_json::to_string(&self.decoder).map_err(|e| {
             exceptions::PyException::new_err(format!(
-                "Error while attempting to pickle Decoder: {}",
-                e
+                "Error while attempting to pickle Decoder: {e}"
             ))
         })?;
         Ok(PyBytes::new(py, data.as_bytes()).into())
@@ -119,8 +118,7 @@ impl PyDecoder {
             Ok(s) => {
                 self.decoder = serde_json::from_slice(s).map_err(|e| {
                     exceptions::PyException::new_err(format!(
-                        "Error while attempting to unpickle Decoder: {}",
-                        e
+                        "Error while attempting to unpickle Decoder: {e}"
                     ))
                 })?;
                 Ok(())

bindings/python/src/encoding.rs

Lines changed: 9 additions & 13 deletions
@@ -33,8 +33,7 @@ impl PyEncoding {
     fn __getstate__(&self, py: Python) -> PyResult<PyObject> {
         let data = serde_json::to_string(&self.encoding).map_err(|e| {
             exceptions::PyException::new_err(format!(
-                "Error while attempting to pickle Encoding: {}",
-                e
+                "Error while attempting to pickle Encoding: {e}"
             ))
         })?;
         Ok(PyBytes::new(py, data.as_bytes()).into())
@@ -45,8 +44,7 @@ impl PyEncoding {
            Ok(s) => {
                self.encoding = serde_json::from_slice(s).map_err(|e| {
                    exceptions::PyException::new_err(format!(
-                        "Error while attempting to unpickle Encoding: {}",
-                        e
+                        "Error while attempting to unpickle Encoding: {e}"
                    ))
                })?;
                Ok(())
@@ -407,17 +405,16 @@ impl PyEncoding {
                         "left" => Ok(PaddingDirection::Left),
                         "right" => Ok(PaddingDirection::Right),
                         other => Err(PyError(format!(
-                            "Unknown `direction`: `{}`. Use \
-                             one of `left` or `right`",
-                            other
+                            "Unknown `direction`: `{other}`. Use \
+                             one of `left` or `right`"
                         ))
                         .into_pyerr::<exceptions::PyValueError>()),
                     }?;
                 }
                 "pad_id" => pad_id = value.extract()?,
                 "pad_type_id" => pad_type_id = value.extract()?,
                 "pad_token" => pad_token = value.extract()?,
-                _ => println!("Ignored unknown kwarg option {}", key),
+                _ => println!("Ignored unknown kwarg option {key}"),
             }
         }
     }
@@ -446,11 +443,10 @@ impl PyEncoding {
         let tdir = match direction {
             "left" => Ok(TruncationDirection::Left),
             "right" => Ok(TruncationDirection::Right),
-            _ => Err(PyError(format!(
-                "Invalid truncation direction value : {}",
-                direction
-            ))
-            .into_pyerr::<exceptions::PyValueError>()),
+            _ => Err(
+                PyError(format!("Invalid truncation direction value : {direction}"))
+                    .into_pyerr::<exceptions::PyValueError>(),
+            ),
         }?;
 
         self.encoding.truncate(max_length, stride, tdir);

bindings/python/src/error.rs

Lines changed: 3 additions & 3 deletions
@@ -13,7 +13,7 @@ impl PyError {
         PyError(String::from(s))
     }
     pub fn into_pyerr<T: PyTypeInfo>(self) -> PyErr {
-        PyErr::new::<T, _>(format!("{}", self))
+        PyErr::new::<T, _>(format!("{self}"))
     }
 }
 impl Display for PyError {
@@ -26,7 +26,7 @@ impl std::error::Error for PyError {}
 pub struct ToPyResult<T>(pub Result<T>);
 impl<T> From<ToPyResult<T>> for PyResult<T> {
     fn from(v: ToPyResult<T>) -> Self {
-        v.0.map_err(|e| exceptions::PyException::new_err(format!("{}", e)))
+        v.0.map_err(|e| exceptions::PyException::new_err(format!("{e}")))
     }
 }
 impl<T> ToPyResult<T> {
@@ -37,6 +37,6 @@ impl<T> ToPyResult<T> {
 
 pub(crate) fn deprecation_warning(py: Python<'_>, version: &str, message: &str) -> PyResult<()> {
     let deprecation_warning = py.import("builtins")?.getattr("DeprecationWarning")?;
-    let full_message = format!("Deprecated in {}: {}", version, message);
+    let full_message = format!("Deprecated in {version}: {message}");
     pyo3::PyErr::warn(py, &deprecation_warning, &CString::new(full_message)?, 0)
 }

bindings/python/src/lib.rs

Lines changed: 1 addition & 2 deletions
@@ -38,8 +38,7 @@ extern "C" fn child_after_fork() {
     eprintln!("To disable this warning, you can either:");
     eprintln!(
         "\t- Avoid using `tokenizers` before the fork if possible\n\
-         \t- Explicitly set the environment variable {}=(true | false)",
-        ENV_VARIABLE
+         \t- Explicitly set the environment variable {ENV_VARIABLE}=(true | false)"
     );
     set_parallelism(false);
 }
