Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
17 changes: 17 additions & 0 deletions .code-samples.meilisearch.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -449,6 +449,23 @@ reset_typo_tolerance_1: |-
.reset_typo_tolerance()
.await
.unwrap();
# List batches with a custom page size.
get_all_batches_1: |-
  let mut query = meilisearch_sdk::batches::BatchesQuery::new(&client);
  query.with_limit(20);
  let batches: meilisearch_sdk::batches::BatchesResults =
    client.get_batches_with(&query).await.unwrap();
# Fetch a single batch by its uid.
get_batch_1: |-
  let uid: u32 = 42;
  let batch: meilisearch_sdk::batches::Batch = client
    .get_batch(uid)
    .await
    .unwrap();
# Paginate batches by combining `limit` and `from`.
get_all_batches_paginating_1: |-
  let mut query = meilisearch_sdk::batches::BatchesQuery::new(&client);
  query.with_limit(2);
  query.with_from(40);
  let batches: meilisearch_sdk::batches::BatchesResults =
    client.get_batches_with(&query).await.unwrap();
get_stop_words_1: |-
let stop_words: Vec<String> = client
.index("movies")
Expand Down
199 changes: 199 additions & 0 deletions src/batches.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,199 @@
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;

use crate::{client::Client, errors::Error, request::HttpClient};

/// A batch of tasks processed together by Meilisearch.
///
/// Returned by the Batches API; see
/// <https://www.meilisearch.com/docs/reference/api/batches>.
#[derive(Debug, Clone, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Batch {
    /// Unique identifier of the batch.
    pub uid: u32,
    /// When the batch was enqueued; `None` when the server omits the field.
    #[serde(default, with = "time::serde::rfc3339::option")]
    pub enqueued_at: Option<OffsetDateTime>,
    /// When the batch started processing.
    #[serde(default, with = "time::serde::rfc3339::option")]
    pub started_at: Option<OffsetDateTime>,
    /// When the batch finished processing.
    #[serde(default, with = "time::serde::rfc3339::option")]
    pub finished_at: Option<OffsetDateTime>,
    /// Index uid related to this batch (if applicable).
    pub index_uid: Option<String>,
    /// The task uids that are part of this batch.
    pub task_uids: Option<Vec<u32>>,
    /// The strategy that caused the autobatcher to stop batching tasks.
    ///
    /// Introduced in Meilisearch v1.15; `None` when talking to older servers.
    pub batch_strategy: Option<BatchStrategy>,
}

/// Reason why the autobatcher stopped batching tasks into a [`Batch`].
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
#[non_exhaustive]
pub enum BatchStrategy {
    /// The batch reached its configured size threshold.
    SizeLimitReached,
    /// The batch reached its configured time window threshold.
    TimeLimitReached,
    /// Any strategy string this SDK version does not know about.
    ///
    /// `#[serde(other)]` makes deserialization forward-compatible: new
    /// server-side strategies map here instead of failing the whole response.
    #[serde(other)]
    Unknown,
}

/// One page of [`Batch`] results, as returned by `GET /batches`.
#[derive(Debug, Clone, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct BatchesResults {
    /// The batches of the current page.
    pub results: Vec<Batch>,
    /// Total number of batches matching the query.
    pub total: u32,
    /// Maximum number of batches returned in this page.
    pub limit: u32,
    /// Uid of the first batch in this page; `None` when the field is absent.
    pub from: Option<u32>,
    /// Value to pass as `from` to fetch the next page; `None` when exhausted.
    pub next: Option<u32>,
}

/// Query builder for listing batches (`GET /batches`).
///
/// Serializable fields become URL query parameters; `None` fields are omitted.
#[derive(Debug, Serialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct BatchesQuery<'a, Http: HttpClient> {
    // Used only to execute the query via `execute`; never serialized.
    #[serde(skip_serializing)]
    client: &'a Client<Http>,
    /// Maximum number of batches to return.
    #[serde(skip_serializing_if = "Option::is_none")]
    limit: Option<u32>,
    /// The first batch uid that should be returned.
    #[serde(skip_serializing_if = "Option::is_none")]
    from: Option<u32>,
}

impl<'a, Http: HttpClient> BatchesQuery<'a, Http> {
    /// Creates an empty query bound to `client`, with no pagination filters set.
    #[must_use]
    pub fn new(client: &'a Client<Http>) -> BatchesQuery<'a, Http> {
        BatchesQuery { client, limit: None, from: None }
    }

    /// Sets the maximum number of batches the server should return.
    #[must_use]
    pub fn with_limit(&mut self, limit: u32) -> &mut Self {
        self.limit = Some(limit);
        self
    }

    /// Sets the uid of the first batch the server should return.
    #[must_use]
    pub fn with_from(&mut self, from: u32) -> &mut Self {
        self.from = Some(from);
        self
    }

    /// Runs the query against the bound client and lists the matching batches.
    pub async fn execute(&self) -> Result<BatchesResults, Error> {
        self.client.get_batches_with(self).await
    }
}

#[cfg(test)]
mod tests {
    use crate::batches::BatchStrategy;
    use crate::client::Client;

    /// `GET /batches` deserializes a fully-populated batch, including the
    /// `batchStrategy` field introduced in Meilisearch v1.15.
    #[tokio::test]
    async fn test_get_batches_parses_batch_strategy() {
        // Local mock server standing in for a Meilisearch instance.
        let mut s = mockito::Server::new_async().await;
        let base = s.url();

        let response_body = serde_json::json!({
            "results": [
                {
                    "uid": 42,
                    "enqueuedAt": "2024-10-11T11:49:53.000Z",
                    "startedAt": "2024-10-11T11:49:54.000Z",
                    "finishedAt": "2024-10-11T11:49:55.000Z",
                    "indexUid": "movies",
                    "taskUids": [1, 2, 3],
                    // snake_case wire value must map to BatchStrategy::TimeLimitReached.
                    "batchStrategy": "time_limit_reached"
                }
            ],
            "limit": 20,
            "from": null,
            "next": null,
            "total": 1
        })
        .to_string();

        let _m = s
            .mock("GET", "/batches")
            .with_status(200)
            .with_header("content-type", "application/json")
            .with_body(response_body)
            .create_async()
            .await;

        let client = Client::new(base, None::<String>).unwrap();
        let batches = client.get_batches().await.expect("list batches failed");
        assert_eq!(batches.results.len(), 1);
        let b = &batches.results[0];
        assert_eq!(b.uid, 42);
        assert_eq!(b.batch_strategy, Some(BatchStrategy::TimeLimitReached));
    }

    /// `GET /batches/{uid}` deserializes a minimal batch body: optional
    /// fields (timestamps, indexUid) may be absent from the response.
    #[tokio::test]
    async fn test_get_batch_by_uid_parses_batch_strategy() {
        let mut s = mockito::Server::new_async().await;
        let base = s.url();

        let response_body = serde_json::json!({
            "uid": 99,
            "batchStrategy": "size_limit_reached",
            "taskUids": [10, 11]
        })
        .to_string();

        let _m = s
            .mock("GET", "/batches/99")
            .with_status(200)
            .with_header("content-type", "application/json")
            .with_body(response_body)
            .create_async()
            .await;

        let client = Client::new(base, None::<String>).unwrap();
        let batch = client.get_batch(99).await.expect("get batch failed");
        assert_eq!(batch.uid, 99);
        assert_eq!(batch.batch_strategy, Some(BatchStrategy::SizeLimitReached));
    }

    /// BatchesQuery's `limit`/`from` must serialize to URL query parameters;
    /// the mock only matches when both are present with the expected values.
    #[tokio::test]
    async fn test_query_serialization_for_batches() {
        use mockito::Matcher;
        let mut s = mockito::Server::new_async().await;
        let base = s.url();

        let _m = s
            .mock("GET", "/batches")
            .match_query(Matcher::AllOf(vec![
                Matcher::UrlEncoded("limit".into(), "2".into()),
                Matcher::UrlEncoded("from".into(), "40".into()),
            ]))
            .with_status(200)
            .with_header("content-type", "application/json")
            // `from`/`next` omitted on purpose: Option fields default to None.
            .with_body(r#"{"results":[],"limit":2,"total":0}"#)
            .create_async()
            .await;

        let client = Client::new(base, None::<String>).unwrap();
        let mut q = crate::batches::BatchesQuery::new(&client);
        let _ = q.with_limit(2).with_from(40);
        let res = client.get_batches_with(&q).await.expect("request failed");
        assert_eq!(res.limit, 2);
    }
}
86 changes: 86 additions & 0 deletions src/client.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1112,6 +1112,92 @@ impl<Http: HttpClient> Client<Http> {
Ok(tasks)
}

/// List batches using the Batches API.
///
/// See: https://www.meilisearch.com/docs/reference/api/batches
///
/// # Example
///
/// ```
/// # use meilisearch_sdk::client::Client;
/// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
/// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
/// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
/// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
/// let batches = client.get_batches().await.unwrap();
/// # let _ = batches;
/// # });
/// ```
pub async fn get_batches(&self) -> Result<crate::batches::BatchesResults, Error> {
let res = self
.http_client
.request::<(), (), crate::batches::BatchesResults>(
&format!("{}/batches", self.host),
Method::Get { query: () },
200,
)
.await?;
Ok(res)
}

/// List batches with pagination filters.
///
/// # Example
///
/// ```
/// # use meilisearch_sdk::{client::Client, batches::BatchesQuery};
/// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
/// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
/// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
/// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
/// let mut query = BatchesQuery::new(&client);
/// query.with_limit(1);
/// let batches = client.get_batches_with(&query).await.unwrap();
/// # let _ = batches;
/// # });
/// ```
pub async fn get_batches_with(
&self,
query: &crate::batches::BatchesQuery<'_, Http>,
) -> Result<crate::batches::BatchesResults, Error> {
let res = self
.http_client
.request::<&crate::batches::BatchesQuery<'_, Http>, (), crate::batches::BatchesResults>(
&format!("{}/batches", self.host),
Method::Get { query },
200,
)
.await?;
Ok(res)
}

/// Get a single batch by its uid.
///
/// # Example
///
/// ```
/// # use meilisearch_sdk::client::Client;
/// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
/// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
/// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
/// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
/// let uid: u32 = 42;
/// let batch = client.get_batch(uid).await.unwrap();
/// # let _ = batch;
/// # });
/// ```
pub async fn get_batch(&self, uid: u32) -> Result<crate::batches::Batch, Error> {
let res = self
.http_client
.request::<(), (), crate::batches::Batch>(
&format!("{}/batches/{}", self.host, uid),
Method::Get { query: () },
200,
)
.await?;
Ok(res)
}

/// Generates a new tenant token.
///
/// # Example
Expand Down
2 changes: 2 additions & 0 deletions src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -230,6 +230,8 @@
#![warn(clippy::all)]
#![allow(clippy::needless_doctest_main)]

/// Module to interact with the Batches API ([`batches::Batch`], [`batches::BatchesQuery`]).
pub mod batches;
/// Module containing the [`Client`](client::Client) struct.
pub mod client;
/// Module representing the [documents] structures.
Expand Down