Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions roboflow/core/workspace.py
Original file line number Diff line number Diff line change
Expand Up @@ -287,6 +287,7 @@ def upload_dataset(
project_type: str = "object-detection",
batch_name=None,
num_retries=0,
are_predictions=False,
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I'd prefer to stay consistent with the rest of the codebase and keep the parameter name as `is_prediction`.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Totally makes sense — I've updated it.

):
"""
Upload a dataset to Roboflow.
Expand All @@ -298,6 +299,9 @@ def upload_dataset(
dataset_format (str): format of the dataset (`voc`, `yolov8`, `yolov5`)
project_license (str): license of the project (set to `private` for private projects, only available for paid customers)
project_type (str): type of the project (only `object-detection` is supported)
batch_name (str, optional): name of the batch to upload the images to. Defaults to an automatically generated value.
num_retries (int, optional): number of times to retry uploading an image if the upload fails. Defaults to 0.
are_predictions (bool, optional): whether the annotations provided in the dataset are predictions and not ground truth. Defaults to False.
""" # noqa: E501 // docs
if dataset_format != "NOT_USED":
print("Warning: parameter 'dataset_format' is deprecated and will be removed in a future release")
Expand Down Expand Up @@ -352,6 +356,7 @@ def _upload_image(imagedesc):
sequence_number=imagedesc.get("index"),
sequence_size=len(images),
num_retry_uploads=num_retries,
is_prediction=are_predictions,
)

return image, upload_time, upload_retry_attempts
Expand Down
44 changes: 44 additions & 0 deletions tests/test_project.py
Original file line number Diff line number Diff line change
Expand Up @@ -373,6 +373,50 @@ def test_project_upload_dataset(self):
"params": {},
"assertions": {"save_annotation": {"count": 1}},
},
{
"name": "with_predictions_flag_true",
"dataset": [
{"file": "pred1.jpg", "split": "train", "annotationfile": {"file": "pred1.xml"}},
{"file": "pred2.jpg", "split": "valid", "annotationfile": {"file": "pred2.xml"}},
],
"params": {"are_predictions": True},
"assertions": {
"upload": {"count": 2, "kwargs": {"is_prediction": True}},
"save_annotation": {"count": 2},
},
},
{
"name": "with_predictions_flag_false",
"dataset": [
{"file": "gt1.jpg", "split": "train", "annotationfile": {"file": "gt1.xml"}},
],
"params": {"are_predictions": False},
"assertions": {
"upload": {"count": 1, "kwargs": {"is_prediction": False}},
"save_annotation": {"count": 1},
},
},
{
"name": "predictions_with_batch",
"dataset": [
{"file": "batch_pred.jpg", "split": "train", "annotationfile": {"file": "batch_pred.xml"}},
],
"params": {
"are_predictions": True,
"batch_name": "prediction-batch",
"num_retries": 2,
},
"assertions": {
"upload": {
"count": 1,
"kwargs": {
"is_prediction": True,
"batch_name": "prediction-batch",
"num_retry_uploads": 2,
},
},
},
},
]

error_cases = [
Expand Down
Loading