2 changes: 1 addition & 1 deletion roboflow/__init__.py
@@ -15,7 +15,7 @@
from roboflow.models import CLIPModel, GazeModel # noqa: F401
from roboflow.util.general import write_line

__version__ = "1.2.7"
__version__ = "1.2.8"


def check_key(api_key, model, notebook, num_retries=0):
5 changes: 5 additions & 0 deletions roboflow/core/workspace.py
@@ -287,6 +287,7 @@ def upload_dataset(
project_type: str = "object-detection",
batch_name=None,
num_retries=0,
is_prediction=False,
):
"""
Upload a dataset to Roboflow.
@@ -298,6 +299,9 @@
dataset_format (str): format of the dataset (`voc`, `yolov8`, `yolov5`)
project_license (str): license of the project (set to `private` for private projects, only available for paid customers)
project_type (str): type of the project (only `object-detection` is supported)
batch_name (str, optional): name of the batch to upload the images to. Defaults to an automatically generated value.
num_retries (int, optional): number of times to retry uploading an image if the upload fails. Defaults to 0.
is_prediction (bool, optional): whether the annotations provided in the dataset are predictions and not ground truth. Defaults to False.
""" # noqa: E501 // docs
if dataset_format != "NOT_USED":
print("Warning: parameter 'dataset_format' is deprecated and will be removed in a future release")
@@ -352,6 +356,7 @@ def _upload_image(imagedesc):
sequence_number=imagedesc.get("index"),
sequence_size=len(images),
num_retry_uploads=num_retries,
is_prediction=is_prediction,
)

return image, upload_time, upload_retry_attempts
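
In practical terms, the new flag lets a caller mark every annotation in an uploaded dataset as a model prediction rather than ground truth, and it is forwarded unchanged to each per-image upload. A minimal usage sketch under assumptions: the API key, dataset path, and project name below are illustrative, and the positional arguments mirror the existing `upload_dataset` signature; only `is_prediction` is introduced by this PR.

from roboflow import Roboflow

rf = Roboflow(api_key="YOUR_API_KEY")  # illustrative API key
workspace = rf.workspace()

workspace.upload_dataset(
    "./my-voc-dataset",              # dataset path: assumed local folder
    "my-project",                    # project name: assumed
    project_license="private",
    project_type="object-detection",
    batch_name="prediction-batch",   # optional batch name for the upload
    num_retries=2,                   # retry a failed image upload up to twice
    is_prediction=True,              # new in this PR: annotations are predictions, not ground truth
)
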
44 changes: 44 additions & 0 deletions tests/test_project.py
@@ -373,6 +373,50 @@ def test_project_upload_dataset(self):
"params": {},
"assertions": {"save_annotation": {"count": 1}},
},
{
"name": "with_predictions_flag_true",
"dataset": [
{"file": "pred1.jpg", "split": "train", "annotationfile": {"file": "pred1.xml"}},
{"file": "pred2.jpg", "split": "valid", "annotationfile": {"file": "pred2.xml"}},
],
"params": {"is_prediction": True},
"assertions": {
"upload": {"count": 2, "kwargs": {"is_prediction": True}},
"save_annotation": {"count": 2},
},
},
{
"name": "with_predictions_flag_false",
"dataset": [
{"file": "gt1.jpg", "split": "train", "annotationfile": {"file": "gt1.xml"}},
],
"params": {"is_prediction": False},
"assertions": {
"upload": {"count": 1, "kwargs": {"is_prediction": False}},
"save_annotation": {"count": 1},
},
},
{
"name": "predictions_with_batch",
"dataset": [
{"file": "batch_pred.jpg", "split": "train", "annotationfile": {"file": "batch_pred.xml"}},
],
"params": {
"is_prediction": True,
"batch_name": "prediction-batch",
"num_retries": 2,
},
"assertions": {
"upload": {
"count": 1,
"kwargs": {
"is_prediction": True,
"batch_name": "prediction-batch",
"num_retry_uploads": 2,
},
},
},
},
]

error_cases = [
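
The three new cases assert that `is_prediction` (and, in the batch case, `batch_name` and `num_retry_uploads`) reaches the underlying upload call for every image. As a rough, self-contained illustration of that forwarding check — this is not the repository's actual test harness, and the stub below merely stands in for `upload_dataset`'s per-image loop:

from unittest import mock

def upload_each_image(images, upload_fn, is_prediction=False, num_retries=0):
    # Simplified stand-in for the per-image loop in Workspace.upload_dataset.
    for image in images:
        upload_fn(image, num_retry_uploads=num_retries, is_prediction=is_prediction)

uploader = mock.Mock()
upload_each_image(["pred1.jpg", "pred2.jpg"], uploader, is_prediction=True, num_retries=2)

assert uploader.call_count == 2
for call in uploader.call_args_list:
    assert call.kwargs["is_prediction"] is True
    assert call.kwargs["num_retry_uploads"] == 2
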