Fix/luxonis dataset converter (#90)
* Fix annotation yielding

* Add delete_existing param

* Polishing

* chore: trigger tests only manually

---------

Co-authored-by: Nikita Sokovnin <49622375+sokovninn@users.noreply.github.com>
HonzaCuhel and sokovninn authored Mar 3, 2025
1 parent 0492085 commit b39a334
Showing 2 changed files with 33 additions and 33 deletions.
5 changes: 0 additions & 5 deletions .github/workflows/tests.yaml
@@ -1,11 +1,6 @@
 name: Tests
 
 on:
-  pull_request:
-    paths:
-      - 'datadreamer/**/**.py'
-      - 'tests/core_tests/**/**.py'
-      - .github/workflows/tests.yaml
   workflow_dispatch:
 
 jobs:
61 changes: 33 additions & 28 deletions datadreamer/utils/luxonis_dataset_converter.py
@@ -122,44 +122,44 @@ def dataset_generator():
                     h = min(box[3] / height - y, 1 - y)
                     annotation["boundingbox"] = {"x": x, "y": y, "w": w, "h": h}
 
-                if has_masks:
-                    mask = image_data["masks"][i]
-                    if isinstance(mask, list):
-                        poly = [(point[0] / width, point[1] / height) for point in mask]
-                        annotation["instance_segmentation"] = {
-                            "points": poly,
-                            "height": height,
-                            "width": width,
-                        }
-                    else:
-                        annotation["instance_segmentation"] = {
-                            "counts": mask["counts"],
-                            "height": mask["size"][0],
-                            "width": mask["size"][1],
-                        }
-
-                yield {
-                    "file": image_full_path,
-                    "task": f"datadreamer_{task}",
-                    "annotation": annotation,
-                }
+                    if has_masks:
+                        mask = image_data["masks"][i]
+                        if isinstance(mask, list):
+                            poly = [
+                                (point[0] / width, point[1] / height) for point in mask
+                            ]
+                            annotation["instance_segmentation"] = {
+                                "points": poly,
+                                "height": height,
+                                "width": width,
+                            }
+                        else:
+                            annotation["instance_segmentation"] = {
+                                "counts": mask["counts"],
+                                "height": mask["size"][0],
+                                "width": mask["size"][1],
+                            }
+
+                    yield {
+                        "file": image_full_path,
+                        "task": f"datadreamer_{task}",
+                        "annotation": annotation,
+                    }
 
         dataset_name = (
             os.path.basename(output_dir)
             if self.dataset_name is None or self.dataset_name == ""
             else self.dataset_name
         )
 
-        if LuxonisDataset.exists(dataset_name):
-            dataset = LuxonisDataset(dataset_name)
-            dataset.delete_dataset()
-
         # if dataset_plugin is set, use that
         if self.dataset_plugin:
            if "GOOGLE_APPLICATION_CREDENTIALS" in os.environ:
                logger.info(f"Using {self.dataset_plugin} dataset")
                dataset_constructor = DATASETS_REGISTRY.get(self.dataset_plugin)
-                dataset = dataset_constructor(dataset_name)
+                dataset = dataset_constructor(
+                    dataset_name, delete_existing=True, delete_remote=True
+                )
             else:
                 raise ValueError(
                     "GOOGLE_APPLICATION_CREDENTIALS environment variable is not set for using the dataset plugin."
@@ -170,10 +170,15 @@ def dataset_generator():
             and "GOOGLE_APPLICATION_CREDENTIALS" in os.environ
         ):
             logger.info("Using GCS bucket")
-            dataset = LuxonisDataset(dataset_name, bucket_storage=BucketStorage.GCS)
+            dataset = LuxonisDataset(
+                dataset_name,
+                bucket_storage=BucketStorage.GCS,
+                delete_existing=True,
+                delete_remote=True,
+            )
         else:
             logger.info("Using local dataset")
-            dataset = LuxonisDataset(dataset_name)
+            dataset = LuxonisDataset(dataset_name, delete_existing=True)
 
         dataset.add(dataset_generator())
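The second change drops the manual cleanup (the removed LuxonisDataset.exists() / delete_dataset() block) in favour of the delete_existing parameter, with delete_remote=True added for bucket-backed and plugin datasets. A hedged sketch of the resulting construction logic, using only the calls visible in the diff; the import path and the dataset name are assumptions.

    import os

    # Import path assumed from the converter's use of LuxonisDataset / BucketStorage.
    from luxonis_ml.data import BucketStorage, LuxonisDataset

    dataset_name = "my_datadreamer_dataset"  # hypothetical name

    if "GOOGLE_APPLICATION_CREDENTIALS" in os.environ:
        # Remote (GCS-backed) dataset: also clear the remote copy before recreating it.
        dataset = LuxonisDataset(
            dataset_name,
            bucket_storage=BucketStorage.GCS,
            delete_existing=True,
            delete_remote=True,
        )
    else:
        # Local dataset: delete_existing replaces the old exists()/delete_dataset() check.
        dataset = LuxonisDataset(dataset_name, delete_existing=True)

    # dataset.add(dataset_generator())  # as in the converter

The apparent benefit is that the recreate-from-scratch behaviour now lives in the constructor call itself rather than in an explicit exists-then-delete step before every conversion.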
