Skip to content

Commit b18c412

Browse files
authored
Add dataset.delete_scene (#199)
* Add dataset.delete_scene * Improve docstring of delete_scene
1 parent 529fb01 commit b18c412

File tree

5 files changed

+71
-3
lines changed

5 files changed

+71
-3
lines changed
Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,18 @@
1+
from typing import List, Optional
2+
3+
from nucleus.pydantic_base import DictCompatibleImmutableModel
4+
5+
6+
class ScenesListEntry(DictCompatibleImmutableModel):
7+
"""/dataset/{dataset_id}/scenes_list nested payload"""
8+
9+
id: str
10+
reference_id: str
11+
type: str
12+
metadata: Optional[dict] = {}
13+
14+
15+
class ScenesList(DictCompatibleImmutableModel):
16+
"""/dataset/{dataset_id}/scenes_list payload"""
17+
18+
scenes: List[ScenesListEntry]

nucleus/dataset.py

Lines changed: 16 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -50,6 +50,7 @@
5050
)
5151
from .data_transfer_object.dataset_info import DatasetInfo
5252
from .data_transfer_object.dataset_size import DatasetSize
53+
from .data_transfer_object.scenes_list import ScenesList, ScenesListEntry
5354
from .dataset_item import (
5455
DatasetItem,
5556
check_all_paths_remote,
@@ -186,13 +187,14 @@ def items(self) -> List[DatasetItem]:
186187
return constructed_dataset_items
187188

188189
@property
189-
def scenes(self) -> List[Dict[str, Any]]:
190+
def scenes(self) -> List[ScenesListEntry]:
190191
"""List of ID, reference ID, type, and metadata for all scenes in the Dataset."""
191192
response = self._client.make_request(
192193
{}, f"dataset/{self.id}/scenes_list", requests.get
193194
)
194195

195-
return response.get("scenes", None)
196+
scenes_list = ScenesList.parse_obj(response)
197+
return scenes_list.scenes
196198

197199
@sanitize_string_args
198200
def autotag_items(self, autotag_name, for_scores_greater_than=0):
@@ -738,6 +740,18 @@ def delete_item(self, reference_id: str) -> dict:
738740
requests.delete,
739741
)
740742

743+
@sanitize_string_args
744+
def delete_scene(self, reference_id: str):
745+
"""Deletes a Scene associated with the Dataset
746+
747+
All items, annotations and predictions associated with the scene will be
748+
deleted as well.
749+
750+
Parameters:
751+
reference_id: The user-defined reference ID of the scene to delete.
752+
"""
753+
self._client.delete(f"dataset/{self.id}/scene/{reference_id}")
754+
741755
def list_autotags(self):
742756
"""Fetches all autotags of the dataset.
743757

nucleus/pydantic_base.py

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,3 +14,13 @@ class DictCompatibleModel(BaseModel):
1414

1515
def __getitem__(self, key):
1616
return getattr(self, key)
17+
18+
19+
class DictCompatibleImmutableModel(ImmutableModel):
20+
"""Backwards compatible wrapper where we transform dictionaries into Pydantic Models
21+
22+
Allows us to access model.key with model["key"].
23+
"""
24+
25+
def __getitem__(self, key):
26+
return getattr(self, key)

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@ exclude = '''
2121

2222
[tool.poetry]
2323
name = "scale-nucleus"
24-
version = "0.5.1"
24+
version = "0.5.2"
2525
description = "The official Python client library for Nucleus, the Data Platform for AI"
2626
license = "MIT"
2727
authors = ["Scale AI Nucleus Team <nucleusapi@scaleapi.com>"]

tests/test_scene.py

Lines changed: 26 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
import copy
2+
import time
23

34
import pytest
45

@@ -438,6 +439,31 @@ def test_scene_upload_and_update(dataset_scene):
438439
}
439440

440441

442+
@pytest.mark.integration
443+
def test_scene_deletion(dataset_scene):
444+
payload = TEST_LIDAR_SCENES
445+
scenes = [
446+
LidarScene.from_json(scene_json) for scene_json in payload[SCENES_KEY]
447+
]
448+
update = payload[UPDATE_KEY]
449+
450+
job = dataset_scene.append(scenes, update=update, asynchronous=True)
451+
job.sleep_until_complete()
452+
453+
uploaded_scenes = dataset_scene.scenes
454+
assert len(uploaded_scenes) == len(scenes)
455+
assert all(
456+
u["reference_id"] == o.reference_id
457+
for u, o in zip(uploaded_scenes, scenes)
458+
)
459+
460+
for scene in uploaded_scenes:
461+
dataset_scene.delete_scene(scene.reference_id)
462+
time.sleep(1)
463+
scenes = dataset_scene.scenes
464+
assert len(scenes) == 0, f"Expected to delete all scenes, got: {scenes}"
465+
466+
441467
@pytest.mark.integration
442468
def test_scene_upload_async_item_dataset(dataset_item):
443469
payload = TEST_LIDAR_SCENES

0 commit comments

Comments (0)