Skip to content

Commit

Permalink
addressing comments and adding in tests
Browse files Browse the repository at this point in the history
  • Loading branch information
BryonLewis committed Feb 1, 2022
1 parent 055a0c4 commit 50aa602
Show file tree
Hide file tree
Showing 4 changed files with 50 additions and 14 deletions.
3 changes: 0 additions & 3 deletions client/platform/web-girder/views/Home.vue
Original file line number Diff line number Diff line change
Expand Up @@ -89,9 +89,6 @@ export default defineComponent({
}
return null;
},
exportTargetId() {
return this.exportTarget?._id || null;
},
selectedViameFolderIds() {
return this.selected.filter(
({ _modelType, meta }) => _modelType === 'folder' && meta && meta.annotate,
Expand Down
30 changes: 20 additions & 10 deletions server/dive_server/crud_dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -207,6 +207,7 @@ def makeAnnotationAndMedia(dsFolder: types.GirderModel):
dsFolder, user, excludeBelowThreshold, typeFilter
)
mediaFolder = crud.getCloneRoot(user, dsFolder)

source_type = fromMeta(mediaFolder, constants.TypeMarker)
mediaRegex = None
if source_type == constants.ImageSequenceType:
Expand All @@ -215,16 +216,19 @@ def makeAnnotationAndMedia(dsFolder: types.GirderModel):
mediaRegex = constants.videoRegex
return gen, mediaFolder, mediaRegex

failed_datasets = []

def stream():
zip_name = "batch_export"
if len(dsFolders) == 1:
zip_name = dsFolders[0]['name']
z = ziputil.ZipGenerator(zip_name)
z = ziputil.ZipGenerator()
for dsFolder in dsFolders:
if len(dsFolders) > 1:
zip_path = f"./{dsFolder['name']}/"
else:
zip_path = "./"
zip_path = f"./{dsFolder['name']}/"
try:
get_media(dsFolder, user)
except RestException:
failed_datasets.append(
f"Dataset: {dsFolder['name']} was not found. This may be a cloned dataset where the source was deleted.\n"
)
continue

def makeMetajson():
"""Include dataset metadtata file with full export"""
Expand All @@ -240,8 +244,7 @@ def makeMetajson():

for data in z.addFile(makeMetajson, Path(f'{zip_path}meta.json')):
yield data

gen, mediaFolder, mediaRegex = makeAnnotationAndMedia(dsFolder)
gen, mediaFolder, mediaRegex = makeAnnotationAndMedia(dsFolder)
if includeMedia:
# Add media
for item in Folder().childItems(
Expand All @@ -256,6 +259,13 @@ def makeMetajson():
if includeDetections:
for data in z.addFile(gen, Path(f'{zip_path}output_tracks.csv')):
yield data
if len(failed_datasets) > 0:

def makeFailedDatasets():
yield ''.join(failed_datasets)

for data in z.addFile(makeFailedDatasets, Path('./failed_datasets.txt')):
yield data
yield z.footer()

return stream
Expand Down
2 changes: 1 addition & 1 deletion server/dive_server/views_dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -162,7 +162,7 @@ def get_media(self, folder):
"List of track types to filter by",
paramType="query",
required=True,
default=None,
default=[],
requireArray=True,
)
.param(
Expand Down
29 changes: 29 additions & 0 deletions server/tests/integration/test_dataset_download.py
Original file line number Diff line number Diff line change
Expand Up @@ -84,6 +84,35 @@ def test_zip_batch_download(user: dict):
folder_names = list(set([os.path.dirname(x) for x in z.namelist()]))
assert len(folder_names) == len(datasetNames)

@pytest.mark.integration
@pytest.mark.run(order=7)
def test_failed_batch_download():
    """Batch export should still succeed when a source dataset was deleted.

    Deletes every non-clone 'train_mp4' dataset (so clones of it become broken
    references), requests a batch export of the remaining datasets, and checks
    that the returned zip contains a ``failed_datasets.txt`` mentioning each
    deleted dataset name.
    """
    user = users['alice']
    client = getClient(user['login'])
    privateFolder = getTestFolder(client)
    datasetIds = []
    failed_datasets = []
    for dataset in client.listFolder(privateFolder['_id']):
        # Delete the clone source so the export of its clones must fail.
        if 'clone' not in dataset["name"] and 'train_mp4' in dataset['name']:
            client.sendRestRequest('DELETE', f'folder/{dataset["_id"]}')
            failed_datasets.append(dataset["name"])
            continue
        datasetIds.append(dataset["_id"])
    downloaded = client.sendRestRequest(
        'GET',
        f'dive_dataset/export?includeMedia=true&includeDetections=true'
        f'&excludeBelowThreshold=false&folderIds={json.dumps(datasetIds)}',
        jsonResp=False,
    )
    z = ZipFile(io.BytesIO(downloaded.content))
    assert 'failed_datasets.txt' in z.namelist()
    failed_string = z.read('failed_datasets.txt').decode('utf-8')
    # Each deleted dataset's name must appear in the failure report
    # (substring match also covers clone names derived from the source).
    for failed in failed_datasets:
        assert failed in failed_string



@pytest.mark.integration
@pytest.mark.parametrize("user", users.values())
Expand Down

0 comments on commit 50aa602

Please sign in to comment.