feat: Delete/transfer video (#696)
* feat: Delete recording files when deleting recording

* fix: Fix share recording feature

* feat: Share related files when transferring recording

* fix: Fix tests and lint code

---------

Co-authored-by: Richard Abrich <[email protected]>
KIRA009 and abrichr authored Jun 5, 2024
1 parent 6492120 commit 889d48c
Showing 6 changed files with 94 additions and 21 deletions.
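
Taken together, the changes make deleting a recording also remove its on-disk artifacts (the performance plot and the captured video), and make exporting a recording bundle those artifacts into the shared zip. A minimal usage sketch of the new delete path (not part of this commit; the read_only flag value and the example recording_id are assumptions):

# Hypothetical usage sketch, not part of the commit.
from openadapt.db import crud

recording_id = 1  # example id, assumed to exist
with crud.get_new_session(read_only=False) as session:  # read_only=False assumed
    recording = crud.get_recording_by_id(session, recording_id)
    crud.delete_recording(session, recording)
    # delete_recording() now also calls utils.delete_performance_plot() and
    # openadapt.video.delete_video_file() with the recording's timestamp.

Note that delete_video_file is imported inside delete_recording itself, presumably to avoid a circular import between the db and video modules.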
7 changes: 7 additions & 0 deletions openadapt/db/crud.py
@@ -268,9 +268,16 @@ def delete_recording(session: SaSession, recording: Recording) -> None:
session (sa.orm.Session): The database session.
recording (Recording): The recording object.
"""
recording_timestamp = recording.timestamp
session.query(Recording).filter(Recording.id == recording.id).delete()
session.commit()

utils.delete_performance_plot(recording_timestamp)

from openadapt.video import delete_video_file

delete_video_file(recording_timestamp)


def get_all_recordings(session: SaSession) -> list[Recording]:
"""Get all recordings.
9 changes: 3 additions & 6 deletions openadapt/db/db.py
@@ -159,18 +159,15 @@ def genericize_datatypes(
# Insert the recording into the target recording table
tgt_conn.execute(tgt_recording_table.insert().values(src_recording))

# Get the timestamp from the source recording
src_timestamp = src_recording["timestamp"]

# Copy data from tables with the same timestamp
# Copy data from tables with the same recording_id
for table in src_metadata.sorted_tables:
if (
table.name not in exclude_tables
and "recording_timestamp" in table.columns.keys()
):
# Select data from source table with the same timestamp
# Select data from source table with the same recording_id
src_select = table.select().where(
table.c.recording_timestamp == src_timestamp
table.c.recording_id == recording_id
)
src_rows = src_conn.execute(src_select).fetchall()

30 changes: 25 additions & 5 deletions openadapt/share.py
Expand Up @@ -19,6 +19,8 @@

from openadapt import db, utils
from openadapt.config import RECORDING_DIR_PATH
from openadapt.db import crud
from openadapt.video import get_video_file_path

LOG_LEVEL = "INFO"
utils.configure_logging(logger, LOG_LEVEL)
@@ -33,11 +35,12 @@ def export_recording_to_folder(recording_id: int) -> None:
Returns:
str: The path of the created zip file.
"""
# Create the directory if it doesn't exist
os.makedirs(RECORDING_DIR_PATH, exist_ok=True)

recording_db_path = db.export_recording(recording_id)

assert recording_db_path, recording_db_path
# Create the directory if it doesn't exist
os.makedirs(RECORDING_DIR_PATH, exist_ok=True)

# Get the timestamp from the recording_db_path
timestamp = extract_timestamp_from_filename(os.path.basename(recording_db_path))
@@ -49,9 +52,26 @@ def export_recording_to_folder(recording_id: int) -> None:
zip_filename = f"recording_{recording_id}_{timestamp}.zip"
zip_path = os.path.join(RECORDING_DIR_PATH, zip_filename)

# Create an in-memory zip file and add the db file
with ZipFile(zip_path, "w", ZIP_DEFLATED, compresslevel=9) as zip_file:
zip_file.write(recording_db_path, arcname=db_filename)
zipfile = ZipFile(zip_path, "w", ZIP_DEFLATED, compresslevel=9)
zipfile.write(recording_db_path, arcname=db_filename)

with crud.get_new_session(read_only=True) as session:
recording = crud.get_recording_by_id(session, recording_id)
recording_timestamp = recording.timestamp

performance_plot_path = utils.get_performance_plot_file_path(recording_timestamp)
if os.path.exists(performance_plot_path):
zipfile.write(
performance_plot_path, arcname=os.path.basename(performance_plot_path)
)
logger.info(f"added {performance_plot_path=}")

video_file_path = get_video_file_path(recording_timestamp)
if os.path.exists(video_file_path):
zipfile.write(video_file_path, arcname=os.path.basename(video_file_path))
logger.info(f"added {video_file_path=}")

zipfile.close()

logger.info(f"created {zip_path=}")

34 changes: 30 additions & 4 deletions openadapt/utils.py
@@ -709,6 +709,22 @@ def get_strategy_class_by_name() -> dict:
return class_by_name


def get_performance_plot_file_path(recording_timestamp: float) -> str:
"""Get the filename for the performance plot.
Args:
recording_timestamp (float): The timestamp of the recording.
Returns:
str: The filename.
"""
os.makedirs(PERFORMANCE_PLOTS_DIR_PATH, exist_ok=True)

fname_parts = ["performance", str(recording_timestamp)]
fname = "-".join(fname_parts) + ".png"
return os.path.join(PERFORMANCE_PLOTS_DIR_PATH, fname)


def plot_performance(
recording: Recording = None,
view_file: bool = False,
@@ -788,10 +804,7 @@ def plot_performance(

# TODO: add PROC_WRITE_BY_EVENT_TYPE
if save_file:
fname_parts = ["performance", str(recording.timestamp)]
fname = "-".join(fname_parts) + ".png"
os.makedirs(PERFORMANCE_PLOTS_DIR_PATH, exist_ok=True)
fpath = os.path.join(PERFORMANCE_PLOTS_DIR_PATH, fname)
fpath = get_performance_plot_file_path(recording.timestamp)
logger.info(f"{fpath=}")
plt.savefig(fpath)
if view_file:
@@ -812,6 +825,19 @@ def plot_performance(
)


def delete_performance_plot(recording_timestamp: float) -> None:
"""Delete the performance plot for the given recording timestamp.
Args:
recording_timestamp (float): The timestamp of the recording.
"""
fpath = get_performance_plot_file_path(recording_timestamp)
try:
os.remove(fpath)
except FileNotFoundError as exc:
logger.warning(f"{exc=}")


def strip_element_state(action_event: ActionEvent) -> ActionEvent:
"""Strip the element state from the action event and its children.
14 changes: 14 additions & 0 deletions openadapt/video.py
@@ -28,6 +28,20 @@ def get_video_file_path(recording_timestamp: float) -> str:
)


def delete_video_file(recording_timestamp: float) -> None:
"""Deletes the video file corresponding to the given recording timestamp.
Args:
recording_timestamp (float): The timestamp of the recording to delete.
"""
video_file_path = get_video_file_path(recording_timestamp)
if os.path.exists(video_file_path):
os.remove(video_file_path)
logger.info(f"Deleted video file: {video_file_path}")
else:
logger.error(f"Video file not found: {video_file_path}")


def initialize_video_writer(
output_path: str,
width: int,
21 changes: 15 additions & 6 deletions tests/openadapt/test_share.py
@@ -1,5 +1,6 @@
"""Module to test share.py."""

from collections import namedtuple
from unittest.mock import patch
from zipfile import ZIP_DEFLATED, ZipFile
import os
@@ -26,12 +27,20 @@ def test_export_recording_to_folder() -> None:
with open(recording_db_path, "w") as f:
f.write("Recording data")

# Mock the db.export_recording() function to return the temporary file path
with patch("openadapt.share.db.export_recording", return_value=recording_db_path):
zip_file_path = share.export_recording_to_folder(recording_id)

assert zip_file_path is not None
assert os.path.exists(zip_file_path)
# Mock the crud.get_recording_by_id() function to return a recording object
Recording = namedtuple("Recording", ["timestamp"])
with patch(
"openadapt.db.crud.get_recording_by_id",
return_value=Recording(timestamp=193994394),
):
# Mock the db.export_recording() function to return the temporary file path
with patch(
"openadapt.share.db.export_recording", return_value=recording_db_path
):
zip_file_path = share.export_recording_to_folder(recording_id)

assert zip_file_path is not None
assert os.path.exists(zip_file_path)

# Assert that the file is removed after calling export_recording_to_folder
assert not os.path.exists(recording_db_path), "Temporary file was not removed."
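
The nested patch blocks could equally be written as a single with statement; a sketch (not the committed test, and assuming Python 3.10+ for the parenthesized form):

with (
    patch(
        "openadapt.db.crud.get_recording_by_id",
        return_value=Recording(timestamp=193994394),
    ),
    patch("openadapt.share.db.export_recording", return_value=recording_db_path),
):
    zip_file_path = share.export_recording_to_folder(recording_id)
    assert zip_file_path is not None
    assert os.path.exists(zip_file_path)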
