Skip to content

Commit

Permalink
Fix failing tests (#1181)
Browse files Browse the repository at this point in the history
  • Loading branch information
CBroz1 authored Nov 8, 2024
1 parent 42556d6 commit fa1114e
Show file tree
Hide file tree
Showing 7 changed files with 15 additions and 10 deletions.
5 changes: 4 additions & 1 deletion CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,8 @@ dj.FreeTable(dj.conn(), "common_session.session_group").drop()
- Import `datajoint.dependencies.unite_master_parts` -> `topo_sort` #1116,
#1137, #1162
- Fix bool settings imported from dj config file #1117
- Allow definition of tasks and new probe entries from config #1074, #1120, #1179
- Allow definition of tasks and new probe entries from config #1074, #1120,
#1179
- Enforce match between ingested nwb probe geometry and existing table entry
#1074
- Update DataJoint install and password instructions #1131
Expand All @@ -35,9 +36,11 @@ dj.FreeTable(dj.conn(), "common_session.session_group").drop()
- Remove mambaforge from tests #1153
- Remove debug statement #1164
- Add testing for python versions 3.9, 3.10, 3.11, 3.12 #1169
- Initialize tables in pytests #1181
- Allow python \< 3.13 #1169
- Remove numpy version restriction #1169
- Merge table delete removes orphaned master entries #1164
- Edit `merge_fetch` to expect positional before keyword arguments #1181

### Pipelines

Expand Down
2 changes: 1 addition & 1 deletion src/spyglass/utils/dj_merge_tables.py
Original file line number Diff line number Diff line change
Expand Up @@ -774,7 +774,7 @@ def merge_restrict_class(
return parent_class & parent_key

def merge_fetch(
self, restriction: str = True, log_export=True, *attrs, **kwargs
self, *attrs, restriction: str = True, log_export=True, **kwargs
) -> list:
"""Perform a fetch across all parts. If >1 result, return as a list.
Expand Down
4 changes: 2 additions & 2 deletions src/spyglass/utils/mixins/export.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
from os import environ
from re import match as re_match

from datajoint.condition import make_condition
from datajoint.condition import AndList, make_condition
from datajoint.table import Table
from packaging.version import parse as version_parse

Expand Down Expand Up @@ -320,7 +320,7 @@ def restrict(self, restriction):
log_export = "fetch_nwb" not in self._called_funcs()
return self._run_with_log(
super().restrict,
restriction=dj.AndList([restriction, self.restriction]),
restriction=AndList([restriction, self.restriction]),
log_export=log_export,
)

Expand Down
2 changes: 1 addition & 1 deletion tests/common/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,4 +56,4 @@ def common_ephys(common):
@pytest.fixture(scope="session")
def pop_common_electrode_group(common_ephys):
common_ephys.ElectrodeGroup.populate()
yield common_ephys.ElectrodeGroup
yield common_ephys.ElectrodeGroup()
2 changes: 1 addition & 1 deletion tests/common/test_position.py
Original file line number Diff line number Diff line change
Expand Up @@ -97,7 +97,7 @@ def position_video(common_position):

def test_position_video(position_video, upsample_position):
_ = position_video.populate()
assert len(position_video) == 1, "Failed to populate PositionVideo table."
assert len(position_video) == 2, "Failed to populate PositionVideo table."


def test_convert_to_pixels():
Expand Down
2 changes: 1 addition & 1 deletion tests/common/test_usage.py
Original file line number Diff line number Diff line change
Expand Up @@ -127,7 +127,7 @@ def test_export_populate(populate_export):
table, file = populate_export

assert len(file) == 4, "Export tables not captured correctly"
assert len(table) == 35, "Export files not captured correctly"
assert len(table) == 37, "Export files not captured correctly"


def test_invalid_export_id(export_tbls):
Expand Down
8 changes: 5 additions & 3 deletions tests/utils/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -243,14 +243,16 @@ def graph_tables(dj_conn, graph_schema):
# Merge inserts after declaring tables
merge_keys = graph_schema["PkNode"].fetch("KEY", offset=1, as_dict=True)
graph_schema["MergeOutput"].insert(merge_keys, skip_duplicates=True)
merge_child_keys = graph_schema["MergeOutput"].merge_fetch(
True, "merge_id", offset=1
merge_child_keys = graph_schema["MergeOutput"]().merge_fetch(
"merge_id", restriction=True, offset=1
)
merge_child_inserts = [
(i, j, k + 10)
for i, j, k in zip(merge_child_keys, range(4), range(10, 15))
]
graph_schema["MergeChild"].insert(merge_child_inserts, skip_duplicates=True)
graph_schema["MergeChild"]().insert(
merge_child_inserts, skip_duplicates=True
)

yield graph_schema

Expand Down

0 comments on commit fa1114e

Please sign in to comment.