Query simplifications, edits in table explanations, some code refactoring #82

Merged: 25 commits, Sep 28, 2022

Commits (25)
c1585c9  introduce a new activity table design to allow new traces e.g. from f… (tdincer, Jun 29, 2022)
8f38698  Merge branch 'datajoint:main' into main (tdincer, Jun 29, 2022)
b75f1c0  Merge branch 'main' of https://github.com/datajoint/element-calcium-i… (tdincer, Jun 29, 2022)
4fbe41a  address Dimitri's comments (tdincer, Jun 29, 2022)
32f95a7  Merge branch 'main' of https://github.com/tdincer/element-calcium-ima… (tdincer, Jun 29, 2022)
15af911  table join optimization (tdincer, Jun 30, 2022)
3706a11  revert mistakes (tdincer, Jun 30, 2022)
ffede4a  Merge branch 'main' into main (tdincer, Aug 15, 2022)
9b1d577  Update element_calcium_imaging/imaging_withpostprocessing.py (tdincer, Aug 15, 2022)
67fef56  ProcessingMethod description line break (tdincer, Aug 15, 2022)
8c193b5  Merge branch 'main' of https://github.com/tdincer/element-calcium-ima… (tdincer, Aug 15, 2022)
6dc0f1d  param -> caiman_params (tdincer, Aug 15, 2022)
7808aba  Update element_calcium_imaging/imaging_withpostprocessing.py (tdincer, Aug 15, 2022)
c86e412  refine curation definition (tdincer, Aug 15, 2022)
4f32085  Update element_calcium_imaging/imaging_withpostprocessing.py (tdincer, Aug 15, 2022)
5ca5d45  query optimization & misc (tdincer, Aug 15, 2022)
8f0b075  remove Activity key source (tdincer, Aug 16, 2022)
ed49692  rename withpostprocessing to fissa (tdincer, Aug 16, 2022)
8d4f745  Merge branch 'datajoint:main' into main (tdincer, Sep 1, 2022)
2c9d515  merge conflict (tdincer, Sep 27, 2022)
167a7d2  remove imaging_fissa (tdincer, Sep 28, 2022)
409df42  Merge branch 'main' of https://github.com/tdincer/element-calcium-ima… (tdincer, Sep 28, 2022)
b4099c9  changelog (tdincer, Sep 28, 2022)
e3b221b  add tag links (tdincer, Sep 28, 2022)
ca55762  Update CHANGELOG.md (tdincer, Sep 28, 2022)
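
The "table join optimization" and "query optimization & misc" commits correspond to the query changes in the `imaging.py` diff further down: attributes are now fetched from the smallest join (or single table) that carries them instead of from multi-table joins. The following is a minimal before/after sketch, assuming the element's `scan` and `imaging` schemas are activated against a configured database as in the standard workflow; the `key` values are hypothetical.

```python
# Minimal sketch of the query simplification (table names as in
# element_calcium_imaging/imaging.py; the `key` values are hypothetical).
from element_calcium_imaging import imaging, scan

key = {"scan_id": 0, "paramset_idx": 0}  # plus upstream session attributes in practice

# Before: processing_method fetched through a four-way join
method = (
    imaging.ProcessingTask * imaging.ProcessingParamSet
    * imaging.ProcessingMethod * scan.Scan
    & key
).fetch1("processing_method")

# After: join only the two tables that carry the attribute
method = (imaging.ProcessingParamSet * imaging.ProcessingTask & key).fetch1(
    "processing_method"
)

# Before: file paths fetched through a chain of joins
image_files = (
    imaging.ProcessingTask * scan.Scan * scan.ScanInfo * scan.ScanInfo.ScanFile
    & key
).fetch("file_path")

# After: restrict the part table that stores the paths directly
image_files = (scan.ScanInfo.ScanFile & key).fetch("file_path")
```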
9 changes: 9 additions & 0 deletions CHANGELOG.md
@@ -2,6 +2,13 @@

Observes [Semantic Versioning](https://semver.org/spec/v2.0.0.html) standard and [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) convention.


## [0.2.2] - 2022-09-28

+ Update - Minor table explanation edits
+ Update - Query simplifications
+ Update - Minor code refactoring

## [0.2.1] - 2022-09-12

+ Bugfix - fix errors in auto generating new ProcessingTask
@@ -36,6 +43,8 @@ Observes [Semantic Versioning](https://semver.org/spec/v2.0.0.html) standard and
+ Add - `scan` and `imaging` modules
+ Add - Readers for `ScanImage`, `ScanBox`, `Suite2p`, `CaImAn`

[0.2.2]: https://github.com/datajoint/element-calcium-imaging/releases/tag/0.2.2
[0.2.1]: https://github.com/datajoint/element-calcium-imaging/releases/tag/0.2.1
[0.2.0]: https://github.com/datajoint/element-calcium-imaging/releases/tag/0.2.0
[0.1.0]: https://github.com/datajoint/element-calcium-imaging/releases/tag/0.1.0
[0.1.0b0]: https://github.com/datajoint/element-calcium-imaging/releases/tag/0.1.0b0
90 changes: 46 additions & 44 deletions element_calcium_imaging/imaging.py
@@ -68,7 +68,9 @@ def activate(

@schema
class ProcessingMethod(dj.Lookup):
definition = """ # Method, package, analysis suite used for processing of calcium imaging data (e.g. Suite2p, CaImAn, etc.)
definition = """
# Method, package, analysis suite used for processing of calcium imaging
# data (e.g. Suite2p, CaImAn, etc.)
processing_method: char(8)
---
processing_method_desc: varchar(1000)
@@ -168,8 +170,9 @@ def infer_output_dir(cls, key, relative=False, mkdir=False):
).parent
root_dir = find_root_directory(get_imaging_root_data_dir(), scan_dir)

method = (ProcessingParamSet & key).fetch1(
'processing_method').replace(".", "-")
method = (
(ProcessingParamSet & key).fetch1("processing_method").replace(".", "-")
)

processed_dir = pathlib.Path(get_processed_root_data_dir())
output_dir = (
@@ -188,7 +191,7 @@ def generate(cls, scan_key, paramset_idx=0):
"""
Method to auto-generate ProcessingTask entries for a particular Scan using the specified parameter set.
"""
key = {**scan_key, 'paramset_idx': paramset_idx}
key = {**scan_key, "paramset_idx": paramset_idx}

output_dir = cls.infer_output_dir(key, relative=False, mkdir=True)

@@ -199,9 +202,11 @@ def generate(cls, scan_key, paramset_idx=0):
try:
if method == "suite2p":
from element_interface import suite2p_loader

suite2p_loader.Suite2p(output_dir)
elif method == "caiman":
from element_interface import caiman_loader

caiman_loader.CaImAn(output_dir)
else:
raise NotImplementedError(
@@ -267,14 +272,11 @@ def make(self, key):
raise NotImplementedError("Unknown method: {}".format(method))
elif task_mode == "trigger":

method = (
ProcessingTask * ProcessingParamSet * ProcessingMethod * scan.Scan & key
).fetch1("processing_method")
method = (ProcessingParamSet * ProcessingTask & key).fetch1(
"processing_method"
)

image_files = (
ProcessingTask * scan.Scan * scan.ScanInfo * scan.ScanInfo.ScanFile
& key
).fetch("file_path")
image_files = (scan.ScanInfo.ScanFile & key).fetch("file_path")
image_files = [
find_full_path(get_imaging_root_data_dir(), image_file)
for image_file in image_files
@@ -287,9 +289,7 @@ def make(self, key):
"params"
)
suite2p_params["save_path0"] = output_dir
suite2p_params["fs"] = (
scan.ScanInfo & key
).fetch1("fps")
suite2p_params["fs"] = (scan.ScanInfo & key).fetch1("fps")

input_format = pathlib.Path(image_files[0]).suffix
suite2p_params["input_format"] = input_format[1:]
@@ -308,19 +308,19 @@ def make(self, key):
elif method == "caiman":
from element_interface.run_caiman import run_caiman

params = (ProcessingTask * ProcessingParamSet & key).fetch1("params")
sampling_rate, ndepths = (
scan.ScanInfo & key
).fetch1("fps", "ndepths")
caiman_params = (ProcessingTask * ProcessingParamSet & key).fetch1(
"params"
)
sampling_rate, ndepths = (scan.ScanInfo & key).fetch1("fps", "ndepths")

is3D = bool(ndepths > 1)
if is3D:
raise NotImplementedError(
"Caiman pipeline is not capable of analyzing 3D scans at the moment."
"Caiman pipeline is not yet capable of analyzing 3D scans."
)
run_caiman(
file_paths=[f.as_posix() for f in image_files],
parameters=params,
parameters=caiman_params,
sampling_rate=sampling_rate,
output_dir=output_dir,
is3D=is3D,
@@ -338,7 +338,7 @@ def make(self, key):

@schema
class Curation(dj.Manual):
definition = """ # Different rounds of curation performed on the processing results of the imaging data (no-curation can also be included here)
definition = """ # Curation(s) performed on processing results (including none)
-> Processing
curation_id: int
---
@@ -355,7 +355,7 @@ def create1_from_processing_task(self, key, is_curated=False, curation_note=""):
if key not in Processing():
raise ValueError(
f"No corresponding entry in Processing available for: {key};"
f" do `Processing.populate(key)`"
f"Please run `Processing.populate(key)`"
)

output_dir = (ProcessingTask & key).fetch1("processing_output_dir")
@@ -1023,17 +1023,18 @@ class ActivityExtractionMethod(dj.Lookup):

@schema
class Activity(dj.Computed):
definition = """ # inferred neural activity from fluorescence trace - e.g. dff, spikes
definition = """
# Inferred neural activity from fluorescence trace - e.g. dff, spikes
-> Fluorescence
-> ActivityExtractionMethod
"""

class Trace(dj.Part):
definition = """ #
definition = """
-> master
-> Fluorescence.Trace
---
activity_trace: longblob #
activity_trace: longblob
"""

@property
@@ -1062,14 +1063,17 @@ def make(self, key):
suite2p_dataset = imaging_dataset
# ---- iterate through all s2p plane outputs ----
spikes = [
dict(key,
mask=mask_idx,
fluo_channel=0,
activity_trace=spks,
)
for mask_idx, spks in enumerate(
s for plane in suite2p_dataset.planes.values()
for s in plane.spks)
dict(
key,
mask=mask_idx,
fluo_channel=0,
activity_trace=spks,
)
for mask_idx, spks in enumerate(
s
for plane in suite2p_dataset.planes.values()
for s in plane.spks
)
]

self.insert1(key)
@@ -1085,18 +1089,16 @@ def make(self, key):
segmentation_channel = params.get(
"segmentation_channel", caiman_dataset.segmentation_channel
)

self.insert1(key)
self.Trace.insert(
dict(
key,
mask=mask["mask_id"],
fluo_channel=segmentation_channel,
activity_trace=mask[
attr_mapper[key["extraction_method"]]
],
)
for mask in caiman_dataset.masks
self.Trace.insert(
dict(
key,
mask=mask["mask_id"],
fluo_channel=segmentation_channel,
activity_trace=mask[attr_mapper[key["extraction_method"]]],
)
for mask in caiman_dataset.masks
)
else:
raise NotImplementedError("Unknown/unimplemented method: {}".format(method))
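
For context on the `ProcessingTask` changes above (the `key = {**scan_key, "paramset_idx": paramset_idx}` cleanup and the auto-generation path), here is a hypothetical usage sketch; the `scan_key` attribute names are illustrative and a configured, activated pipeline is assumed.

```python
# Hypothetical driver for the auto-generation path touched by this PR
# (attribute names in scan_key are illustrative, not prescribed by the element).
from element_calcium_imaging import imaging

scan_key = {"subject": "subject1",
            "session_datetime": "2022-01-01 12:00:00",
            "scan_id": 0}

# Inserts a ProcessingTask with an inferred output directory for paramset 0
imaging.ProcessingTask.generate(scan_key, paramset_idx=0)

# Runs Processing.make (load or trigger, depending on the entry's task_mode)
imaging.Processing.populate(scan_key, display_progress=True)
```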
84 changes: 43 additions & 41 deletions element_calcium_imaging/imaging_no_curation.py
@@ -68,7 +68,9 @@ def activate(

@schema
class ProcessingMethod(dj.Lookup):
definition = """ # Method, package, analysis suite used for processing of calcium imaging data (e.g. Suite2p, CaImAn, etc.)
definition = """
# Method, package, analysis suite used for processing of calcium imaging
# data (e.g. Suite2p, CaImAn, etc.)
processing_method: char(8)
---
processing_method_desc: varchar(1000)
@@ -168,8 +170,9 @@ def infer_output_dir(cls, key, relative=False, mkdir=False):
).parent
root_dir = find_root_directory(get_imaging_root_data_dir(), scan_dir)

method = (ProcessingParamSet & key).fetch1(
'processing_method').replace(".", "-")
method = (
(ProcessingParamSet & key).fetch1("processing_method").replace(".", "-")
)

processed_dir = pathlib.Path(get_processed_root_data_dir())
output_dir = (
@@ -188,7 +191,7 @@ def generate(cls, scan_key, paramset_idx=0):
"""
Method to auto-generate ProcessingTask entries for a particular Scan using the specified parameter set.
"""
key = {**scan_key, 'paramset_idx': paramset_idx}
key = {**scan_key, "paramset_idx": paramset_idx}

output_dir = cls.infer_output_dir(key, relative=False, mkdir=True)

@@ -199,9 +202,11 @@ def generate(cls, scan_key, paramset_idx=0):
try:
if method == "suite2p":
from element_interface import suite2p_loader

suite2p_loader.Suite2p(output_dir)
elif method == "caiman":
from element_interface import caiman_loader

caiman_loader.CaImAn(output_dir)
else:
raise NotImplementedError(
@@ -267,14 +272,11 @@ def make(self, key):
raise NotImplementedError("Unknown method: {}".format(method))
elif task_mode == "trigger":

method = (
ProcessingTask * ProcessingParamSet * ProcessingMethod * scan.Scan & key
).fetch1("processing_method")
method = (ProcessingParamSet * ProcessingTask & key).fetch1(
"processing_method"
)

image_files = (
ProcessingTask * scan.Scan * scan.ScanInfo * scan.ScanInfo.ScanFile
& key
).fetch("file_path")
image_files = (scan.ScanInfo.ScanFile & key).fetch("file_path")
image_files = [
find_full_path(get_imaging_root_data_dir(), image_file)
for image_file in image_files
@@ -287,9 +289,7 @@ def make(self, key):
"params"
)
suite2p_params["save_path0"] = output_dir
suite2p_params["fs"] = (
scan.ScanInfo & key
).fetch1("fps")
suite2p_params["fs"] = (scan.ScanInfo & key).fetch1("fps")

input_format = pathlib.Path(image_files[0]).suffix
suite2p_params["input_format"] = input_format[1:]
@@ -308,19 +308,19 @@ def make(self, key):
elif method == "caiman":
from element_interface.run_caiman import run_caiman

params = (ProcessingTask * ProcessingParamSet & key).fetch1("params")
sampling_rate, ndepths = (
scan.ScanInfo & key
).fetch1("fps", "ndepths")
caiman_params = (ProcessingTask * ProcessingParamSet & key).fetch1(
"params"
)
sampling_rate, ndepths = (scan.ScanInfo & key).fetch1("fps", "ndepths")

is3D = bool(ndepths > 1)
if is3D:
raise NotImplementedError(
"Caiman pipeline is not capable of analyzing 3D scans at the moment."
"Caiman pipeline is not yet capable of analyzing 3D scans."
)
run_caiman(
file_paths=[f.as_posix() for f in image_files],
parameters=params,
parameters=caiman_params,
sampling_rate=sampling_rate,
output_dir=output_dir,
is3D=is3D,
@@ -973,17 +973,18 @@ class ActivityExtractionMethod(dj.Lookup):

@schema
class Activity(dj.Computed):
definition = """ # inferred neural activity from fluorescence trace - e.g. dff, spikes
definition = """
# Inferred neural activity from fluorescence trace - e.g. dff, spikes
-> Fluorescence
-> ActivityExtractionMethod
"""

class Trace(dj.Part):
definition = """ #
definition = """
-> master
-> Fluorescence.Trace
---
activity_trace: longblob #
activity_trace: longblob
"""

@property
@@ -1012,14 +1013,17 @@ def make(self, key):
suite2p_dataset = imaging_dataset
# ---- iterate through all s2p plane outputs ----
spikes = [
dict(key,
mask=mask_idx,
fluo_channel=0,
activity_trace=spks,
)
for mask_idx, spks in enumerate(
s for plane in suite2p_dataset.planes.values()
for s in plane.spks)
dict(
key,
mask=mask_idx,
fluo_channel=0,
activity_trace=spks,
)
for mask_idx, spks in enumerate(
s
for plane in suite2p_dataset.planes.values()
for s in plane.spks
)
]

self.insert1(key)
@@ -1037,16 +1041,14 @@ def make(self, key):
)

self.insert1(key)
self.Trace.insert(
dict(
key,
mask=mask["mask_id"],
fluo_channel=segmentation_channel,
activity_trace=mask[
attr_mapper[key["extraction_method"]]
],
)
for mask in caiman_dataset.masks
self.Trace.insert(
dict(
key,
mask=mask["mask_id"],
fluo_channel=segmentation_channel,
activity_trace=mask[attr_mapper[key["extraction_method"]]],
)
for mask in caiman_dataset.masks
)
else:
raise NotImplementedError("Unknown/unimplemented method: {}".format(method))
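
The reformatted `Activity.make` in both modules relies on the fact that DataJoint's `insert` accepts any iterable of row dictionaries, so part-table rows can be streamed from a comprehension or generator. A minimal sketch with made-up data, assuming activated schemas and an already-inserted `Activity` master row identified by `key`:

```python
# Hypothetical illustration of the generator-based part-table insert used in
# Activity.make (key values, mask indices, and trace values are made up).
from element_calcium_imaging import imaging

key = {"scan_id": 0, "paramset_idx": 0, "curation_id": 0,
       "extraction_method": "suite2p_deconvolution"}  # hypothetical key values

spike_traces = {0: [0.0, 0.1, 0.0], 1: [0.2, 0.0, 0.3]}  # mask index -> activity trace

imaging.Activity.Trace.insert(
    dict(key, mask=mask_idx, fluo_channel=0, activity_trace=trace)
    for mask_idx, trace in spike_traces.items()
)
```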