Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -139,6 +139,7 @@ for label, interval_data in results.groupby("interval_labels"):
- Upgrade to pynwb>=3.1 #1506
- Remove imports of ndx extensions in main package to prevent errors in nwb io #1506
- Add `analysis_table` property to mixin for custom pipelines #1525
- Fix update bug in `_resolve_external_table` #1536

### Pipelines

Expand Down
22 changes: 17 additions & 5 deletions src/spyglass/utils/dj_helper_fn.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,6 @@
import numpy as np
from datajoint.table import Table
from datajoint.user_tables import TableMeta, UserTable

from spyglass.utils.logging import logger
from spyglass.utils.nwb_helper_fn import file_from_dandi, get_nwb_file

Expand Down Expand Up @@ -105,9 +104,9 @@ def declare_all_merge_tables() -> Tuple[Type[dj.Table]]:
from spyglass.decoding.decoding_merge import DecodingOutput # noqa: F401
from spyglass.lfp.lfp_merge import LFPOutput # noqa: F401
from spyglass.position.position_merge import PositionOutput # noqa: F401
from spyglass.spikesorting.spikesorting_merge import ( # noqa: F401
from spyglass.spikesorting.spikesorting_merge import (
SpikeSortingOutput,
)
) # noqa: F401

return DecodingOutput, LFPOutput, PositionOutput, SpikeSortingOutput

Expand Down Expand Up @@ -492,6 +491,7 @@ def _resolve_external_table(
file_restr = f"filepath LIKE '%{file_name}'"

to_updates = []
tables_to_update = []
if location == "analysis": # Update for each custom Analysis external
for external in AnalysisRegistry().get_externals():
restr_external = external & file_restr
Expand All @@ -503,10 +503,22 @@ def _resolve_external_table(
+ f"{file_name}, cannot resolve."
)
to_updates.append(restr_external)
tables_to_update.append(external)

elif location == "raw":
restr_external = common_schema.external["raw"] & file_restr
if not bool(restr_external):
logger.warning(
f"No entries found in common_schema.external['raw'] for file: {file_name}"
)
return
if len(restr_external) > 1:
raise ValueError(
"Multiple entries found in common_schema.external['raw'] for file: "
+ f"{file_name}, cannot resolve."
)
to_updates.append(restr_external)
tables_to_update.append(common_schema.external["raw"])

if not to_updates:
logger.warning(
Expand All @@ -518,10 +530,10 @@ def _resolve_external_table(
size=Path(filepath).stat().st_size,
contents_hash=dj.hash.uuid_from_file(filepath),
)
for to_update in to_updates:
for to_update, table in zip(to_updates, tables_to_update):
key = to_update.fetch1()
key.update(update_vals)
to_update.update1(key)
table.update1(key)


def make_file_obj_id_unique(nwb_path: str):
Expand Down
Loading