Skip to content
Draft
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -139,6 +139,7 @@ for label, interval_data in results.groupby("interval_labels"):
- Upgrade to pynwb>=3.1 #1506
- Remove imports of ndx extensions in main package to prevent errors in nwb io #1506
- Add `analysis_table` property to mixin for custom pipelines #1525
- Fix update bug in `_resolve_external_table` #1536

### Pipelines

Expand Down
12 changes: 7 additions & 5 deletions src/spyglass/utils/dj_helper_fn.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,6 @@
import numpy as np
from datajoint.table import Table
from datajoint.user_tables import TableMeta, UserTable

from spyglass.utils.logging import logger
from spyglass.utils.nwb_helper_fn import file_from_dandi, get_nwb_file

Expand Down Expand Up @@ -105,9 +104,9 @@ def declare_all_merge_tables() -> Tuple[Type[dj.Table]]:
from spyglass.decoding.decoding_merge import DecodingOutput # noqa: F401
from spyglass.lfp.lfp_merge import LFPOutput # noqa: F401
from spyglass.position.position_merge import PositionOutput # noqa: F401
from spyglass.spikesorting.spikesorting_merge import ( # noqa: F401
from spyglass.spikesorting.spikesorting_merge import (
SpikeSortingOutput,
)
) # noqa: F401

return DecodingOutput, LFPOutput, PositionOutput, SpikeSortingOutput

Expand Down Expand Up @@ -492,6 +491,7 @@ def _resolve_external_table(
file_restr = f"filepath LIKE '%{file_name}'"

to_updates = []
table_to_update = []
if location == "analysis": # Update for each custom Analysis external
for external in AnalysisRegistry().get_externals():
restr_external = external & file_restr
Expand All @@ -503,10 +503,12 @@ def _resolve_external_table(
+ f"{file_name}, cannot resolve."
)
to_updates.append(restr_external)
table_to_update.append(external)

elif location == "raw":
restr_external = common_schema.external["raw"] & file_restr
to_updates.append(restr_external)
table_to_update.append(common_schema.external["raw"])

if not to_updates:
logger.warning(
Expand All @@ -518,10 +520,10 @@ def _resolve_external_table(
size=Path(filepath).stat().st_size,
contents_hash=dj.hash.uuid_from_file(filepath),
)
for to_update in to_updates:
for to_update, table in zip(to_updates, table_to_update):
key = to_update.fetch1()
key.update(update_vals)
to_update.update1(key)
table.update1(key)

Comment on lines 465 to 537
Copy link

Copilot AI Feb 19, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The _resolve_external_table function lacks test coverage. Given that this function handles critical database updates and has been the source of a bug (as per issue #1535), adding tests would help prevent regressions. The test file tests/utils/test_dj_helper_fn.py exists but only contains a single deprecation test. Consider adding tests for both the "analysis" and "raw" location branches, including edge cases like empty results, multiple entries, and successful updates.

Copilot uses AI. Check for mistakes.
Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@copilot open a new pull request to apply changes based on this feedback


def make_file_obj_id_unique(nwb_path: str):
Expand Down
Loading