35 commits
a848e67
Sp6 to Sp7 add missing constraints and tables
acwhite211 Jan 12, 2026
0a88e90
datamodel additions
acwhite211 Jan 13, 2026
6714b4f
add table model ids
acwhite211 Jan 13, 2026
ad10717
allow multi-field primary keys in the datamodel
acwhite211 Jan 13, 2026
f2981d5
modeling naming fiexes
acwhite211 Jan 13, 2026
6aa8762
add Sgrbatchmatchresultset and Sgrmatchconfiguration
acwhite211 Jan 13, 2026
863e1aa
fix field index issue
acwhite211 Jan 14, 2026
19f92ae
Merge branch 'main' into issue-7551
acwhite211 Jan 14, 2026
c9593de
add missing migration commands
acwhite211 Jan 14, 2026
20a6ebe
migration error fix in components
acwhite211 Jan 14, 2026
cf692fe
discline id check
acwhite211 Jan 14, 2026
dc5f505
init fix of unit tests
acwhite211 Jan 14, 2026
c4b7a65
fix base predicates
acwhite211 Jan 14, 2026
974251d
temp
acwhite211 Jan 14, 2026
4038313
patch path fix
acwhite211 Jan 15, 2026
8e9b25d
another patch path fix
acwhite211 Jan 15, 2026
927557f
sqlalchemy build models with multi primary key fields
acwhite211 Jan 16, 2026
9da909b
predicates safe filtering to fix unit test issues
acwhite211 Jan 16, 2026
46ba371
update_locality uiformatter fix
acwhite211 Jan 16, 2026
17da256
Merge branch 'main' into issue-7551
acwhite211 Jan 16, 2026
4b20be6
Lint code with ESLint and Prettier
acwhite211 Jan 16, 2026
766f2e9
add a skip option in the datamodel for sqlalchemy
acwhite211 Jan 23, 2026
b3cdd53
datamodel Table, add skip field
acwhite211 Jan 23, 2026
ec63f06
filter out skipped tables
acwhite211 Jan 23, 2026
9fe5f4a
back populate test fixes
acwhite211 Jan 23, 2026
94a8078
handle sqlalchemy unit test issue for now
acwhite211 Jan 27, 2026
b2e3bb9
Merge branch 'main' into issue-7551
acwhite211 Jan 27, 2026
db1607f
comment out legacy test
acwhite211 Jan 27, 2026
59b21f0
add many-to-many relationships
acwhite211 Jan 27, 2026
f915e43
simplify model and datamodel
acwhite211 Jan 28, 2026
98139a3
model adjustments
acwhite211 Jan 28, 2026
b781ee8
model timestamp many-to-many update fix
acwhite211 Jan 28, 2026
a737819
legacy unit test fix
acwhite211 Jan 28, 2026
76cec14
migration correction
acwhite211 Jan 28, 2026
bbbd50d
Merge branch 'main' into issue-7551
acwhite211 Jan 30, 2026
@@ -36,7 +36,7 @@ def test_localityupdate_not_exist(self):
self._assertStatusCodeEqual(response, http.HttpResponseNotFound.status_code)
self.assertEqual(response.content.decode(), f"The localityupdate with task id '{task_id}' was not found")

@patch("specifyweb.specify.views.update_locality_task.AsyncResult")
@patch("specifyweb.backend.locality_update_tool.views.update_locality_task.AsyncResult")
def test_failed(self, AsyncResult: Mock):
mock_result = Mock()
mock_result.state = CELERY_TASK_STATE.FAILURE
@@ -70,7 +70,7 @@ def test_failed(self, AsyncResult: Mock):
}
)

@patch("specifyweb.specify.views.update_locality_task.AsyncResult")
@patch("specifyweb.backend.locality_update_tool.views.update_locality_task.AsyncResult")
def test_parse_failed(self, AsyncResult: Mock):
mock_result = Mock()
mock_result.state = CELERY_TASK_STATE.SUCCESS
@@ -98,7 +98,7 @@ def test_parse_failed(self, AsyncResult: Mock):
}
)

@patch("specifyweb.specify.views.update_locality_task.AsyncResult")
@patch("specifyweb.backend.locality_update_tool.views.update_locality_task.AsyncResult")
def test_parsed(self, AsyncResult: Mock):
mock_result = Mock()
mock_result.state = CELERY_TASK_STATE.SUCCESS
@@ -149,7 +149,7 @@ def test_parsed(self, AsyncResult: Mock):
}
)

@patch("specifyweb.specify.views.update_locality_task.AsyncResult")
@patch("specifyweb.backend.locality_update_tool.views.update_locality_task.AsyncResult")
def test_succeeded(self, AsyncResult: Mock):
mock_result = Mock()
mock_result.state = LocalityUpdateStatus.SUCCEEDED
@@ -181,7 +181,7 @@ def test_succeeded(self, AsyncResult: Mock):
}
)

@patch("specifyweb.specify.views.update_locality_task.AsyncResult")
@patch("specifyweb.backend.locality_update_tool.views.update_locality_task.AsyncResult")
def test_succeeded_locality_rows(self, AsyncResult: Mock):
mock_result = Mock()
mock_result.state = LocalityUpdateStatus.SUCCEEDED
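The updated patch targets follow the usual unittest.mock rule: patch a name in the module where it is looked up, not where it is defined, so moving these views into specifyweb.backend.locality_update_tool means every target string has to move with them. A minimal sketch of the pattern, using hypothetical myapp.views / tasks module names rather than the exact Specify layout:

from unittest.mock import Mock, patch

# views.py does:  from . import tasks  ...  tasks.AsyncResult(task_id)
# so the test patches "myapp.views.tasks.AsyncResult", not "celery.result.AsyncResult".
@patch("myapp.views.tasks.AsyncResult")
def test_status_endpoint(AsyncResult: Mock) -> None:
    mock_result = Mock()
    mock_result.state = "FAILURE"
    AsyncResult.return_value = mock_result
    # The view under test now receives mock_result and never touches Celery.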
@@ -33,7 +33,7 @@ def test_no_ui_formatter(self):

self.assertEqual(parsed_with_value, parsed_with_value_result)

@patch("specifyweb.specify.update_locality.get_uiformatter")
@patch("specifyweb.backend.locality_update_tool.update_locality.get_uiformatter")
def test_cnn_formatter(self, get_uiformatter: Mock):

get_uiformatter.return_value = UIFormatter(
16 changes: 12 additions & 4 deletions specifyweb/backend/locality_update_tool/update_locality.py
@@ -379,11 +379,19 @@ def parse_locality_set(collection, raw_headers: list[str], data: list[list[str]]
locality_id: int | None = None if len(
locality_query) != 1 else locality_query[0].id

parsed_locality_fields = [parse_field(
collection, 'Locality', dict['field'], dict['value'], locality_id, row_number) for dict in locality_values if dict['value'].strip() != ""]
parsed_locality_fields = [
parse_field(
collection, 'Locality', d['field'], d['value'], locality_id, row_number
)
for d in locality_values
]

parsed_geocoorddetail_fields = [parse_field(
collection, 'Geocoorddetail', dict["field"], dict['value'], locality_id, row_number) for dict in geocoorddetail_values if dict['value'].strip() != ""]
parsed_geocoorddetail_fields = [
parse_field(
collection, 'Geocoorddetail', d['field'], d['value'], locality_id, row_number
)
for d in geocoorddetail_values
]

parsed_row, parsed_errors = merge_parse_results(
[*parsed_locality_fields, *parsed_geocoorddetail_fields], locality_id, row_number)
1 change: 0 additions & 1 deletion specifyweb/backend/stored_queries/build_models.py
@@ -141,4 +141,3 @@ def make_relationship(reldef):

for tabledef in datamodel.tables:
map_class(tabledef)

3 changes: 2 additions & 1 deletion specifyweb/backend/stored_queries/tests/tests.py
@@ -305,4 +305,5 @@ def test_sqlalchemy_model_errors(self):
"CollectionObjectGroup": {
"incorrect_direction": {"cojo": ["onetomany", "onetoone"]}
},
}
"SgrBatchMatchResultSet": {"not_found": ["items"]},
}
10 changes: 5 additions & 5 deletions specifyweb/backend/stored_queries/tests/tests_legacy.py
@@ -956,10 +956,10 @@ def test_sqlalchemy_model_errors(self):
},
"CollectionObjectGroup": {
"incorrect_direction": {
"cojo": [
"onetomany",
"onetoone"
]
"cojo": ["onetomany", "onetoone"]
}
},
}
"SgrBatchMatchResultSet": {
"not_found": ['items']
}
}
38 changes: 33 additions & 5 deletions specifyweb/backend/trees/extras.py
@@ -411,18 +411,46 @@ def synonymize(node, into, agent, user=None, collection=None):

# This check can be disabled by a remote pref
import specifyweb.backend.context.app_resource as app_resource
collection_prefs_json, _, __ = app_resource.get_app_resource(collection, user, 'CollectionPreferences')
if collection_prefs_json is not None:
collection_prefs_dict = json.loads(collection_prefs_json)

treeManagement_pref = collection_prefs_dict.get('treeManagement', {})
collection_prefs_dict = {} # always defined

res = app_resource.get_app_resource(collection, user, 'CollectionPreferences')
force_checks = (collection is None or user is None)
if res is not None:
collection_prefs_json, _, __ = res
if collection_prefs_json:
try:
collection_prefs_dict = json.loads(collection_prefs_json) or {}
except Exception:
collection_prefs_dict = {}

treeManagement_pref = collection_prefs_dict.get('treeManagement', {})
if force_checks and target.children.exists():
raise TreeBusinessRuleException(
f'Synonymizing "{node.fullname}" to "{into.fullname}" which has children',
{"tree": "Taxon",
"localizationKey": "nodeSynonimizeWithChildren",
"node": {
"id": node.id,
"rankid": node.rankid,
"fullName": node.fullname,
"children": list(node.children.values('id', 'fullname'))
},
"parent": {
"id": into.id,
"rankid": into.rankid,
"fullName": into.fullname,
"parentid": into.parent.id,
"children": list(into.children.values('id', 'fullname'))
}}
)
force_checks = (collection is None or user is None)
synonymized = treeManagement_pref.get('synonymized', {}) \
if isinstance(treeManagement_pref, dict) else {}

add_synonym_enabled = synonymized.get(r'^sp7\.allow_adding_child_to_synonymized_parent\.' + node.specify_model.name + '=(.+)', False) if isinstance(synonymized, dict) else False

if node.children.count() > 0 and (add_synonym_enabled is True):
if node.children.count() > 0 and (force_checks or add_synonym_enabled is False):
raise TreeBusinessRuleException(
f'Synonymizing node "{node.fullname}" which has children',
{"tree" : "Taxon",
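The reworked preference handling above is fail-closed: the synonymize check runs unless a well-formed CollectionPreferences entry explicitly disables it, and it always runs when collection or user is missing. A minimal sketch of that pattern, with an invented pref key standing in for the exact sp7 remote-pref name:

import json

def rule_is_enforced(prefs_json: str | None, pref_key: str) -> bool:
    """Return True when the business rule must still run (the fail-closed default)."""
    prefs: dict = {}
    if prefs_json:
        try:
            prefs = json.loads(prefs_json) or {}
        except (ValueError, TypeError):
            prefs = {}  # malformed preferences never disable a rule
    tree_prefs = prefs.get("treeManagement", {})
    if not isinstance(tree_prefs, dict):
        return True
    # Only an explicit opt-out turns the check off.
    return tree_prefs.get(pref_key, False) is not True

# Usage sketch ("allowSynonymizeWithChildren" is a made-up key):
assert rule_is_enforced(None, "allowSynonymizeWithChildren") is True
assert rule_is_enforced('{"treeManagement": {"allowSynonymizeWithChildren": true}}',
                        "allowSynonymizeWithChildren") is False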
47 changes: 42 additions & 5 deletions specifyweb/backend/workbench/upload/predicates.py
@@ -12,7 +12,7 @@
import specifyweb.specify.models as spmodels
from specifyweb.specify.utils.func import Func

from django.core.exceptions import ObjectDoesNotExist
from django.core.exceptions import ObjectDoesNotExist, FieldDoesNotExist, FieldError

from specifyweb.backend.workbench.upload.clone import GENERIC_FIELDS_TO_SKIP

@@ -39,6 +39,14 @@ def add_to_remove_node(previous: ToRemoveNode, new_node: ToRemoveNode) -> ToRemoveNode:
**{key: [*previous.get(key, []), *values] for key, values in new_node.items()},
}

def _model_supports_filter_key(model, key: str) -> bool:
field_name = key[:-3] if key.endswith("_id") else key
try:
model._meta.get_field(field_name)
return True
except FieldDoesNotExist:
return False


class ToRemove(NamedTuple):
model_name: str
@@ -124,7 +132,7 @@ def _smart_apply(
base_predicates = {
_get_field_name(field_name): value
for (field_name, value) in self.filters.items()
if not isinstance(value, list)
if not isinstance(value, list) and _model_supports_filter_key(current_model, field_name)
}

filtered = {
@@ -134,9 +142,19 @@ def _smart_apply(

unique_alias = next(get_unique_alias)

# Apply filters first
query = query.filter(**filtered)

# IMPORTANT: downstream reduction logic assumes every predicate level
# defines a "predicate-N" alias, so always alias the PK.
unique_alias = next(get_unique_alias)
alias_path = _get_field_name("id")
query = query.filter(**filtered).alias(**{unique_alias: F(alias_path)})
aliases = [*aliases, (alias_path, unique_alias)]
try:
query = query.alias(**{unique_alias: F(alias_path)})
aliases = [*aliases, (alias_path, unique_alias)]
except FieldError:
# Extremely defensive; every model should have "id"
pass

def _reduce_by_key(rel_name: str):
# mypy isn't able to infer types correctly
@@ -284,12 +302,31 @@ def canonicalize_remove_node(node: ToRemoveNode) -> Q:

def _map_matchee(matchee: list[ToRemoveMatchee], model_name: str) -> Exists:
model: Model = get_model(model_name)
qs = [Q(**match["filter_on"]) for match in matchee]

# Filter out any filter keys that don't exist on this model
qs: list[Q] = []
for match in matchee:
safe_filter_on = {
k: v
for k, v in match["filter_on"].items()
if _model_supports_filter_key(model, k)
}
# If nothing remains, this particular matchee can't apply to this model
if safe_filter_on:
qs.append(Q(**safe_filter_on))

# If none of the matchees had any applicable filter keys,
# make this Exists() always false by filtering on an empty pk set.
if not qs:
return Exists(model.objects.none())

qs_or = Func.make_ors(qs)
query = model.objects.filter(qs_or)

to_remove = [match["remove"] for match in matchee if match["remove"] is not None]
if to_remove:
query = query.exclude(Func.make_ors(to_remove))

return Exists(query)


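_model_supports_filter_key and the safe_filter_on loop above share one idea: validate filter kwargs against the model's metadata before handing them to the ORM, so a key that belongs to a different table is dropped instead of raising FieldError. A small sketch of that helper on its own (assumes a configured Django project; the Locality usage line is illustrative):

from django.core.exceptions import FieldDoesNotExist
from django.db.models import Model

def supported_filters(model: type[Model], filters: dict) -> dict:
    """Keep only keys that resolve to a real field, treating "x_id" as "x"."""
    safe = {}
    for key, value in filters.items():
        name = key[:-3] if key.endswith("_id") else key
        try:
            model._meta.get_field(name)
        except FieldDoesNotExist:
            continue
        safe[key] = value
    return safe

# Usage sketch: "division_id" is dropped if Locality has no such column.
#   Locality.objects.filter(**supported_filters(Locality, {"localityname": "x", "division_id": 2}))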
3 changes: 3 additions & 0 deletions specifyweb/backend/workbench/upload/scoping.py
@@ -67,6 +67,9 @@ def scoping_relationships(collection, table: Table) -> dict[str, int]:
try:
table.get_relationship("collection")
extra_static["collection_id"] = collection.id
extra_static["discipline_id"] = collection.discipline_id
extra_static["division_id"] = collection.discipline.division_id
extra_static["institution_id"] = collection.discipline.division.institution_id
except DoesNotExistError:
pass

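The extra scoping keys assume the standard Specify scoping chain, Collection -> Discipline -> Division -> Institution. A compact sketch of that derivation (attribute names mirror the diff; the helper itself is illustrative):

def derive_scope_ids(collection) -> dict[str, int]:
    # Each *_id is the raw foreign-key column, so only collection.discipline and
    # collection.discipline.division trigger extra queries when not select_related.
    return {
        "collection_id": collection.id,
        "discipline_id": collection.discipline_id,
        "division_id": collection.discipline.division_id,
        "institution_id": collection.discipline.division.institution_id,
    }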
12 changes: 9 additions & 3 deletions specifyweb/backend/workbench/upload/upload_table.py
@@ -791,7 +791,9 @@ def _do_clone(self, attrs) -> Any:

def _get_inserter(self):
def _inserter(model, attrs):
uploaded = model.objects.create(**attrs)
valid_fields = {f.attname for f in model._meta.concrete_fields}
filtered_attrs = {k: v for k, v in attrs.items() if k in valid_fields}
uploaded = model.objects.create(**filtered_attrs)
self.auditor.insert(uploaded, None)
return uploaded

@@ -1146,7 +1148,11 @@ def is_equal(old, new):
return old == new

return {
key: FieldChangeInfo(field_name=key, old_value=getattr(reference_record, key), new_value=new_value) # type: ignore
key: FieldChangeInfo(
field_name=key,
old_value=getattr(reference_record, key),
new_value=new_value,
) # type: ignore
for (key, new_value) in attrs.items()
if not is_equal(getattr(reference_record, key), new_value)
if hasattr(reference_record, key) and not is_equal(getattr(reference_record, key), new_value)
}
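The new inserter guards model.objects.create() by keeping only concrete columns, so relation payloads and helper keys left in attrs no longer raise TypeError. The same guard in isolation, as a sketch (assumes a configured Django project; safe_create is not a name from the PR):

from django.db.models import Model

def safe_create(model: type[Model], attrs: dict):
    # concrete_fields covers real columns; attname gives FK columns as "*_id",
    # so to-many payloads and helper keys are silently dropped instead of
    # blowing up inside create().
    valid = {f.attname for f in model._meta.concrete_fields}
    return model.objects.create(**{k: v for k, v in attrs.items() if k in valid})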
@@ -80,7 +80,8 @@ export function InteractionDialog({
);

const isLoanReturnLike =
isLoanReturn || (actionTable.name !== 'Loan' && actionTable.name.includes('Loan'));
isLoanReturn ||
(actionTable.name !== 'Loan' && actionTable.name.includes('Loan'));

const itemTable = isLoanReturnLike ? tables.Loan : tables.CollectionObject;

@@ -206,8 +207,7 @@ export function InteractionDialog({
)
).then((data) =>
availablePrepsReady(catalogNumbers, data, {
skipEntryMatch:
searchField.name.toLowerCase() !== 'catalognumber',
skipEntryMatch: searchField.name.toLowerCase() !== 'catalognumber',
})
)
);
@@ -377,7 +377,9 @@ export function InteractionDialog({
values,
isLoan
)
).then((data) => availablePrepsReady(values, data, { skipEntryMatch: true }))
).then((data) =>
availablePrepsReady(values, data, { skipEntryMatch: true })
)
);
}
