From bc82c58e0b5e8197622fc553feeedffb88109b22 Mon Sep 17 00:00:00 2001 From: alec_dev Date: Thu, 25 Sep 2025 16:04:33 -0500 Subject: [PATCH 01/11] path migration to dedupe and merge picklists --- .../0008_merge_duplicate_picklists.py | 62 +++++++++++++++++++ 1 file changed, 62 insertions(+) create mode 100644 specifyweb/backend/patches/migrations/0008_merge_duplicate_picklists.py diff --git a/specifyweb/backend/patches/migrations/0008_merge_duplicate_picklists.py b/specifyweb/backend/patches/migrations/0008_merge_duplicate_picklists.py new file mode 100644 index 00000000000..a01a4af79fc --- /dev/null +++ b/specifyweb/backend/patches/migrations/0008_merge_duplicate_picklists.py @@ -0,0 +1,62 @@ +from django.db import migrations +from django.db.models import Count, F + +def deduplicate_picklists(apps, schema_editor): + Picklist = apps.get_model('specify', 'Picklist') + PicklistItem = apps.get_model('specify', 'PicklistItem') + + duplicate_picklist_groups = ( + Picklist.objects + .values( + collection_name=F('collection__collectionName'), + name=F('name'), + tablename=F('tablename'), + ) + .annotate(pl_count=Count('picklistid')) + .filter(pl_count__gt=1) + ) + + for group in duplicate_picklist_groups: + picklists = Picklist.objects.filter( + collection__collectionName=group['collection_name'], + name=group['name'], + tablename=group['tablename'], + ).order_by('picklistid') + + if picklists.count() < 2: + continue + + primary_picklist = picklists.first() + duplicate_picklists = picklists.exclude(picklistid=primary_picklist.picklistid) + + # Before deleting duplicates, add any picklist items that don't exist in the + # primary picklist to the primary picklist. + for duplicate in duplicate_picklists: + items = PicklistItem.objects.filter(picklist=duplicate).order_by('picklistitemid') + for item in items: + existing_item = PicklistItem.objects.filter( # TODO: Verify these are the right fields to check for duplicates + picklist=primary_picklist, + value=item.value, + displayvalue=item.displayvalue + ).first() + if not existing_item: + item.picklist = primary_picklist + item.save() + else: + # TODO: Update references to the duplicate picklist if necessary + item.delete() + + duplicate.delete() + +class Migration(migrations.Migration): + dependencies = [ + ('patches', '0007_fix_tectonicunit_tree_root') + ] + + operations = [ + migrations.RunPython( + deduplicate_picklists, + migrations.RunPython.noop, + atomic=True + ) + ] From 06ac8275735ab9c19c239a33270a98e1918c6ed0 Mon Sep 17 00:00:00 2001 From: alec_dev Date: Thu, 2 Oct 2025 10:05:35 -0500 Subject: [PATCH 02/11] typo fixes Updated field names to match database schema and improved comments for clarity. 
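Concretely: the collection__collectionName lookups now use the lowercase collection__collectionname spelling that matches the model field, and the stale TODO markers are dropped from the item-matching comments. (See the full diff below.)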
--- .../patches/migrations/0008_merge_duplicate_picklists.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/specifyweb/backend/patches/migrations/0008_merge_duplicate_picklists.py b/specifyweb/backend/patches/migrations/0008_merge_duplicate_picklists.py index a01a4af79fc..2f0330b1aa2 100644 --- a/specifyweb/backend/patches/migrations/0008_merge_duplicate_picklists.py +++ b/specifyweb/backend/patches/migrations/0008_merge_duplicate_picklists.py @@ -8,7 +8,7 @@ def deduplicate_picklists(apps, schema_editor): duplicate_picklist_groups = ( Picklist.objects .values( - collection_name=F('collection__collectionName'), + collection_name=F('collection__collectionname'), name=F('name'), tablename=F('tablename'), ) @@ -18,7 +18,7 @@ def deduplicate_picklists(apps, schema_editor): for group in duplicate_picklist_groups: picklists = Picklist.objects.filter( - collection__collectionName=group['collection_name'], + collection__collectionname=group['collection_name'], name=group['name'], tablename=group['tablename'], ).order_by('picklistid') @@ -34,7 +34,7 @@ def deduplicate_picklists(apps, schema_editor): for duplicate in duplicate_picklists: items = PicklistItem.objects.filter(picklist=duplicate).order_by('picklistitemid') for item in items: - existing_item = PicklistItem.objects.filter( # TODO: Verify these are the right fields to check for duplicates + existing_item = PicklistItem.objects.filter( # Verify these are the right fields to check for duplicates picklist=primary_picklist, value=item.value, displayvalue=item.displayvalue @@ -43,7 +43,6 @@ def deduplicate_picklists(apps, schema_editor): item.picklist = primary_picklist item.save() else: - # TODO: Update references to the duplicate picklist if necessary item.delete() duplicate.delete() From e0787690431eaa5ba8ef2cc8aea014f02e891950 Mon Sep 17 00:00:00 2001 From: alec_dev Date: Thu, 2 Oct 2025 10:23:24 -0500 Subject: [PATCH 03/11] Refactor duplicate picklist query for clarity --- .../patches/migrations/0008_merge_duplicate_picklists.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/specifyweb/backend/patches/migrations/0008_merge_duplicate_picklists.py b/specifyweb/backend/patches/migrations/0008_merge_duplicate_picklists.py index 2f0330b1aa2..a2d20854802 100644 --- a/specifyweb/backend/patches/migrations/0008_merge_duplicate_picklists.py +++ b/specifyweb/backend/patches/migrations/0008_merge_duplicate_picklists.py @@ -8,8 +8,8 @@ def deduplicate_picklists(apps, schema_editor): duplicate_picklist_groups = ( Picklist.objects .values( - collection_name=F('collection__collectionname'), - name=F('name'), + 'collection__collectionname', + 'name', tablename=F('tablename'), ) .annotate(pl_count=Count('picklistid')) From 7ff0ba0efec94b7d7277f7c1884c513e611e000b Mon Sep 17 00:00:00 2001 From: alec_dev Date: Thu, 2 Oct 2025 10:35:55 -0500 Subject: [PATCH 04/11] fix values in duplicate_picklist_groups query --- .../0008_merge_duplicate_picklists.py | 120 +++++++++--------- 1 file changed, 60 insertions(+), 60 deletions(-) diff --git a/specifyweb/backend/patches/migrations/0008_merge_duplicate_picklists.py b/specifyweb/backend/patches/migrations/0008_merge_duplicate_picklists.py index a2d20854802..7ef7c0e68d1 100644 --- a/specifyweb/backend/patches/migrations/0008_merge_duplicate_picklists.py +++ b/specifyweb/backend/patches/migrations/0008_merge_duplicate_picklists.py @@ -1,61 +1,61 @@ -from django.db import migrations -from django.db.models import Count, F - -def deduplicate_picklists(apps, schema_editor): - 
Picklist = apps.get_model('specify', 'Picklist') - PicklistItem = apps.get_model('specify', 'PicklistItem') - - duplicate_picklist_groups = ( - Picklist.objects - .values( - 'collection__collectionname', - 'name', - tablename=F('tablename'), - ) - .annotate(pl_count=Count('picklistid')) - .filter(pl_count__gt=1) - ) - - for group in duplicate_picklist_groups: - picklists = Picklist.objects.filter( - collection__collectionname=group['collection_name'], - name=group['name'], - tablename=group['tablename'], - ).order_by('picklistid') - - if picklists.count() < 2: - continue - - primary_picklist = picklists.first() - duplicate_picklists = picklists.exclude(picklistid=primary_picklist.picklistid) - - # Before deleting duplicates, add any picklist items that don't exist in the - # primary picklist to the primary picklist. - for duplicate in duplicate_picklists: - items = PicklistItem.objects.filter(picklist=duplicate).order_by('picklistitemid') - for item in items: - existing_item = PicklistItem.objects.filter( # Verify these are the right fields to check for duplicates - picklist=primary_picklist, - value=item.value, - displayvalue=item.displayvalue - ).first() - if not existing_item: - item.picklist = primary_picklist - item.save() - else: - item.delete() - - duplicate.delete() - -class Migration(migrations.Migration): - dependencies = [ - ('patches', '0007_fix_tectonicunit_tree_root') - ] - - operations = [ - migrations.RunPython( - deduplicate_picklists, - migrations.RunPython.noop, - atomic=True + from django.db import migrations + from django.db.models import Count, F + + def deduplicate_picklists(apps, schema_editor): + Picklist = apps.get_model('specify', 'Picklist') + PicklistItem = apps.get_model('specify', 'PicklistItem') + + duplicate_picklist_groups = ( + Picklist.objects + .values( + 'name', + 'tablename', + collection_name=F('collection__collectionname'), + ) + .annotate(pl_count=Count('picklistid')) + .filter(pl_count__gt=1) ) - ] + + for group in duplicate_picklist_groups: + picklists = Picklist.objects.filter( + collection__collectionname=group['collection_name'], + name=group['name'], + tablename=group['tablename'], + ).order_by('picklistid') + + if picklists.count() < 2: + continue + + primary_picklist = picklists.first() + duplicate_picklists = picklists.exclude(picklistid=primary_picklist.picklistid) + + # Before deleting duplicates, add any picklist items that don't exist in the + # primary picklist to the primary picklist. 
+ for duplicate in duplicate_picklists: + items = PicklistItem.objects.filter(picklist=duplicate).order_by('picklistitemid') + for item in items: + existing_item = PicklistItem.objects.filter( # Verify these are the right fields to check for duplicates + picklist=primary_picklist, + value=item.value, + displayvalue=item.displayvalue + ).first() + if not existing_item: + item.picklist = primary_picklist + item.save() + else: + item.delete() + + duplicate.delete() + + class Migration(migrations.Migration): + dependencies = [ + ('patches', '0007_fix_tectonicunit_tree_root') + ] + + operations = [ + migrations.RunPython( + deduplicate_picklists, + migrations.RunPython.noop, + atomic=True + ) + ] From cb7c18c7cab4480c11c5191cec09de60726b80f4 Mon Sep 17 00:00:00 2001 From: alec_dev Date: Thu, 2 Oct 2025 10:47:42 -0500 Subject: [PATCH 05/11] indent fix --- .../0008_merge_duplicate_picklists.py | 120 +++++++++--------- 1 file changed, 60 insertions(+), 60 deletions(-) diff --git a/specifyweb/backend/patches/migrations/0008_merge_duplicate_picklists.py b/specifyweb/backend/patches/migrations/0008_merge_duplicate_picklists.py index 7ef7c0e68d1..d92b7d53c51 100644 --- a/specifyweb/backend/patches/migrations/0008_merge_duplicate_picklists.py +++ b/specifyweb/backend/patches/migrations/0008_merge_duplicate_picklists.py @@ -1,61 +1,61 @@ - from django.db import migrations - from django.db.models import Count, F - - def deduplicate_picklists(apps, schema_editor): - Picklist = apps.get_model('specify', 'Picklist') - PicklistItem = apps.get_model('specify', 'PicklistItem') - - duplicate_picklist_groups = ( - Picklist.objects - .values( - 'name', - 'tablename', - collection_name=F('collection__collectionname'), - ) - .annotate(pl_count=Count('picklistid')) - .filter(pl_count__gt=1) +from django.db import migrations +from django.db.models import Count, F + +def deduplicate_picklists(apps, schema_editor): + Picklist = apps.get_model('specify', 'Picklist') + PicklistItem = apps.get_model('specify', 'PicklistItem') + + duplicate_picklist_groups = ( + Picklist.objects + .values( + 'name', + 'tablename', + collection_name=F('collection__collectionname'), ) - - for group in duplicate_picklist_groups: - picklists = Picklist.objects.filter( - collection__collectionname=group['collection_name'], - name=group['name'], - tablename=group['tablename'], - ).order_by('picklistid') - - if picklists.count() < 2: - continue - - primary_picklist = picklists.first() - duplicate_picklists = picklists.exclude(picklistid=primary_picklist.picklistid) - - # Before deleting duplicates, add any picklist items that don't exist in the - # primary picklist to the primary picklist. 
- for duplicate in duplicate_picklists: - items = PicklistItem.objects.filter(picklist=duplicate).order_by('picklistitemid') - for item in items: - existing_item = PicklistItem.objects.filter( # Verify these are the right fields to check for duplicates - picklist=primary_picklist, - value=item.value, - displayvalue=item.displayvalue - ).first() - if not existing_item: - item.picklist = primary_picklist - item.save() - else: - item.delete() - - duplicate.delete() - - class Migration(migrations.Migration): - dependencies = [ - ('patches', '0007_fix_tectonicunit_tree_root') - ] - - operations = [ - migrations.RunPython( - deduplicate_picklists, - migrations.RunPython.noop, - atomic=True - ) - ] + .annotate(pl_count=Count('picklistid')) + .filter(pl_count__gt=1) + ) + + for group in duplicate_picklist_groups: + picklists = Picklist.objects.filter( + collection__collectionname=group['collection_name'], + name=group['name'], + tablename=group['tablename'], + ).order_by('picklistid') + + if picklists.count() < 2: + continue + + primary_picklist = picklists.first() + duplicate_picklists = picklists.exclude(picklistid=primary_picklist.picklistid) + + # Before deleting duplicates, add any picklist items that don't exist in the + # primary picklist to the primary picklist. + for duplicate in duplicate_picklists: + items = PicklistItem.objects.filter(picklist=duplicate).order_by('picklistitemid') + for item in items: + existing_item = PicklistItem.objects.filter( # Verify these are the right fields to check for duplicates + picklist=primary_picklist, + value=item.value, + displayvalue=item.displayvalue + ).first() + if not existing_item: + item.picklist = primary_picklist + item.save() + else: + item.delete() + + duplicate.delete() + +class Migration(migrations.Migration): + dependencies = [ + ('patches', '0007_fix_tectonicunit_tree_root') + ] + + operations = [ + migrations.RunPython( + deduplicate_picklists, + migrations.RunPython.noop, + atomic=True + ) + ] From 7637e8bf79fe6dfeacb728272d7b0e839edc2f63 Mon Sep 17 00:00:00 2001 From: alec_dev Date: Thu, 2 Oct 2025 11:08:26 -0500 Subject: [PATCH 06/11] fix picklist id field reference --- .../patches/migrations/0008_merge_duplicate_picklists.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/specifyweb/backend/patches/migrations/0008_merge_duplicate_picklists.py b/specifyweb/backend/patches/migrations/0008_merge_duplicate_picklists.py index d92b7d53c51..9da168b888e 100644 --- a/specifyweb/backend/patches/migrations/0008_merge_duplicate_picklists.py +++ b/specifyweb/backend/patches/migrations/0008_merge_duplicate_picklists.py @@ -12,7 +12,7 @@ def deduplicate_picklists(apps, schema_editor): 'tablename', collection_name=F('collection__collectionname'), ) - .annotate(pl_count=Count('picklistid')) + .annotate(pl_count=Count('id')) .filter(pl_count__gt=1) ) @@ -21,7 +21,7 @@ def deduplicate_picklists(apps, schema_editor): collection__collectionname=group['collection_name'], name=group['name'], tablename=group['tablename'], - ).order_by('picklistid') + ).order_by('id') if picklists.count() < 2: continue From a266b1bb1663deec354b2f6883920380f2cbd006 Mon Sep 17 00:00:00 2001 From: alec_dev Date: Thu, 2 Oct 2025 16:35:20 -0500 Subject: [PATCH 07/11] change comparison logic to determining duplicates --- .../0008_merge_duplicate_picklists.py | 97 +++++++++++-------- 1 file changed, 59 insertions(+), 38 deletions(-) diff --git a/specifyweb/backend/patches/migrations/0008_merge_duplicate_picklists.py 
b/specifyweb/backend/patches/migrations/0008_merge_duplicate_picklists.py index 9da168b888e..b4c7baba69f 100644 --- a/specifyweb/backend/patches/migrations/0008_merge_duplicate_picklists.py +++ b/specifyweb/backend/patches/migrations/0008_merge_duplicate_picklists.py @@ -1,61 +1,82 @@ from django.db import migrations -from django.db.models import Count, F - + def deduplicate_picklists(apps, schema_editor): Picklist = apps.get_model('specify', 'Picklist') PicklistItem = apps.get_model('specify', 'PicklistItem') - - duplicate_picklist_groups = ( + + # Find groups that are duplicates by business key + duplicate_groups = ( Picklist.objects - .values( - 'name', - 'tablename', - collection_name=F('collection__collectionname'), - ) - .annotate(pl_count=Count('id')) + .values('name', 'tablename', 'fieldname', 'collection') # <-- use field names + .annotate(pl_count=migrations.models.Count('id')) .filter(pl_count__gt=1) ) - for group in duplicate_picklist_groups: - picklists = Picklist.objects.filter( - collection__collectionname=group['collection_name'], - name=group['name'], - tablename=group['tablename'], - ).order_by('id') + for group in duplicate_groups: + # Order by id so the lowest-id is the "primary" + picklists = ( + Picklist.objects + .filter( + name=group['name'], + tablename=group['tablename'], + fieldname=group['fieldname'], + collection=group['collection'], + ) + .order_by('id') + ) + # Sanity if picklists.count() < 2: continue - primary_picklist = picklists.first() - duplicate_picklists = picklists.exclude(picklistid=primary_picklist.picklistid) - - # Before deleting duplicates, add any picklist items that don't exist in the - # primary picklist to the primary picklist. - for duplicate in duplicate_picklists: - items = PicklistItem.objects.filter(picklist=duplicate).order_by('picklistitemid') - for item in items: - existing_item = PicklistItem.objects.filter( # Verify these are the right fields to check for duplicates - picklist=primary_picklist, - value=item.value, - displayvalue=item.displayvalue - ).first() - if not existing_item: - item.picklist = primary_picklist - item.save() + primary = picklists.first() + duplicates = picklists.exclude(id=primary.id) # <-- use 'id' + + # Preload existing (title, value) pairs from the primary to avoid dupes + existing_pairs = set( + PicklistItem.objects + .filter(picklist=primary) + .values_list('title', 'value') + ) + + for dup in duplicates: + # Grab all items on the duplicate picklist + dup_items = list( + PicklistItem.objects + .filter(picklist=dup) + .only('id', 'title', 'value', 'picklist') + .order_by('id') + ) + + # Partition into items we should move vs delete + to_move = [] + to_delete_ids = [] + for it in dup_items: + key = (it.title, it.value) + if key in existing_pairs: + to_delete_ids.append(it.id) else: - item.delete() + it.picklist = primary + to_move.append(it) + existing_pairs.add(key) # avoid creating a dupe on later items - duplicate.delete() + if to_move: + PicklistItem.objects.bulk_update(to_move, ['picklist']) + if to_delete_ids: + PicklistItem.objects.filter(id__in=to_delete_ids).delete() + + # Remove the empty duplicate picklist + dup.delete() class Migration(migrations.Migration): dependencies = [ - ('patches', '0007_fix_tectonicunit_tree_root') + ('patches', '0007_fix_tectonicunit_tree_root'), ] operations = [ migrations.RunPython( deduplicate_picklists, - migrations.RunPython.noop, - atomic=True - ) + reverse_code=migrations.RunPython.noop, + atomic=True, + ), ] From e6b17d4e212b1860f288b0bff17ecee8e979303f Mon Sep 
17 00:00:00 2001 From: alec_dev Date: Thu, 2 Oct 2025 16:48:11 -0500 Subject: [PATCH 08/11] fix Count import Added import for Count to facilitate deduplication. --- .../backend/patches/migrations/0008_merge_duplicate_picklists.py | 1 + 1 file changed, 1 insertion(+) diff --git a/specifyweb/backend/patches/migrations/0008_merge_duplicate_picklists.py b/specifyweb/backend/patches/migrations/0008_merge_duplicate_picklists.py index b4c7baba69f..36471000cf7 100644 --- a/specifyweb/backend/patches/migrations/0008_merge_duplicate_picklists.py +++ b/specifyweb/backend/patches/migrations/0008_merge_duplicate_picklists.py @@ -1,4 +1,5 @@ from django.db import migrations +from django.db.models import Count def deduplicate_picklists(apps, schema_editor): Picklist = apps.get_model('specify', 'Picklist') From 8c2e5b045f01fa63c604382a5586e1b6897b7993 Mon Sep 17 00:00:00 2001 From: alec_dev Date: Fri, 3 Oct 2025 08:54:58 -0500 Subject: [PATCH 09/11] Count fix --- .../patches/migrations/0008_merge_duplicate_picklists.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specifyweb/backend/patches/migrations/0008_merge_duplicate_picklists.py b/specifyweb/backend/patches/migrations/0008_merge_duplicate_picklists.py index 36471000cf7..536e3e89ab1 100644 --- a/specifyweb/backend/patches/migrations/0008_merge_duplicate_picklists.py +++ b/specifyweb/backend/patches/migrations/0008_merge_duplicate_picklists.py @@ -9,7 +9,7 @@ def deduplicate_picklists(apps, schema_editor): duplicate_groups = ( Picklist.objects .values('name', 'tablename', 'fieldname', 'collection') # <-- use field names - .annotate(pl_count=migrations.models.Count('id')) + .annotate(pl_count=Count('id')) .filter(pl_count__gt=1) ) From c3f7ad3e1d26522ce3733e154e77b396f033c175 Mon Sep 17 00:00:00 2001 From: alec_dev Date: Fri, 3 Oct 2025 09:10:27 -0500 Subject: [PATCH 10/11] cleanup --- .../migrations/0008_merge_duplicate_picklists.py | 13 ++++--------- 1 file changed, 4 insertions(+), 9 deletions(-) diff --git a/specifyweb/backend/patches/migrations/0008_merge_duplicate_picklists.py b/specifyweb/backend/patches/migrations/0008_merge_duplicate_picklists.py index 536e3e89ab1..d31f9021868 100644 --- a/specifyweb/backend/patches/migrations/0008_merge_duplicate_picklists.py +++ b/specifyweb/backend/patches/migrations/0008_merge_duplicate_picklists.py @@ -5,16 +5,14 @@ def deduplicate_picklists(apps, schema_editor): Picklist = apps.get_model('specify', 'Picklist') PicklistItem = apps.get_model('specify', 'PicklistItem') - # Find groups that are duplicates by business key duplicate_groups = ( Picklist.objects - .values('name', 'tablename', 'fieldname', 'collection') # <-- use field names + .values('name', 'tablename', 'fieldname', 'collection') .annotate(pl_count=Count('id')) .filter(pl_count__gt=1) ) for group in duplicate_groups: - # Order by id so the lowest-id is the "primary" picklists = ( Picklist.objects .filter( @@ -26,14 +24,12 @@ def deduplicate_picklists(apps, schema_editor): .order_by('id') ) - # Sanity if picklists.count() < 2: continue primary = picklists.first() - duplicates = picklists.exclude(id=primary.id) # <-- use 'id' + duplicates = picklists.exclude(id=primary.id) - # Preload existing (title, value) pairs from the primary to avoid dupes existing_pairs = set( PicklistItem.objects .filter(picklist=primary) @@ -41,7 +37,6 @@ def deduplicate_picklists(apps, schema_editor): ) for dup in duplicates: - # Grab all items on the duplicate picklist dup_items = list( PicklistItem.objects .filter(picklist=dup) @@ -49,7 +44,7 @@ 
def deduplicate_picklists(apps, schema_editor): .order_by('id') ) - # Partition into items we should move vs delete + # Partition into items to be either move or delete to_move = [] to_delete_ids = [] for it in dup_items: @@ -59,7 +54,7 @@ def deduplicate_picklists(apps, schema_editor): else: it.picklist = primary to_move.append(it) - existing_pairs.add(key) # avoid creating a dupe on later items + existing_pairs.add(key) if to_move: PicklistItem.objects.bulk_update(to_move, ['picklist']) From 5f7659ee04ae82d53c2efdcfaa570a49797b27dc Mon Sep 17 00:00:00 2001 From: alec_dev Date: Mon, 3 Nov 2025 16:27:54 +0000 Subject: [PATCH 11/11] Lint code with ESLint and Prettier Triggered by 7e5adb0093b1078c0b714761ed6fe9a611c3515a on branch refs/heads/issue-3970 --- .../js_src/lib/components/DataModel/businessRules.ts | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/specifyweb/frontend/js_src/lib/components/DataModel/businessRules.ts b/specifyweb/frontend/js_src/lib/components/DataModel/businessRules.ts index f7581eb2b30..868b11dac2a 100644 --- a/specifyweb/frontend/js_src/lib/components/DataModel/businessRules.ts +++ b/specifyweb/frontend/js_src/lib/components/DataModel/businessRules.ts @@ -7,7 +7,11 @@ import { softFail } from '../Errors/Crash'; import { isTreeResource } from '../InitialContext/treeRanks'; import type { BusinessRuleDefs } from './businessRuleDefs'; import { businessRuleDefs } from './businessRuleDefs'; -import { backboneFieldSeparator, backendFilter, djangoLookupSeparator } from './helpers'; +import { + backboneFieldSeparator, + backendFilter, + djangoLookupSeparator, +} from './helpers'; import type { AnySchema, AnyTree, @@ -316,10 +320,7 @@ export class BusinessRuleManager { ) ); - const stringValuesAreEqual = ( - left: string, - right: string - ): boolean => + const stringValuesAreEqual = (left: string, right: string): boolean => rule.isDatabaseConstraint ? left.localeCompare(right, undefined, { sensitivity: 'accent' }) === 0 : left === right;
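---

A quick post-migration sanity check, offered as a sketch only: the grouping used by deduplicate_picklists should return no rows once the migration has run. The import path below is an assumption about the project layout (the migration only references the model via apps.get_model('specify', 'Picklist')), so adjust it as needed before running this in a Django shell.

    # Sketch: verify no duplicate picklists remain after applying 0008.
    # Assumed import path; the grouped fields mirror deduplicate_picklists above.
    from django.db.models import Count

    from specifyweb.specify.models import Picklist  # assumption: adjust to actual layout

    remaining_duplicates = (
        Picklist.objects
        .values('name', 'tablename', 'fieldname', 'collection')
        .annotate(pl_count=Count('id'))
        .filter(pl_count__gt=1)
    )

    print(list(remaining_duplicates))  # expected: []

Because the migration keeps the lowest-id picklist in each group and pre-loads the primary's (title, value) pairs before moving items, a re-run finds empty groups and changes nothing, so this check should stay empty on repeated migrations as well.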