newdb: Handle duplicate values when setting many-many fields

Fixes an error when merging book records that have the same tags.
Kovid Goyal 2013-08-05 21:43:29 +05:30
parent 31666454f3
commit 602c3d22a5
2 changed files with 12 additions and 0 deletions


@@ -292,6 +292,9 @@ class WritingTest(BaseTest):
        ae(c.field_for('sort', 1), 'Moose, The')
        ae(c.field_for('sort', 2), 'Cat')
        # Test setting with the same value repeated
        ae(sf('tags', {3: ('a', 'b', 'a')}), {3})
    # }}}

    def test_dirtied(self): # {{{
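For context, the new assertion passes the same tag twice for book 3 and expects the call to succeed, reporting only book 3 as changed. A minimal standalone sketch of that behaviour, using a hypothetical helper rather than calibre's actual sf()/set_field API:

def set_tags(book_tags):
    # Hypothetical stand-in for the cache's set_field('tags', ...) used via sf() above.
    changed = set()
    for book_id, tags in book_tags.items():
        deduped = tuple(dict.fromkeys(tags))  # order-preserving de-duplication
        # ... the real code would write `deduped` to the many-many link table here ...
        changed.add(book_id)
    return changed

assert set_tags({3: ('a', 'b', 'a')}) == {3}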


@@ -337,6 +337,14 @@ def many_one(book_id_val_map, db, field, allow_case_change, *args):
# }}}

# Many-Many fields {{{

def uniq(vals):
    ' Remove all duplicates from vals, while preserving order. Items in vals must be hashable '
    vals = vals or ()
    seen = set()
    seen_add = seen.add
    return tuple(x for x in vals if x not in seen and not seen_add(x))

def many_many(book_id_val_map, db, field, allow_case_change, *args):
    dirtied = set()
    m = field.metadata
@@ -349,6 +357,7 @@ def many_many(book_id_val_map, db, field, allow_case_change, *args):
    rid_map = {kmap(item):item_id for item_id, item in table.id_map.iteritems()}
    val_map = {}
    case_changes = {}
    book_id_val_map = {k:uniq(vals) for k, vals in book_id_val_map.iteritems()}
    for vals in book_id_val_map.itervalues():
        for val in vals:
            get_db_id(val, db, m, table, kmap, rid_map, allow_case_change,
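
Taken together, the change normalises every incoming value tuple before any item ids are looked up or link rows written. A self-contained sketch of the same idiom in modern Python (dict.items() in place of the Python 2 iteritems() used above; the sample mapping is illustrative, not from the commit):

def uniq(vals):
    ' Remove all duplicates from vals, while preserving order. Items in vals must be hashable '
    vals = vals or ()
    seen = set()
    seen_add = seen.add
    return tuple(x for x in vals if x not in seen and not seen_add(x))

# Normalise the whole mapping up front, as the added line in many_many() does.
book_id_val_map = {3: ('a', 'b', 'a'), 5: ('x', 'x')}
book_id_val_map = {k: uniq(vals) for k, vals in book_id_val_map.items()}
assert book_id_val_map == {3: ('a', 'b'), 5: ('x',)}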