Legacy rename API

Kovid Goyal 2013-07-16 16:11:04 +05:30
parent 064294fa32
commit 17c520d1d9
4 changed files with 58 additions and 12 deletions


@@ -30,7 +30,7 @@ from calibre.ebooks.metadata.book.base import Metadata
 from calibre.ebooks.metadata.opf2 import metadata_to_opf
 from calibre.ptempfile import (base_dir, PersistentTemporaryFile,
     SpooledTemporaryFile)
-from calibre.utils.config import prefs
+from calibre.utils.config import prefs, tweaks
 from calibre.utils.date import now as nowf, utcnow, UNDEFINED_DATE
 from calibre.utils.icu import sort_key
@@ -1100,16 +1100,16 @@ class Cache(object):
         self._update_last_modified(tuple(formats_map.iterkeys()))
 
     @read_api
-    def get_next_series_num_for(self, series):
+    def get_next_series_num_for(self, series, field='series'):
         books = ()
-        sf = self.fields['series']
+        sf = self.fields[field]
         if series:
             q = icu_lower(series)
-            for val, book_ids in sf.iter_searchable_values(self._get_metadata, frozenset(self.all_book_ids())):
+            for val, book_ids in sf.iter_searchable_values(self._get_metadata, frozenset(self._all_book_ids())):
                 if q == icu_lower(val):
                     books = book_ids
                     break
-        series_indices = sorted(self._field_for('series_index', book_id) for book_id in books)
+        series_indices = sorted(self._field_for(sf.index_field.name, book_id) for book_id in books)
         return _get_next_series_num_for_list(tuple(series_indices), unwrap=False)
 
     @read_api
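
For illustration, a rough sketch of how the extended get_next_series_num_for() might be called once this change is in place; the library path, the Cache construction boilerplate and the '#myseries' custom column are assumptions, not part of this diff:

    # Sketch only: assumes an existing library at /path/to/library that has a
    # custom series-like column named '#myseries'.
    from calibre.db.backend import DB
    from calibre.db.cache import Cache

    cache = Cache(DB('/path/to/library'))
    cache.init()

    # Unchanged behaviour: field defaults to the builtin 'series' column.
    print(cache.get_next_series_num_for('A Series One'))

    # New in this commit: the next index for a custom series-like column.
    print(cache.get_next_series_num_for('My Series Two', field='#myseries'))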
@@ -1223,24 +1223,30 @@ class Cache(object):
         return val_map
 
     @write_api
-    def rename_items(self, field, item_id_to_new_name_map):
+    def rename_items(self, field, item_id_to_new_name_map, change_index=True):
+        f = self.fields[field]
         try:
-            func = self.fields[field].table.rename_item
+            func = f.table.rename_item
         except AttributeError:
             raise ValueError('Cannot rename items for one-one fields: %s' % field)
         affected_books = set()
+        moved_books = set()
         id_map = {}
         for item_id, new_name in item_id_to_new_name_map.iteritems():
             books, new_id = func(item_id, new_name, self.backend)
             affected_books.update(books)
             id_map[item_id] = new_id
+            if new_id != item_id:
+                moved_books.update(books)
         if affected_books:
             if field == 'authors':
-                self._set_field('author_sort', # also marks as dirty
+                self._set_field('author_sort',
                     {k:' & '.join(v) for k, v in self._author_sort_strings_for_books(affected_books).iteritems()})
                 self._update_path(affected_books, mark_as_dirtied=False)
-            else:
-                self._mark_as_dirty(affected_books)
+            elif change_index and hasattr(f, 'index_field') and tweaks['series_index_auto_increment'] != 'no_change':
+                for book_id in moved_books:
+                    self._set_field(f.index_field.name, {book_id:self._get_next_series_num_for(self._field_for(field, book_id), field=field)})
+            self._mark_as_dirty(affected_books)
         return affected_books, id_map
 
     @write_api
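
A sketch of the extended rename_items() signature in use; the cache object and both series names are assumptions made for the example:

    # Sketch only: 'cache' is an initialised Cache as in the previous sketch,
    # and both series names are assumed to already exist in the library.
    series_ids = {v:k for k, v in cache.get_id_map('series').iteritems()}
    sid = series_ids['A Series One']

    # Renaming onto an existing series merges the two items; books that move
    # to the surviving item get the next free series_index by default
    # (change_index=True). Passing change_index=False would leave their
    # series_index values untouched.
    affected_books, id_map = cache.rename_items('series', {sid:'A Series Two'})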


@@ -290,7 +290,7 @@ class LibraryDatabase(object):
 
     def authors_sort_strings(self, index, index_is_id=False):
         book_id = index if index_is_id else self.id(index)
-        return list(self.author_sort_strings_for_books.canonical_author_sort_for_books((book_id,))[book_id])
+        return list(self.new_api.author_sort_strings_for_books((book_id,))[book_id])
 
     def author_sort_from_book(self, index, index_is_id=False):
         return ' & '.join(self.authors_sort_strings(index, index_is_id=index_is_id))
@@ -500,6 +500,9 @@ class LibraryDatabase(object):
         book_id = index if index_is_id else self.id(index)
         return self.new_api.get_metadata(book_id, get_cover=get_cover, get_user_categories=get_user_categories, cover_as_data=cover_as_data)
 
+    def rename_series(self, old_id, new_name, change_index=True):
+        self.new_api.rename_items('series', {old_id:new_name}, change_index=change_index)
+
     # Private interface {{{
     def __iter__(self):
         for row in self.data.iterall():
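
For context, a short sketch of the new legacy wrapper; the LibraryDatabase instance and the series id are assumptions:

    # Sketch only: 'db' is a calibre.db.legacy.LibraryDatabase and 'sid' an
    # existing series item id; both are assumptions.
    db.rename_series(sid, 'Renamed Series')
    # or, to keep existing series_index values when books are merged into
    # another series item:
    # db.rename_series(sid, 'Renamed Series', change_index=False)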
@@ -582,6 +585,16 @@ for field in (
         return func
     setattr(LibraryDatabase, 'set_%s' % field.replace('!', ''), MT(setter(field)))
 
+for field in ('authors', 'tags', 'publisher'):
+    def renamer(field):
+        def func(self, old_id, new_name):
+            id_map = self.new_api.rename_items(field, {old_id:new_name})[1]
+            if field == 'authors':
+                return id_map[old_id]
+        return func
+    fname = field[:-1] if field in {'tags', 'authors'} else field
+    setattr(LibraryDatabase, 'rename_%s' % fname, MT(renamer(field)))
+
 LibraryDatabase.update_last_modified = MT(
     lambda self, book_ids, commit=False, now=None: self.new_api.update_last_modified(book_ids, now=now))
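
And a sketch of the dynamically generated renamers added above; the id variables are assumed to come from db.new_api.get_id_map() as in the tests below:

    # Sketch only: aid, tid and pid are item ids obtained via
    # db.new_api.get_id_map('authors' / 'tags' / 'publisher').
    new_author_id = db.rename_author(aid, 'Author Two')  # only the author renamer returns the new id
    db.rename_tag(tid, 'News')
    db.rename_publisher(pid, 'Publisher Two')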


@@ -472,6 +472,7 @@ class LegacyTest(BaseTest):
         nmi = [ndb.get_metadata(x) for x in (0, 1, 2)]
         self.assertEqual([x.author_sort_map for x in omi], [x.author_sort_map for x in nmi])
         self.assertEqual([x.author_link_map for x in omi], [x.author_link_map for x in nmi])
+        db.close()
 
         ndb = self.init_legacy(self.cloned_library)
         db = self.init_old(self.cloned_library)
@@ -554,6 +555,7 @@ class LegacyTest(BaseTest):
             ('@all_tags',),
             ('#tags', 0), ('#tags', 1), ('#tags', 2),
         ))
+        db.close()
 
         ndb = self.init_legacy(self.cloned_library)
         db = self.init_old(self.cloned_library)
@@ -563,6 +565,31 @@ class LegacyTest(BaseTest):
             ('@all_tags',),
             ('@tags', 0), ('@tags', 1), ('@tags', 2),
         ))
+        db.close()
+
+        ndb = self.init_legacy(self.cloned_library)
+        db = self.init_old(self.cloned_library)
+        a = {v:k for k, v in ndb.new_api.get_id_map('authors').iteritems()}['Author One']
+        t = {v:k for k, v in ndb.new_api.get_id_map('tags').iteritems()}['Tag One']
+        s = {v:k for k, v in ndb.new_api.get_id_map('series').iteritems()}['A Series One']
+        p = {v:k for k, v in ndb.new_api.get_id_map('publisher').iteritems()}['Publisher One']
+        run_funcs(self, db, ndb, (
+            ('rename_author', a, 'Author Two'),
+            ('rename_tag', t, 'News'),
+            ('rename_series', s, 'ss'),
+            ('rename_publisher', p, 'publisher one'),
+            (db.clean,),
+            (db.refresh,),
+            ('@all_tags',),
+            ('tags', 0), ('tags', 1), ('tags', 2),
+            ('series', 0), ('series', 1), ('series', 2),
+            ('publisher', 0), ('publisher', 1), ('publisher', 2),
+            ('series_index', 0), ('series_index', 1), ('series_index', 2),
+            ('authors', 0), ('authors', 1), ('authors', 2),
+            ('author_sort', 0), ('author_sort', 1), ('author_sort', 2),
+        ))
+        db.close()
 
     # }}}


@@ -541,5 +541,5 @@ class WritingTest(BaseTest):
         self.assertEqual(c.all_field_names('#series'), {'My Series Two'})
         self.assertEqual(c.field_for('#series', 2), 'My Series Two')
         self.assertEqual(c.field_for('#series_index', 1), 3.0)
-        self.assertEqual(c.field_for('#series_index', 2), 1.0)
+        self.assertEqual(c.field_for('#series_index', 2), 4.0)
     # }}}