Fix #2688 (duplicate tag entries in tag database) and add PDB, RB to internally viewed formats
parent f8f4a0fbe3
commit 998d7647a3
@@ -48,7 +48,8 @@ def _config():
               help=_('Defaults for conversion to LRF'))
     c.add_opt('LRF_ebook_viewer_options', default=None,
               help=_('Options for the LRF ebook viewer'))
-    c.add_opt('internally_viewed_formats', default=['LRF', 'EPUB', 'LIT', 'MOBI', 'PRC', 'HTML', 'FB2'],
+    c.add_opt('internally_viewed_formats', default=['LRF', 'EPUB', 'LIT',
+              'MOBI', 'PRC', 'HTML', 'FB2', 'PDB', 'RB'],
               help=_('Formats that are viewed using the internal viewer'))
     c.add_opt('column_map', default=ALL_COLUMNS,
               help=_('Columns to be displayed in the book list'))
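With this hunk, PDB and RB files open in calibre's built-in viewer instead of being handed to an external application. A minimal sketch of the membership check such an option enables (the helper names and the hard-coded list are illustrative assumptions, not calibre's actual code):

    # Illustrative sketch only: route a book to the internal or external viewer
    # based on its format, mirroring the semantics of 'internally_viewed_formats'.
    INTERNALLY_VIEWED = ['LRF', 'EPUB', 'LIT', 'MOBI', 'PRC',
                         'HTML', 'FB2', 'PDB', 'RB']

    def open_book(path, fmt, open_internal, open_external):
        # open_internal/open_external are caller-supplied callables (assumed).
        if fmt.upper() in INTERNALLY_VIEWED:
            open_internal(path)
        else:
            open_external(path)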
@@ -224,6 +224,10 @@ class ResultCache(SearchQueryParser):
         id = row if row_is_id else self._map_filtered[row]
         self._data[id][col] = val
 
+    def get(self, row, col, row_is_id=False):
+        id = row if row_is_id else self._map_filtered[row]
+        return self._data[id][col]
+
     def index(self, id, cache=False):
         x = self._map if cache else self._map_filtered
         return x.index(id)
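The new ResultCache.get is the read-side counterpart of the set method in the same hunk: it resolves a row (or, with row_is_id=True, a book id) to the cached record and returns the requested column. A hedged usage sketch, assuming db is a LibraryDatabase2 and FIELD_MAP maps field names to column indices as in the hunks below:

    # Read the cached tags string for a book by id, mirroring the existing
    # db.data.set(id, FIELD_MAP['tags'], ..., row_is_id=True) call.
    cached_tags = db.data.get(book_id, FIELD_MAP['tags'], row_is_id=True)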
@@ -1105,6 +1109,14 @@ class LibraryDatabase2(LibraryDatabase):
         if notify:
             self.notify('metadata', [id])
 
+    def get_tags(self, id):
+        result = self.conn.get(
+            'SELECT name FROM tags WHERE id IN (SELECT tag FROM books_tags_link WHERE book=?)',
+            (id,), all=True)
+        if not result:
+            return set([])
+        return set([r[0] for r in result])
+
     def set_tags(self, id, tags, append=False, notify=True):
         '''
         @param tags: list of strings
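get_tags returns the names of the tags already linked to a book as a set, which set_tags below uses for set arithmetic. A self-contained sketch of what the query does, run against a simplified stand-in for calibre's two tables using Python's sqlite3 module directly rather than calibre's connection wrapper (whose conn.get(..., all=True) appears above):

    import sqlite3

    conn = sqlite3.connect(':memory:')
    conn.executescript('''
        CREATE TABLE tags (id INTEGER PRIMARY KEY, name TEXT);
        CREATE TABLE books_tags_link (book INTEGER, tag INTEGER);
        INSERT INTO tags VALUES (1, 'fiction'), (2, 'history');
        INSERT INTO books_tags_link VALUES (10, 1), (10, 2), (11, 1);
    ''')
    rows = conn.execute(
        'SELECT name FROM tags WHERE id IN '
        '(SELECT tag FROM books_tags_link WHERE book=?)', (10,)).fetchall()
    print(set(r[0] for r in rows))  # {'fiction', 'history'}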
@@ -1113,7 +1125,8 @@ class LibraryDatabase2(LibraryDatabase):
         if not append:
             self.conn.execute('DELETE FROM books_tags_link WHERE book=?', (id,))
             self.conn.execute('DELETE FROM tags WHERE (SELECT COUNT(id) FROM books_tags_link WHERE tag=tags.id) < 1')
-        for tag in set(tags):
+        otags = self.get_tags(id)
+        for tag in (set(tags)-otags):
             tag = tag.strip()
             if not tag:
                 continue
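This is the core of the #2688 fix: tags the book already carries are subtracted before the insert loop runs, so re-applying an existing tag no longer creates a duplicate row in tags or books_tags_link. A worked example of the new filtering in plain Python:

    otags = {'fiction', 'history'}   # what get_tags(id) returned
    tags = ['fiction', 'war']        # what the caller passed to set_tags
    print(set(tags) - otags)         # {'war'} -- only 'war' reaches the insert loop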
@@ -1138,13 +1151,7 @@ class LibraryDatabase2(LibraryDatabase):
             self.conn.execute('INSERT INTO books_tags_link(book, tag) VALUES (?,?)',
                               (id, tid))
         self.conn.commit()
-        try:
-            otags = [t.strip() for t in self.data[self.data.row(id)][FIELD_MAP['tags']].split(',')]
-        except AttributeError:
-            otags = []
-        if not append:
-            otags = []
-        tags = ','.join(otags+tags)
+        tags = ','.join(self.get_tags(id))
         self.data.set(id, FIELD_MAP['tags'], tags, row_is_id=True)
         if notify:
             self.notify('metadata', [id])
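Finally, the in-memory cache is refreshed from the database via get_tags instead of being rebuilt by concatenating the previously cached string with the incoming tags, which is what used to duplicate entries in the book list as well. A small before/after comparison:

    # Old behaviour: joining the cached list with the incoming tags duplicates
    # any tag that was already present.
    otags, tags = ['fiction'], ['fiction', 'war']
    print(','.join(otags + tags))        # 'fiction,fiction,war'
    # New behaviour: the string is rebuilt from the de-duplicated set stored
    # in the database (set iteration order is arbitrary).
    print(','.join({'fiction', 'war'}))  # e.g. 'fiction,war'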