mirror of
https://github.com/kovidgoyal/calibre.git
synced 2025-07-09 03:04:10 -04:00
Implement adding of catalogs and news
This commit is contained in:
parent
9f1b2ee1fa
commit
937f475a9e
@@ -100,3 +100,66 @@ def recursive_import(db, root, single_book_per_directory=True,
|
||||
break
|
||||
return duplicates
|
||||
|
||||
def add_catalog(cache, path, title):
    '''
    Add the catalog file at ``path`` to the database behind ``cache``.

    If a book with the same title and the tag Catalog already exists, its
    metadata and format are replaced in place instead of creating a
    duplicate entry.

    :param cache: the new-API Cache object (write lock is taken here)
    :param path: filesystem path to the catalog file
    :param title: title to force onto the catalog entry
    :return: the database id of the created or updated book entry
    '''
    from calibre.ebooks.metadata.book.base import Metadata
    from calibre.ebooks.metadata.meta import get_metadata
    from calibre.utils.date import utcnow

    fmt = os.path.splitext(path)[1][1:].lower()
    with lopen(path, 'rb') as stream, cache.write_lock:
        # Look for an existing catalog entry (same title + Catalog tag) to
        # update instead of duplicating. Quotes in the title are escaped so
        # the search expression stays well formed.
        matches = cache._search('title:="%s" and tags:="%s"' % (title.replace('"', '\\"'), _('Catalog')), None)
        db_id = None
        if matches:
            db_id = list(matches)[0]
        try:
            mi = get_metadata(stream, fmt)
            mi.authors = ['calibre']
        except Exception:
            # File could not be parsed for metadata -- fall back to a
            # minimal Metadata object. (Was a bare except; narrowed so
            # KeyboardInterrupt/SystemExit are not swallowed.)
            mi = Metadata(title, ['calibre'])
        # Force title/authors/tags/dates regardless of what the file claims
        mi.title, mi.authors = title, ['calibre']
        mi.tags = [_('Catalog')]
        mi.pubdate = mi.timestamp = utcnow()
        if fmt == 'mobi':
            # MOBI catalogs carry their cover inside the file; do not store
            # a separate cover for the entry
            mi.cover, mi.cover_data = None, (None, None)
        if db_id is None:
            db_id = cache._create_book_entry(mi, apply_import_tags=False)
        else:
            cache._set_metadata(db_id, mi)
        cache._add_format(db_id, fmt, stream)

    return db_id
|
||||
|
||||
def add_news(cache, path, arg):
    '''
    Add a news download to the database behind ``cache``.

    :param cache: the new-API Cache object (write lock is taken here)
    :param path: filesystem path to the news file, or an open file-like
                 object with ``read`` (in which case it is not closed here)
    :param arg: dict with keys ``title``, ``add_title_tag`` and
                ``custom_tags`` controlling the tags applied to the entry
    :return: the database id of the newly created book entry
    '''
    from calibre.ebooks.metadata.meta import get_metadata
    from calibre.utils.date import utcnow

    fmt = os.path.splitext(getattr(path, 'name', path))[1][1:].lower()
    stream = path if hasattr(path, 'read') else lopen(path, 'rb')
    try:
        stream.seek(0)
        mi = get_metadata(stream, fmt, use_libprs_metadata=False,
                force_read_metadata=True)
        # Force the author to calibre as the auto delete of old news checks for
        # both the author==calibre and the tag News
        mi.authors = ['calibre']
        stream.seek(0)
        with cache.write_lock:
            if mi.series_index is None:
                mi.series_index = cache._get_next_series_num_for(mi.series)
            mi.tags = [_('News')]
            if arg['add_title_tag']:
                mi.tags += [arg['title']]
            if arg['custom_tags']:
                mi.tags += arg['custom_tags']
            if mi.pubdate is None:
                mi.pubdate = utcnow()
            if mi.timestamp is None:
                mi.timestamp = utcnow()

            db_id = cache._create_book_entry(mi, apply_import_tags=False)
            cache._add_format(db_id, fmt, stream)
    finally:
        # Close the stream we opened even if metadata reading or the db
        # operations raise; a caller-supplied stream is left open.
        if not hasattr(path, 'read'):
            stream.close()
    return db_id
|
||||
|
||||
|
||||
|
@@ -11,7 +11,9 @@ from functools import partial
|
||||
from future_builtins import zip
|
||||
|
||||
from calibre.db import _get_next_series_num_for_list, _get_series_values
|
||||
from calibre.db.adding import find_books_in_directory, import_book_directory_multiple, import_book_directory, recursive_import
|
||||
from calibre.db.adding import (
|
||||
find_books_in_directory, import_book_directory_multiple,
|
||||
import_book_directory, recursive_import, add_catalog, add_news)
|
||||
from calibre.db.backend import DB
|
||||
from calibre.db.cache import Cache
|
||||
from calibre.db.categories import CATEGORY_SORTS
|
||||
@@ -205,6 +207,13 @@ class LibraryDatabase(object):
|
||||
def recursive_import(self, root, single_book_per_directory=True,
        callback=None, added_ids=None):
    # Thin legacy wrapper: delegate to the module-level recursive_import,
    # forwarding every option unchanged.
    return recursive_import(
        self, root,
        single_book_per_directory=single_book_per_directory,
        callback=callback, added_ids=added_ids)
|
||||
|
||||
def add_catalog(self, path, title):
    # Legacy-API shim: forward to the module-level add_catalog using the
    # new-style Cache object.
    cache = self.new_api
    return add_catalog(cache, path, title)
|
||||
|
||||
def add_news(self, path, arg):
    # Legacy-API shim: forward to the module-level add_news using the
    # new-style Cache object.
    cache = self.new_api
    return add_news(cache, path, arg)
|
||||
|
||||
# }}}
|
||||
|
||||
# Private interface {{{
|
||||
|
@@ -526,7 +526,7 @@ class Parser(SearchQueryParser):
|
||||
if dt == 'bool':
|
||||
return self.bool_search(icu_lower(query),
|
||||
partial(self.field_iter, location, candidates),
|
||||
self.dbcache.pref('bools_are_tristate'))
|
||||
self.dbcache._pref('bools_are_tristate'))
|
||||
|
||||
# special case: colon-separated fields such as identifiers. isbn
|
||||
# is a special case within the case
|
||||
@@ -630,7 +630,7 @@ class Parser(SearchQueryParser):
|
||||
if len(query) < 2:
|
||||
return matches
|
||||
|
||||
user_cats = self.dbcache.pref('user_categories')
|
||||
user_cats = self.dbcache._pref('user_categories')
|
||||
c = set(candidates)
|
||||
|
||||
if query.startswith('.'):
|
||||
@@ -674,7 +674,7 @@ class Search(object):
|
||||
if search_restriction:
|
||||
q = u'(%s) and (%s)' % (search_restriction, query)
|
||||
|
||||
all_book_ids = dbcache.all_book_ids(type=set)
|
||||
all_book_ids = dbcache._all_book_ids(type=set)
|
||||
if not q:
|
||||
return all_book_ids
|
||||
|
||||
@@ -686,7 +686,7 @@ class Search(object):
|
||||
# takes 0.000975 seconds and restoring it from a pickle takes
|
||||
# 0.000974 seconds.
|
||||
sqp = Parser(
|
||||
dbcache, all_book_ids, dbcache.pref('grouped_search_terms'),
|
||||
dbcache, all_book_ids, dbcache._pref('grouped_search_terms'),
|
||||
self.date_search, self.num_search, self.bool_search,
|
||||
self.keypair_search,
|
||||
prefs['limit_search_columns'],
|
||||
|
@@ -178,6 +178,25 @@ class LegacyTest(BaseTest):
|
||||
T()
|
||||
T({'add_duplicates':False})
|
||||
T({'force_id':1000})
|
||||
|
||||
with NamedTemporaryFile(suffix='.txt') as f:
|
||||
f.write(b'tttttt')
|
||||
f.seek(0)
|
||||
bid = legacy.add_catalog(f.name, 'My Catalog')
|
||||
cache = legacy.new_api
|
||||
self.assertEqual(cache.formats(bid), ('TXT',))
|
||||
self.assertEqual(cache.field_for('title', bid), 'My Catalog')
|
||||
self.assertEqual(cache.field_for('authors', bid), ('calibre',))
|
||||
self.assertEqual(cache.field_for('tags', bid), (_('Catalog'),))
|
||||
self.assertTrue(bid < legacy.add_catalog(f.name, 'Something else'))
|
||||
self.assertEqual(legacy.add_catalog(f.name, 'My Catalog'), bid)
|
||||
|
||||
bid = legacy.add_news(f.name, {'title':'Events', 'add_title_tag':True, 'custom_tags':('one', 'two')})
|
||||
self.assertEqual(cache.formats(bid), ('TXT',))
|
||||
self.assertEqual(cache.field_for('authors', bid), ('calibre',))
|
||||
self.assertEqual(cache.field_for('tags', bid), (_('News'), 'Events', 'one', 'two'))
|
||||
|
||||
old.close()
|
||||
# }}}
|
||||
|
||||
def test_legacy_coverage(self): # {{{
|
||||
|
Loading…
x
Reference in New Issue
Block a user