mirror of
https://github.com/kovidgoyal/calibre.git
synced 2025-07-07 10:14:46 -04:00
Implement recursive import of books into the library (one book per directory)
This commit is contained in:
parent
31e86f0781
commit
6e451f5841
@ -118,7 +118,13 @@ class Main(MainWindow, Ui_MainWindow):
|
||||
md.addAction(_('Edit metadata individually'))
|
||||
md.addAction(_('Edit metadata in bulk'))
|
||||
self.metadata_menu = md
|
||||
self.add_menu = QMenu()
|
||||
self.add_menu.addAction(_('Add books from a single directory'))
|
||||
self.add_menu.addAction(_('Add books recursively (One book per directory)'))
|
||||
self.action_add.setMenu(self.add_menu)
|
||||
QObject.connect(self.action_add, SIGNAL("triggered(bool)"), self.add_books)
|
||||
QObject.connect(self.add_menu.actions()[0], SIGNAL("triggered(bool)"), self.add_books)
|
||||
QObject.connect(self.add_menu.actions()[1], SIGNAL("triggered(bool)"), self.add_recursive_single)
|
||||
QObject.connect(self.action_del, SIGNAL("triggered(bool)"), self.delete_books)
|
||||
QObject.connect(self.action_edit, SIGNAL("triggered(bool)"), self.edit_metadata)
|
||||
QObject.connect(md.actions()[0], SIGNAL('triggered(bool)'), self.edit_metadata)
|
||||
@ -156,6 +162,7 @@ class Main(MainWindow, Ui_MainWindow):
|
||||
self.tool_bar.widgetForAction(self.action_sync).setPopupMode(QToolButton.MenuButtonPopup)
|
||||
self.tool_bar.widgetForAction(self.action_convert).setPopupMode(QToolButton.MenuButtonPopup)
|
||||
self.tool_bar.widgetForAction(self.action_save).setPopupMode(QToolButton.MenuButtonPopup)
|
||||
self.tool_bar.widgetForAction(self.action_add).setPopupMode(QToolButton.MenuButtonPopup)
|
||||
self.tool_bar.setContextMenuPolicy(Qt.PreventContextMenu)
|
||||
|
||||
QObject.connect(self.config_button, SIGNAL('clicked(bool)'), self.do_config)
|
||||
@ -286,6 +293,30 @@ class Main(MainWindow, Ui_MainWindow):
|
||||
|
||||
|
||||
################################# Add books ################################
|
||||
|
||||
def add_recursive_single(self, checked):
    '''
    Recursively import books (one book per directory) from a folder chosen
    by the user into the library, then refresh the book list view.
    '''
    root = choose_dir(self, 'recursive book import root dir dialog', 'Select root folder')
    if not root:
        return
    model = self.library_view.model()
    duplicates = model.db.recursive_import(root)

    if duplicates:
        # Build an HTML list of the titles that were skipped as duplicates
        # and let the user decide whether to force-add them.
        warning = _('<p>Books with the same title as the following already exist in the database. Add them anyway?<ul>')
        for mi, path in duplicates:
            warning += '<li>'+mi.title+'</li>\n'
        d = question_dialog(self, _('Duplicates found!'), warning+'</ul></p>')
        if d.exec_() == QMessageBox.Yes:
            for mi, path in duplicates:
                model.db.import_book_directory(path, add_duplicates=True)

    model.resort()
    model.research()
|
||||
|
||||
|
||||
def add_books(self, checked):
|
||||
'''
|
||||
Add books from the local filesystem to either the library or the device.
|
||||
|
@ -20,9 +20,10 @@ import datetime, re, os, cPickle, traceback
|
||||
from zlib import compress, decompress
|
||||
|
||||
from libprs500 import sanitize_file_name
|
||||
from libprs500.ebooks.metadata.meta import set_metadata
|
||||
from libprs500.ebooks.metadata.meta import set_metadata, get_metadata
|
||||
from libprs500.ebooks.metadata.opf import OPFCreator
|
||||
from libprs500.ebooks.metadata import MetaInformation
|
||||
from libprs500.ebooks import BOOK_EXTENSIONS
|
||||
|
||||
class Concatenate(object):
|
||||
'''String concatenation aggregator for sqlite'''
|
||||
@ -1179,9 +1180,31 @@ ALTER TABLE books ADD COLUMN isbn TEXT DEFAULT "" COLLATE NOCASE;
|
||||
(id, usize, sqlite.Binary(data)))
|
||||
self.conn.commit()
|
||||
|
||||
def set_metadata(self, id, mi):
    '''
    Write every field of the L{MetaInformation} object C{mi} into the
    database record for book C{id}. Missing authors become C{['Unknown']};
    the ampersand character separates multiple authors within one string.
    '''
    if not mi.authors:
        mi.authors = ['Unknown']
    split_authors = []
    for author in mi.authors:
        split_authors.extend(author.split('&'))
    self.set_authors(id, split_authors)
    # Optional scalar fields: only written when they carry a truthy value.
    for value, setter in ((mi.author_sort, self.set_author_sort),
                          (mi.publisher,   self.set_publisher),
                          (mi.rating,      self.set_rating),
                          (mi.series,      self.set_series)):
        if value:
            setter(id, value)
    # Cover data lives in the second slot of the cover_data tuple.
    if mi.cover_data[1] is not None:
        self.set_cover(id, mi.cover_data[1])
|
||||
|
||||
def add_books(self, paths, formats, metadata, uris=[], add_duplicates=True):
|
||||
'''
|
||||
Add a book to the database. self.data and self.cache are not updated.
|
||||
@param paths: List of paths to book files of file-like objects
|
||||
'''
|
||||
formats, metadata, uris = iter(formats), iter(metadata), iter(uris)
|
||||
duplicates = []
|
||||
@ -1200,30 +1223,16 @@ ALTER TABLE books ADD COLUMN isbn TEXT DEFAULT "" COLLATE NOCASE;
|
||||
(mi.title, uri, series_index))
|
||||
id = obj.lastrowid
|
||||
self.conn.commit()
|
||||
if not mi.authors:
|
||||
mi.authors = ['Unknown']
|
||||
authors = []
|
||||
for a in mi.authors:
|
||||
authors += a.split('&')
|
||||
self.set_authors(id, authors)
|
||||
if mi.author_sort:
|
||||
self.set_author_sort(id, mi.author_sort)
|
||||
if mi.publisher:
|
||||
self.set_publisher(id, mi.publisher)
|
||||
if mi.rating:
|
||||
self.set_rating(id, mi.rating)
|
||||
if mi.series:
|
||||
self.set_series(id, mi.series)
|
||||
if mi.cover_data[1] is not None:
|
||||
self.set_cover(id, mi.cover_data[1])
|
||||
stream = open(path, 'rb')
|
||||
self.set_metadata(id, mi)
|
||||
stream = path if hasattr(path, 'read') else open(path, 'rb')
|
||||
stream.seek(0, 2)
|
||||
usize = stream.tell()
|
||||
stream.seek(0)
|
||||
|
||||
self.conn.execute('INSERT INTO data(book, format, uncompressed_size, data) VALUES (?,?,?,?)',
|
||||
(id, format, usize, sqlite.Binary(compress(stream.read()))))
|
||||
stream.close()
|
||||
if not hasattr(path, 'read'):
|
||||
stream.close()
|
||||
self.conn.commit()
|
||||
if duplicates:
|
||||
paths = tuple(duplicate[0] for duplicate in duplicates)
|
||||
@ -1345,7 +1354,52 @@ ALTER TABLE books ADD COLUMN isbn TEXT DEFAULT "" COLLATE NOCASE;
|
||||
traceback.print_exc()
|
||||
f.close()
|
||||
|
||||
|
||||
|
||||
def import_book_directory(self, dirpath, add_duplicates=False):
    '''
    Import a single book from the directory C{dirpath}, treating every
    readable ebook file directly inside it as one format of the same book.
    Metadata is merged across all formats via C{MetaInformation.smart_update}.

    @param dirpath: Directory to import. Only its immediate files are
                    scanned; subdirectories are ignored.
    @param add_duplicates: If False and a book with the same title already
                           exists, nothing is imported and the duplicate
                           info is returned instead.
    @return: C{None} on success or when the directory holds no usable book,
             otherwise the tuple C{(mi, dirpath)} describing the skipped
             duplicate so the caller can prompt the user.
    '''
    mi = MetaInformation(None, None)
    dirpath = os.path.abspath(dirpath)
    formats = []
    for path in os.listdir(dirpath):
        path = os.path.join(dirpath, path)
        if os.path.isdir(path) or not os.access(path, os.R_OK):
            continue
        ext = os.path.splitext(path)[1]
        if not ext:
            continue
        ext = ext[1:].lower()
        if ext not in BOOK_EXTENSIONS:
            continue
        f = open(path, 'rb')
        # try/finally so the file handle is not leaked if metadata
        # extraction raises (the original never closed these handles).
        try:
            mi.smart_update(get_metadata(f, stream_type=ext, use_libprs_metadata=True))
        finally:
            f.close()
        formats.append((ext, path))
    if mi.title is None or not formats:
        # No recognizable ebook files in this directory: nothing to do.
        return
    if not add_duplicates and self.conn.execute('SELECT id FROM books where title=?', (mi.title,)).fetchone():
        return mi, dirpath
    series_index = 1 if mi.series_index is None else mi.series_index
    obj = self.conn.execute('INSERT INTO books(title, uri, series_index) VALUES (?, ?, ?)',
                        (mi.title, None, series_index))
    id = obj.lastrowid
    self.conn.commit()
    self.set_metadata(id, mi)
    for ext, path in formats:
        stream = open(path, 'rb')
        try:
            # Measure the uncompressed size by seeking to the end.
            stream.seek(0, 2)
            usize = stream.tell()
            stream.seek(0)
            self.conn.execute('INSERT INTO data(book, format, uncompressed_size, data) VALUES (?,?,?,?)',
                              (id, ext, usize, sqlite.Binary(compress(stream.read()))))
        finally:
            stream.close()
    self.conn.commit()
|
||||
|
||||
|
||||
def recursive_import(self, root):
    '''
    Walk the tree rooted at C{root} and attempt to import one book from
    every directory encountered (including C{root} itself). Directories
    that are skipped as duplicates are collected and returned as a list
    of C{(mi, dirpath)} tuples.
    '''
    root = os.path.abspath(root)
    duplicates = []
    for current_dir, _subdirs, _files in os.walk(root):
        duplicate = self.import_book_directory(current_dir)
        if duplicate is not None:
            duplicates.append(duplicate)
    return duplicates
|
||||
|
||||
|
||||
class SearchToken(object):
|
||||
|
Loading…
x
Reference in New Issue
Block a user