diff --git a/src/calibre/devices/prs505/driver.py b/src/calibre/devices/prs505/driver.py
index f4256c4c14..6e21c60d1b 100644
--- a/src/calibre/devices/prs505/driver.py
+++ b/src/calibre/devices/prs505/driver.py
@@ -24,7 +24,7 @@ class File(object):
path = path[:-1]
self.path = path
self.name = os.path.basename(path)
-
+
class PRS505(Device):
VENDOR_ID = 0x054c #: SONY Vendor Id
@@ -33,17 +33,17 @@ class PRS505(Device):
PRODUCT_NAME = 'PRS-505'
VENDOR_NAME = 'SONY'
FORMATS = ['epub', 'lrf', 'lrx', 'rtf', 'pdf', 'txt']
-
+
MEDIA_XML = 'database/cache/media.xml'
CACHE_XML = 'Sony Reader/database/cache.xml'
-
+
MAIN_MEMORY_VOLUME_LABEL = 'Sony Reader Main Memory'
STORAGE_CARD_VOLUME_LABEL = 'Sony Reader Storage Card'
-
+
OSX_NAME = 'Sony PRS-505'
-
+
CARD_PATH_PREFIX = __appname__
-
+
FDI_TEMPLATE = \
'''
@@ -75,11 +75,11 @@ class PRS505(Device):
'''.replace('%(app)s', __appname__)
-
-
+
+
def __init__(self, log_packets=False):
self._main_prefix = self._card_prefix = None
-
+
@classmethod
def get_fdi(cls):
return cls.FDI_TEMPLATE%dict(
@@ -90,7 +90,7 @@ class PRS505(Device):
main_memory=cls.MAIN_MEMORY_VOLUME_LABEL,
storage_card=cls.STORAGE_CARD_VOLUME_LABEL,
)
-
+
@classmethod
def is_device(cls, device_id):
device_id = device_id.upper()
@@ -104,7 +104,7 @@ class PRS505(Device):
'PID_'+pid in device_id:
return True
return False
-
+
@classmethod
def get_osx_mountpoints(cls, raw=None):
if raw is None:
@@ -112,7 +112,7 @@ class PRS505(Device):
if not os.access(ioreg, os.X_OK):
ioreg = 'ioreg'
raw = subprocess.Popen((ioreg+' -w 0 -S -c IOMedia').split(),
- stdout=subprocess.PIPE).stdout.read()
+ stdout=subprocess.PIPE).communicate()[0]
lines = raw.splitlines()
names = {}
for i, line in enumerate(lines):
@@ -130,9 +130,9 @@ class PRS505(Device):
break
return names
-
+
def open_osx(self):
- mount = subprocess.Popen('mount', shell=True,
+ mount = subprocess.Popen('mount', shell=True,
stdout=subprocess.PIPE).stdout.read()
names = self.get_osx_mountpoints()
dev_pat = r'/dev/%s(\w*)\s+on\s+([^\(]+)\s+'
@@ -144,12 +144,12 @@ class PRS505(Device):
if card_pat is not None:
card_pat = dev_pat%card_pat
self._card_prefix = re.search(card_pat, mount).group(2) + os.sep
-
-
+
+
def open_windows(self):
time.sleep(6)
drives = []
- wmi = __import__('wmi', globals(), locals(), [], -1)
+ wmi = __import__('wmi', globals(), locals(), [], -1)
c = wmi.WMI()
for drive in c.Win32_DiskDrive():
if self.__class__.is_device(str(drive.PNPDeviceID)):
@@ -162,22 +162,22 @@ class PRS505(Device):
drives.append((drive.Index, prefix))
except IndexError:
continue
-
-
+
+
if not drives:
raise DeviceError(_('Unable to detect the %s disk drive. Try rebooting.')%self.__class__.__name__)
-
+
drives.sort(cmp=lambda a, b: cmp(a[0], b[0]))
self._main_prefix = drives[0][1]
if len(drives) > 1:
self._card_prefix = drives[1][1]
-
-
+
+
def open_linux(self):
import dbus
- bus = dbus.SystemBus()
+ bus = dbus.SystemBus()
hm = dbus.Interface(bus.get_object("org.freedesktop.Hal", "/org/freedesktop/Hal/Manager"), "org.freedesktop.Hal.Manager")
-
+
def conditional_mount(dev, main_mem=True):
mmo = bus.get_object("org.freedesktop.Hal", dev)
label = mmo.GetPropertyString('volume.label', dbus_interface='org.freedesktop.Hal.Device')
@@ -186,11 +186,11 @@ class PRS505(Device):
fstype = mmo.GetPropertyString('volume.fstype', dbus_interface='org.freedesktop.Hal.Device')
if is_mounted:
return str(mount_point)
- mmo.Mount(label, fstype, ['umask=077', 'uid='+str(os.getuid()), 'sync'],
+ mmo.Mount(label, fstype, ['umask=077', 'uid='+str(os.getuid()), 'sync'],
dbus_interface='org.freedesktop.Hal.Device.Volume')
return os.path.normpath('/media/'+label)+'/'
-
-
+
+
mm = hm.FindDeviceStringMatch(__appname__+'.mainvolume', self.__class__.__name__)
if not mm:
raise DeviceError(_('Unable to detect the %s disk drive. Try rebooting.')%(self.__class__.__name__,))
@@ -201,21 +201,21 @@ class PRS505(Device):
break
except dbus.exceptions.DBusException:
continue
-
-
+
+
if not self._main_prefix:
raise DeviceError('Could not open device for reading. Try a reboot.')
-
+
self._card_prefix = None
cards = hm.FindDeviceStringMatch(__appname__+'.cardvolume', self.__class__.__name__)
keys = []
for card in cards:
keys.append(int('UC_SD' in bus.get_object("org.freedesktop.Hal", card).GetPropertyString('info.parent', dbus_interface='org.freedesktop.Hal.Device')))
-
+
cards = zip(cards, keys)
cards.sort(cmp=lambda x, y: cmp(x[1], y[1]))
cards = [i[0] for i in cards]
-
+
for dev in cards:
try:
self._card_prefix = conditional_mount(dev, False)+os.sep
@@ -224,8 +224,8 @@ class PRS505(Device):
import traceback
print traceback
continue
-
-
+
+
def open(self):
time.sleep(5)
self._main_prefix = self._card_prefix = None
@@ -262,16 +262,16 @@ class PRS505(Device):
self._card_prefix = None
import traceback
traceback.print_exc()
-
+
def set_progress_reporter(self, pr):
self.report_progress = pr
-
+
def get_device_information(self, end_session=True):
return (self.__class__.__name__, '', '', '')
-
+
def card_prefix(self, end_session=True):
return self._card_prefix
-
+
@classmethod
def _windows_space(cls, prefix):
if prefix is None:
@@ -288,7 +288,7 @@ class PRS505(Device):
else: raise
mult = sectors_per_cluster * bytes_per_sector
return total_clusters * mult, free_clusters * mult
-
+
def total_space(self, end_session=True):
msz = csz = 0
if not iswindows:
@@ -301,9 +301,9 @@ class PRS505(Device):
else:
msz = self._windows_space(self._main_prefix)[0]
csz = self._windows_space(self._card_prefix)[0]
-
+
return (msz, 0, csz)
-
+
def free_space(self, end_session=True):
msz = csz = 0
if not iswindows:
@@ -316,9 +316,9 @@ class PRS505(Device):
else:
msz = self._windows_space(self._main_prefix)[1]
csz = self._windows_space(self._card_prefix)[1]
-
+
return (msz, 0, csz)
-
+
def books(self, oncard=False, end_session=True):
if oncard and self._card_prefix is None:
return []
@@ -331,7 +331,7 @@ class PRS505(Device):
if os.path.exists(path):
os.unlink(path)
return bl
-
+
def munge_path(self, path):
if path.startswith('/') and not (path.startswith(self._main_prefix) or \
(self._card_prefix and path.startswith(self._card_prefix))):
@@ -339,12 +339,12 @@ class PRS505(Device):
elif path.startswith('card:'):
path = path.replace('card:', self._card_prefix[:-1])
return path
-
+
def mkdir(self, path, end_session=True):
""" Make directory """
path = self.munge_path(path)
os.mkdir(path)
-
+
def list(self, path, recurse=False, end_session=True, munge=True):
if munge:
path = self.munge_path(path)
@@ -356,12 +356,12 @@ class PRS505(Device):
if recurse and _file.is_dir:
dirs[len(dirs):] = self.list(_file.path, recurse=True, munge=False)
return dirs
-
+
def get_file(self, path, outfile, end_session=True):
path = self.munge_path(path)
src = open(path, 'rb')
shutil.copyfileobj(src, outfile, 10*1024*1024)
-
+
def put_file(self, infile, path, replace_file=False, end_session=True):
path = self.munge_path(path)
if os.path.isdir(path):
@@ -372,25 +372,25 @@ class PRS505(Device):
shutil.copyfileobj(infile, dest, 10*1024*1024)
dest.flush()
dest.close()
-
+
def rm(self, path, end_session=True):
path = self.munge_path(path)
os.unlink(path)
-
+
def touch(self, path, end_session=True):
path = self.munge_path(path)
if not os.path.exists(path):
open(path, 'w').close()
if not os.path.isdir(path):
os.utime(path, None)
-
- def upload_books(self, files, names, on_card=False, end_session=True,
+
+ def upload_books(self, files, names, on_card=False, end_session=True,
metadata=None):
if on_card and not self._card_prefix:
raise ValueError(_('The reader has no storage card connected.'))
path = os.path.join(self._card_prefix, self.CARD_PATH_PREFIX) if on_card \
else os.path.join(self._main_prefix, 'database', 'media', 'books')
-
+
def get_size(obj):
if hasattr(obj, 'seek'):
obj.seek(0, 2)
@@ -398,27 +398,27 @@ class PRS505(Device):
obj.seek(0)
return size
return os.path.getsize(obj)
-
+
sizes = map(get_size, files)
size = sum(sizes)
space = self.free_space()
mspace = space[0]
cspace = space[2]
- if on_card and size > cspace - 1024*1024:
+ if on_card and size > cspace - 1024*1024:
raise FreeSpaceError("There is insufficient free space "+\
"on the storage card")
- if not on_card and size > mspace - 2*1024*1024:
+ if not on_card and size > mspace - 2*1024*1024:
raise FreeSpaceError("There is insufficient free space " +\
"in main memory")
-
+
paths, ctimes = [], []
-
+
names = iter(names)
for infile in files:
close = False
if not hasattr(infile, 'read'):
infile, close = open(infile, 'rb'), True
- infile.seek(0)
+ infile.seek(0)
name = names.next()
paths.append(os.path.join(path, name))
if not os.path.exists(os.path.dirname(paths[-1])):
@@ -428,7 +428,7 @@ class PRS505(Device):
infile.close()
ctimes.append(os.path.getctime(paths[-1]))
return zip(paths, sizes, ctimes, cycle([on_card]))
-
+
@classmethod
def add_books_to_metadata(cls, locations, metadata, booklists):
metadata = iter(metadata)
@@ -441,12 +441,12 @@ class PRS505(Device):
name = name.replace('//', '/')
booklists[on_card].add_book(info, name, *location[1:-1])
fix_ids(*booklists)
-
+
def delete_books(self, paths, end_session=True):
for path in paths:
if os.path.exists(path):
os.unlink(path)
-
+
@classmethod
def remove_books_from_metadata(cls, paths, booklists):
for path in paths:
@@ -454,7 +454,7 @@ class PRS505(Device):
if hasattr(bl, 'remove_book'):
bl.remove_book(path)
fix_ids(*booklists)
-
+
def sync_booklists(self, booklists, end_session=True):
fix_ids(*booklists)
if not os.path.exists(self._main_prefix):
@@ -468,9 +468,9 @@ class PRS505(Device):
f = open(self._card_prefix + self.__class__.CACHE_XML, 'wb')
booklists[1].write(f)
f.close()
-
-
-
+
+
+
def main(args=sys.argv):
return 0
diff --git a/src/calibre/devices/usbms/device.py b/src/calibre/devices/usbms/device.py
index 5943e2e13f..eb86cb7edd 100644
--- a/src/calibre/devices/usbms/device.py
+++ b/src/calibre/devices/usbms/device.py
@@ -190,7 +190,7 @@ class Device(_Device):
self._main_prefix = drives.get('main')
self._card_prefix = drives.get('card')
-
+
if not self._main_prefix:
raise DeviceError(_('Unable to detect the %s disk drive. Try rebooting.') % self.__class__.__name__)
@@ -200,7 +200,7 @@ class Device(_Device):
if not os.access(ioreg, os.X_OK):
ioreg = 'ioreg'
raw = subprocess.Popen((ioreg+' -w 0 -S -c IOMedia').split(),
- stdout=subprocess.PIPE).stdout.read()
+ stdout=subprocess.PIPE).communicate()[0]
lines = raw.splitlines()
names = {}
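
Note on the subprocess change above (applied in both prs505/driver.py and usbms/device.py): replacing .stdout.read() with .communicate()[0] still reads the child's stdout to EOF, but also waits for the process to exit, so the ioreg child is reaped rather than left behind. A minimal sketch of the pattern, assuming a macOS host where ioreg is available:

    import subprocess

    # communicate() drains stdout to EOF and waits for the child to exit,
    # unlike a bare .stdout.read(), which never reaps the process.
    proc = subprocess.Popen('ioreg -w 0 -S -c IOMedia'.split(),
                            stdout=subprocess.PIPE)
    raw = proc.communicate()[0]
    for line in raw.splitlines():
        pass  # parsed by get_osx_mountpoints() into its names dict
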
diff --git a/src/calibre/ebooks/mobi/mobiml.py b/src/calibre/ebooks/mobi/mobiml.py
index 8dca993a64..5cbc2a454f 100644
--- a/src/calibre/ebooks/mobi/mobiml.py
+++ b/src/calibre/ebooks/mobi/mobiml.py
@@ -79,7 +79,7 @@ class FormatState(object):
class MobiMLizer(object):
def __init__(self, ignore_tables=False):
self.ignore_tables = ignore_tables
-
+
def transform(self, oeb, context):
oeb.logger.info('Converting XHTML to Mobipocket markup...')
self.oeb = oeb
@@ -98,10 +98,10 @@ class MobiMLizer(object):
del oeb.guide['cover']
item = oeb.manifest.hrefs[href]
if item.spine_position is not None:
- oeb.spine.remove(item)
+ oeb.spine.remove(item)
if item.media_type in OEB_DOCS:
self.oeb.manifest.remove(item)
-
+
def mobimlize_spine(self):
for item in self.oeb.spine:
stylizer = Stylizer(item.data, item.href, self.oeb, self.profile)
@@ -134,7 +134,7 @@ class MobiMLizer(object):
if line:
result.append(line)
return result
-
+
def mobimlize_content(self, tag, text, bstate, istates):
if text or tag != 'br':
bstate.content = True
@@ -239,7 +239,7 @@ class MobiMLizer(object):
last.tail = (last.tail or '') + item
else:
inline.append(item)
-
+
def mobimlize_elem(self, elem, stylizer, bstate, istates):
if not isinstance(elem.tag, basestring) \
or namespace(elem.tag) != XHTML_NS:
diff --git a/src/calibre/ebooks/mobi/writer.py b/src/calibre/ebooks/mobi/writer.py
index 467e2c6dc7..9ab641104f 100644
--- a/src/calibre/ebooks/mobi/writer.py
+++ b/src/calibre/ebooks/mobi/writer.py
@@ -211,12 +211,13 @@ class Serializer(object):
def serialize_item(self, item):
buffer = self.buffer
+ buffer.write('')
if not item.linear:
self.breaks.append(buffer.tell() - 1)
self.id_offsets[item.href] = buffer.tell()
for elem in item.data.find(XHTML('body')):
self.serialize_elem(elem, item)
- buffer.write('')
+ buffer.write('')
def serialize_elem(self, elem, item, nsrmap=NSRMAP):
buffer = self.buffer
diff --git a/src/calibre/gui2/library.py b/src/calibre/gui2/library.py
index 8d97c8fba0..a8698c4571 100644
--- a/src/calibre/gui2/library.py
+++ b/src/calibre/gui2/library.py
@@ -93,7 +93,7 @@ class DateDelegate(QStyledItemDelegate):
def createEditor(self, parent, option, index):
qde = QStyledItemDelegate.createEditor(self, parent, option, index)
- qde.setDisplayFormat('MM/dd/yyyy')
+ qde.setDisplayFormat(unicode(qde.displayFormat()).replace('yy', 'yyyy'))
qde.setMinimumDate(QDate(101,1,1))
qde.setCalendarPopup(True)
return qde
@@ -635,7 +635,8 @@ class BooksView(TableView):
def columns_sorted(self, rating_col, timestamp_col):
for i in range(self.model().columnCount(None)):
- if self.itemDelegateForColumn(i) == self.rating_delegate:
+ if self.itemDelegateForColumn(i) in (self.rating_delegate,
+ self.timestamp_delegate):
self.setItemDelegateForColumn(i, self.itemDelegate())
if rating_col > -1:
self.setItemDelegateForColumn(rating_col, self.rating_delegate)
@@ -706,7 +707,7 @@ class BooksView(TableView):
def close(self):
self._model.close()
-
+
def set_editable(self, editable):
self._model.set_editable(editable)
@@ -999,10 +1000,10 @@ class DeviceBooksModel(BooksModel):
self.sort(col, self.sorted_on[1])
done = True
return done
-
+
def set_editable(self, editable):
self.editable = editable
-
+
class SearchBox(QLineEdit):
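
Note on the gui2/library.py changes above: DateDelegate.createEditor no longer forces the US-style 'MM/dd/yyyy'; it starts from whatever display format the editor widget already has and only widens a two-digit year to four digits (the replace assumes the starting format uses 'yy', not 'yyyy'). columns_sorted() correspondingly clears any column previously bound to the rating or timestamp delegate before re-assigning delegates. A rough illustration of the format rewrite, with a hypothetical locale default:

    # Hypothetical locale default; only the year field is widened.
    locale_fmt = 'dd/MM/yy'
    print locale_fmt.replace('yy', 'yyyy')   # -> dd/MM/yyyy
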
diff --git a/src/calibre/library/database2.py b/src/calibre/library/database2.py
index da5790a621..999a242986 100644
--- a/src/calibre/library/database2.py
+++ b/src/calibre/library/database2.py
@@ -33,14 +33,14 @@ from calibre.ebooks import BOOK_EXTENSIONS
copyfile = os.link if hasattr(os, 'link') else shutil.copyfile
-FIELD_MAP = {'id':0, 'title':1, 'authors':2, 'publisher':3, 'rating':4, 'timestamp':5,
+FIELD_MAP = {'id':0, 'title':1, 'authors':2, 'publisher':3, 'rating':4, 'timestamp':5,
'size':6, 'tags':7, 'comments':8, 'series':9, 'series_index':10,
'sort':11, 'author_sort':12, 'formats':13, 'isbn':14, 'path':15}
INDEX_MAP = dict(zip(FIELD_MAP.values(), FIELD_MAP.keys()))
class CoverCache(QThread):
-
+
def __init__(self, library_path, parent=None):
QThread.__init__(self, parent)
self.library_path = library_path
@@ -52,7 +52,7 @@ class CoverCache(QThread):
self.cache_lock = QReadWriteLock()
self.id_map_stale = True
self.keep_running = True
-
+
def build_id_map(self):
self.id_map_lock.lockForWrite()
self.id_map = {}
@@ -65,8 +65,8 @@ class CoverCache(QThread):
continue
self.id_map_lock.unlock()
self.id_map_stale = False
-
-
+
+
def set_cache(self, ids):
self.cache_lock.lockForWrite()
already_loaded = set([])
@@ -80,8 +80,8 @@ class CoverCache(QThread):
self.load_queue_lock.lockForWrite()
self.load_queue = collections.deque(ids)
self.load_queue_lock.unlock()
-
-
+
+
def run(self):
while self.keep_running:
if self.id_map is None or self.id_map_stale:
@@ -94,7 +94,7 @@ class CoverCache(QThread):
break
finally:
self.load_queue_lock.unlock()
-
+
self.cache_lock.lockForRead()
need = True
if id in self.cache.keys():
@@ -121,19 +121,19 @@ class CoverCache(QThread):
self.cache_lock.lockForWrite()
self.cache[id] = img
self.cache_lock.unlock()
-
+
self.sleep(1)
-
+
def stop(self):
self.keep_running = False
-
+
def cover(self, id):
val = None
if self.cache_lock.tryLockForRead(50):
val = self.cache.get(id, None)
self.cache_lock.unlock()
return val
-
+
def clear_cache(self):
self.cache_lock.lockForWrite()
self.cache = {}
@@ -148,24 +148,24 @@ class CoverCache(QThread):
for id in ids:
self.load_queue.appendleft(id)
self.load_queue_lock.unlock()
-
+
class ResultCache(SearchQueryParser):
-
+
'''
Stores sorted and filtered metadata in memory.
'''
-
+
def __init__(self):
self._map = self._map_filtered = self._data = []
self.first_sort = True
SearchQueryParser.__init__(self)
-
+
def __getitem__(self, row):
return self._data[self._map_filtered[row]]
-
+
def __len__(self):
return len(self._map_filtered)
-
+
def __iter__(self):
for id in self._map_filtered:
yield self._data[id]
@@ -194,45 +194,49 @@ class ResultCache(SearchQueryParser):
matches.add(item[0])
break
return matches
-
+
def remove(self, id):
self._data[id] = None
if id in self._map:
self._map.remove(id)
if id in self._map_filtered:
self._map_filtered.remove(id)
-
+
def set(self, row, col, val, row_is_id=False):
- id = row if row_is_id else self._map_filtered[row]
+ id = row if row_is_id else self._map_filtered[row]
self._data[id][col] = val
-
+
def index(self, id, cache=False):
x = self._map if cache else self._map_filtered
return x.index(id)
-
+
def row(self, id):
return self.index(id)
-
+
def has_id(self, id):
try:
return self._data[id] is not None
except IndexError:
pass
return False
-
+
def refresh_ids(self, conn, ids):
'''
Refresh the data in the cache for books identified by ids.
Returns a list of affected rows or None if the rows are filtered.
'''
for id in ids:
- self._data[id] = conn.get('SELECT * from meta WHERE id=?', (id,))[0]
+ try:
+ self._data[id] = conn.get('SELECT * from meta WHERE id=?',
+ (id,))[0]
+ except IndexError:
+ return None
try:
return map(self.row, ids)
except ValueError:
pass
return None
-
+
def books_added(self, ids, conn):
if not ids:
return
@@ -241,16 +245,16 @@ class ResultCache(SearchQueryParser):
self._data[id] = conn.get('SELECT * from meta WHERE id=?', (id,))[0]
self._map[0:0] = ids
self._map_filtered[0:0] = ids
-
+
def books_deleted(self, ids):
for id in ids:
self._data[id] = None
if id in self._map: self._map.remove(id)
if id in self._map_filtered: self._map_filtered.remove(id)
-
+
def count(self):
return len(self._map)
-
+
def refresh(self, db, field=None, ascending=True):
temp = db.conn.get('SELECT * FROM meta')
self._data = list(itertools.repeat(None, temp[-1][0]+2)) if temp else []
@@ -260,7 +264,7 @@ class ResultCache(SearchQueryParser):
if field is not None:
self.sort(field, ascending)
self._map_filtered = list(self._map)
-
+
def seriescmp(self, x, y):
try:
ans = cmp(self._data[x][9].lower(), self._data[y][9].lower()) if str else\
@@ -269,7 +273,7 @@ class ResultCache(SearchQueryParser):
ans = cmp(self._data[x][9], self._data[y][9])
if ans != 0: return ans
return cmp(self._data[x][10], self._data[y][10])
-
+
def cmp(self, loc, x, y, str=True, subsort=False):
try:
ans = cmp(self._data[x][loc].lower(), self._data[y][loc].lower()) if str else\
@@ -279,7 +283,7 @@ class ResultCache(SearchQueryParser):
if subsort and ans == 0:
return cmp(self._data[x][11].lower(), self._data[y][11].lower())
return ans
-
+
def sort(self, field, ascending, subsort=False):
field = field.lower().strip()
if field in ('author', 'tag', 'comment'):
@@ -291,28 +295,28 @@ class ResultCache(SearchQueryParser):
subsort = True
self.first_sort = False
fcmp = self.seriescmp if field == 'series' else \
- functools.partial(self.cmp, FIELD_MAP[field], subsort=subsort,
+ functools.partial(self.cmp, FIELD_MAP[field], subsort=subsort,
str=field not in ('size', 'rating', 'timestamp'))
-
+
self._map.sort(cmp=fcmp, reverse=not ascending)
self._map_filtered = [id for id in self._map if id in self._map_filtered]
-
+
def search(self, query):
if not query or not query.strip():
self._map_filtered = list(self._map)
return
matches = sorted(self.parse(query))
self._map_filtered = [id for id in self._map if id in matches]
-
-
+
+
class Tag(unicode):
-
+
def __new__(cls, *args):
obj = super(Tag, cls).__new__(cls, *args)
obj.count = 0
obj.state = 0
return obj
-
+
def as_string(self):
return u'[%d] %s'%(self.count, self)
@@ -324,16 +328,16 @@ class LibraryDatabase2(LibraryDatabase):
@apply
def user_version():
doc = 'The user version of this database'
-
+
def fget(self):
return self.conn.get('pragma user_version;', all=False)
-
+
def fset(self, val):
self.conn.execute('pragma user_version=%d'%int(val))
self.conn.commit()
-
+
return property(doc=doc, fget=fget, fset=fset)
-
+
def connect(self):
if 'win32' in sys.platform and len(self.library_path) + 4*self.PATH_LIMIT + 10 > 259:
raise ValueError('Path to library too long. Must be less than %d characters.'%(259-4*self.PATH_LIMIT-10))
@@ -343,9 +347,9 @@ class LibraryDatabase2(LibraryDatabase):
self.conn.close()
os.remove(self.dbpath)
self.conn = connect(self.dbpath, self.row_factory)
- if self.user_version == 0:
+ if self.user_version == 0:
self.initialize_database()
-
+
def __init__(self, library_path, row_factory=False):
if not os.path.exists(library_path):
os.makedirs(library_path)
@@ -358,7 +362,7 @@ class LibraryDatabase2(LibraryDatabase):
self.connect()
self.is_case_sensitive = not iswindows and not isosx and \
not os.path.exists(self.dbpath.replace('metadata.db', 'MeTAdAtA.dB'))
- # Upgrade database
+ # Upgrade database
while True:
meth = getattr(self, 'upgrade_version_%d'%self.user_version, None)
if meth is None:
@@ -368,7 +372,7 @@ class LibraryDatabase2(LibraryDatabase):
meth()
self.conn.commit()
self.user_version += 1
-
+
self.data = ResultCache()
self.search = self.data.search
self.refresh = functools.partial(self.data.refresh, self)
@@ -378,24 +382,24 @@ class LibraryDatabase2(LibraryDatabase):
self.row = self.data.row
self.has_id = self.data.has_id
self.count = self.data.count
-
+
self.refresh()
-
+
def get_property(idx, index_is_id=False, loc=-1):
row = self.data._data[idx] if index_is_id else self.data[idx]
return row[loc]
-
- for prop in ('author_sort', 'authors', 'comment', 'comments', 'isbn',
- 'publisher', 'rating', 'series', 'series_index', 'tags',
+
+ for prop in ('author_sort', 'authors', 'comment', 'comments', 'isbn',
+ 'publisher', 'rating', 'series', 'series_index', 'tags',
'title', 'timestamp'):
- setattr(self, prop, functools.partial(get_property,
+ setattr(self, prop, functools.partial(get_property,
loc=FIELD_MAP['comments' if prop == 'comment' else prop]))
-
+
def initialize_database(self):
from calibre.resources import metadata_sqlite
self.conn.executescript(metadata_sqlite)
self.user_version = 1
-
+
def upgrade_version_1(self):
'''
Normalize indices.
@@ -407,7 +411,7 @@ class LibraryDatabase2(LibraryDatabase):
CREATE INDEX series_idx ON series (name COLLATE NOCASE);
CREATE INDEX series_sort_idx ON books (series_index, id);
'''))
-
+
def upgrade_version_2(self):
''' Fix Foreign key constraints for deleting from link tables. '''
script = textwrap.dedent('''\
@@ -426,7 +430,7 @@ class LibraryDatabase2(LibraryDatabase):
self.conn.executescript(script%dict(ltable='publishers', table='publishers', ltable_col='publisher'))
self.conn.executescript(script%dict(ltable='tags', table='tags', ltable_col='tag'))
self.conn.executescript(script%dict(ltable='series', table='series', ltable_col='series'))
-
+
def upgrade_version_3(self):
' Add path to result cache '
self.conn.executescript('''
@@ -450,25 +454,25 @@ class LibraryDatabase2(LibraryDatabase):
FROM books;
''')
-
+
def last_modified(self):
''' Return last modified time as a UTC datetime object'''
return datetime.utcfromtimestamp(os.stat(self.dbpath).st_mtime)
-
+
def path(self, index, index_is_id=False):
'Return the relative path to the directory containing this books files as a unicode string.'
row = self.data._data[index] if index_is_id else self.data[index]
return row[FIELD_MAP['path']].replace('/', os.sep)
-
-
+
+
def abspath(self, index, index_is_id=False):
'Return the absolute path to the directory containing this books files as a unicode string.'
path = os.path.join(self.library_path, self.path(index, index_is_id=index_is_id))
if not os.path.exists(path):
os.makedirs(path)
return path
-
-
+
+
def construct_path_name(self, id):
'''
Construct the directory name for this book based on its metadata.
@@ -480,7 +484,7 @@ class LibraryDatabase2(LibraryDatabase):
title = sanitize_file_name(self.title(id, index_is_id=True)[:self.PATH_LIMIT]).decode(filesystem_encoding, 'ignore')
path = author + '/' + title + ' (%d)'%id
return path
-
+
def construct_file_name(self, id):
'''
Construct the file name for this book based on its metadata.
@@ -492,17 +496,17 @@ class LibraryDatabase2(LibraryDatabase):
title = sanitize_file_name(self.title(id, index_is_id=True)[:self.PATH_LIMIT]).decode(filesystem_encoding, 'replace')
name = title + ' - ' + author
return name
-
+
def rmtree(self, path):
if not self.normpath(self.library_path).startswith(self.normpath(path)):
shutil.rmtree(path)
-
+
def normpath(self, path):
path = os.path.abspath(os.path.realpath(path))
if not self.is_case_sensitive:
path = path.lower()
return path
-
+
def set_path(self, index, index_is_id=False):
'''
Set the path to the directory containing this books files based on its
@@ -524,12 +528,12 @@ class LibraryDatabase2(LibraryDatabase):
break
if path == current_path and not changed:
return
-
+
tpath = os.path.join(self.library_path, *path.split('/'))
if not os.path.exists(tpath):
os.makedirs(tpath)
spath = os.path.join(self.library_path, *current_path.split('/'))
-
+
if current_path and os.path.exists(spath): # Migrate existing files
cdata = self.cover(id, index_is_id=True)
if cdata is not None:
@@ -551,14 +555,14 @@ class LibraryDatabase2(LibraryDatabase):
parent = os.path.dirname(spath)
if len(os.listdir(parent)) == 0:
self.rmtree(parent)
-
+
def add_listener(self, listener):
'''
Add a listener. Will be called on change events with two arguments.
Event name and list of affected ids.
'''
self.listeners.add(listener)
-
+
def notify(self, event, ids=[]):
'Notify all listeners'
for listener in self.listeners:
@@ -567,12 +571,12 @@ class LibraryDatabase2(LibraryDatabase):
except:
traceback.print_exc()
continue
-
- def cover(self, index, index_is_id=False, as_file=False, as_image=False,
+
+ def cover(self, index, index_is_id=False, as_file=False, as_image=False,
as_path=False):
'''
Return the cover image as a bytestring (in JPEG format) or None.
-
+
`as_file` : If True return the image as an open file object
`as_image`: If True return the image as a QImage object
'''
@@ -587,7 +591,7 @@ class LibraryDatabase2(LibraryDatabase):
img.loadFromData(f.read())
return img
return f if as_file else f.read()
-
+
def get_metadata(self, idx, index_is_id=False, get_cover=False):
'''
Convenience method to return metadata as a L{MetaInformation} object.
@@ -612,7 +616,7 @@ class LibraryDatabase2(LibraryDatabase):
if get_cover:
mi.cover = self.cover(id, index_is_id=True, as_path=True)
return mi
-
+
def has_book(self, mi):
title = mi.title
if title:
@@ -620,16 +624,16 @@ class LibraryDatabase2(LibraryDatabase):
title = title.decode(preferred_encoding, 'replace')
return bool(self.conn.get('SELECT id FROM books where title=?', (title,), all=False))
return False
-
+
def has_cover(self, index, index_is_id=False):
id = index if index_is_id else self.id(index)
path = os.path.join(self.library_path, self.path(id, index_is_id=True), 'cover.jpg')
return os.access(path, os.R_OK)
-
+
def set_cover(self, id, data):
'''
Set the cover for this book.
-
+
`data`: Can be either a QImage, QPixmap, file object or bytestring
'''
path = os.path.join(self.library_path, self.path(id, index_is_id=True), 'cover.jpg')
@@ -644,13 +648,13 @@ class LibraryDatabase2(LibraryDatabase):
data = data.read()
p.loadFromData(data)
p.save(path)
-
+
def all_formats(self):
formats = self.conn.get('SELECT format from data')
if not formats:
return set([])
return set([f[0] for f in formats])
-
+
def formats(self, index, index_is_id=False):
''' Return available formats as a comma separated list or None if there are no available formats '''
id = index if index_is_id else self.id(index)
@@ -667,7 +671,7 @@ class LibraryDatabase2(LibraryDatabase):
if os.access(os.path.join(path, name+_format), os.R_OK|os.W_OK):
ans.append(format)
return ','.join(ans)
-
+
def has_format(self, index, format, index_is_id=False):
id = index if index_is_id else self.id(index)
name = self.conn.get('SELECT name FROM data WHERE book=? AND format=?', (id, format), all=False)
@@ -677,7 +681,7 @@ class LibraryDatabase2(LibraryDatabase):
path = os.path.join(path, name+format)
return os.access(path, os.R_OK|os.W_OK)
return False
-
+
def format_abspath(self, index, format, index_is_id=False):
'Return absolute path to the ebook file of format `format`'
id = index if index_is_id else self.id(index)
@@ -688,13 +692,13 @@ class LibraryDatabase2(LibraryDatabase):
path = os.path.join(path, name+format)
if os.access(path, os.R_OK|os.W_OK):
return path
-
+
def format(self, index, format, index_is_id=False, as_file=False, mode='r+b'):
'''
Return the ebook format as a bytestring or `None` if the format doesn't exist,
- or we don't have permission to write to the ebook file.
-
- `as_file`: If True the ebook format is returned as a file object opened in `mode`
+ or we don't have permission to write to the ebook file.
+
+ `as_file`: If True the ebook format is returned as a file object opened in `mode`
'''
path = self.format_abspath(index, format, index_is_id=index_is_id)
if path is not None:
@@ -702,14 +706,14 @@ class LibraryDatabase2(LibraryDatabase):
return f if as_file else f.read()
if self.has_format(index, format, index_is_id):
self.remove_format(id, format, index_is_id=True)
-
- def add_format_with_hooks(self, index, format, fpath, index_is_id=False,
+
+ def add_format_with_hooks(self, index, format, fpath, index_is_id=False,
path=None, notify=True):
npath = self.run_import_plugins(fpath, format)
format = os.path.splitext(npath)[-1].lower().replace('.', '').upper()
- return self.add_format(index, format, open(npath, 'rb'),
+ return self.add_format(index, format, open(npath, 'rb'),
index_is_id=index_is_id, path=path, notify=notify)
-
+
def add_format(self, index, format, stream, index_is_id=False, path=None, notify=True):
id = index if index_is_id else self.id(index)
if path is None:
@@ -733,7 +737,7 @@ class LibraryDatabase2(LibraryDatabase):
self.refresh_ids([id])
if notify:
self.notify('metadata', [id])
-
+
def delete_book(self, id, notify=True):
'''
Removes book from the result cache and the underlying database.
@@ -751,7 +755,7 @@ class LibraryDatabase2(LibraryDatabase):
self.data.books_deleted([id])
if notify:
self.notify('delete', [id])
-
+
def remove_format(self, index, format, index_is_id=False, notify=True):
id = index if index_is_id else self.id(index)
path = os.path.join(self.library_path, *self.path(id, index_is_id=True).split(os.sep))
@@ -768,7 +772,7 @@ class LibraryDatabase2(LibraryDatabase):
self.refresh_ids([id])
if notify:
self.notify('metadata', [id])
-
+
def clean(self):
'''
Remove orphaned entries.
@@ -779,13 +783,13 @@ class LibraryDatabase2(LibraryDatabase):
self.conn.execute(st%dict(ltable='tags', table='tags', ltable_col='tag'))
self.conn.execute(st%dict(ltable='series', table='series', ltable_col='series'))
self.conn.commit()
-
+
def get_recipes(self):
return self.conn.get('SELECT id, script FROM feeds')
-
+
def get_recipe(self, id):
return self.conn.get('SELECT script FROM feeds WHERE id=?', (id,), all=False)
-
+
def get_categories(self, sort_on_count=False):
categories = {}
def get(name, category, field='name'):
@@ -807,11 +811,11 @@ class LibraryDatabase2(LibraryDatabase):
for tag in tags:
tag.count = self.conn.get('SELECT COUNT(format) FROM data WHERE format=?', (tag,), all=False)
tags.sort(reverse=sort_on_count, cmp=(lambda x,y:cmp(x.count,y.count)) if sort_on_count else cmp)
- for x in (('authors', 'author'), ('tags', 'tag'), ('publishers', 'publisher'),
+ for x in (('authors', 'author'), ('tags', 'tag'), ('publishers', 'publisher'),
('series', 'series')):
get(*x)
get('data', 'format', 'format')
-
+
categories['news'] = []
newspapers = self.conn.get('SELECT name FROM tags WHERE id IN (SELECT DISTINCT tag FROM books_tags_link WHERE book IN (select book from books_tags_link where tag IN (SELECT id FROM tags WHERE name=?)))', (_('News'),))
if newspapers:
@@ -823,10 +827,10 @@ class LibraryDatabase2(LibraryDatabase):
categories['news'] = list(map(Tag, newspapers))
for tag in categories['news']:
tag.count = self.conn.get('SELECT COUNT(id) FROM books_tags_link WHERE tag IN (SELECT DISTINCT id FROM tags WHERE name=?)', (tag,), all=False)
-
+
return categories
-
-
+
+
def tags_older_than(self, tag, delta):
tag = tag.lower().strip()
now = datetime.now()
@@ -836,9 +840,9 @@ class LibraryDatabase2(LibraryDatabase):
tags = r[FIELD_MAP['tags']]
if tags and tag in tags.lower():
yield r[FIELD_MAP['id']]
-
-
-
+
+
+
def set(self, row, column, val):
'''
Convenience method for setting the title, authors, publisher or rating
@@ -861,10 +865,10 @@ class LibraryDatabase2(LibraryDatabase):
self.data.refresh_ids(self.conn, [id])
self.set_path(id, True)
self.notify('metadata', [id])
-
+
def set_metadata(self, id, mi):
'''
- Set metadata for the book `id` from the `MetaInformation` object `mi`
+ Set metadata for the book `id` from the `MetaInformation` object `mi`
'''
if mi.title:
self.set_title(id, mi.title)
@@ -898,7 +902,7 @@ class LibraryDatabase2(LibraryDatabase):
self.set_timestamp(id, mi.timestamp, notify=False)
self.set_path(id, True)
self.notify('metadata', [id])
-
+
def set_authors(self, id, authors, notify=True):
'''
`authors`: A list of authors.
@@ -925,18 +929,18 @@ class LibraryDatabase2(LibraryDatabase):
(id, aid))
except IntegrityError: # Sometimes books specify the same author twice in their metadata
pass
- ss = authors_to_sort_string(authors)
+ ss = authors_to_sort_string(authors)
self.conn.execute('UPDATE books SET author_sort=? WHERE id=?',
(ss, id))
self.conn.commit()
- self.data.set(id, FIELD_MAP['authors'],
- ','.join([a.replace(',', '|') for a in authors]),
+ self.data.set(id, FIELD_MAP['authors'],
+ ','.join([a.replace(',', '|') for a in authors]),
row_is_id=True)
- self.data.set(id, FIELD_MAP['author_sort'], ss, row_is_id=True)
+ self.data.set(id, FIELD_MAP['author_sort'], ss, row_is_id=True)
self.set_path(id, True)
if notify:
self.notify('metadata', [id])
-
+
def set_title(self, id, title, notify=True):
if not title:
return
@@ -949,7 +953,7 @@ class LibraryDatabase2(LibraryDatabase):
self.conn.commit()
if notify:
self.notify('metadata', [id])
-
+
def set_timestamp(self, id, dt, notify=True):
if dt:
self.conn.execute('UPDATE books SET timestamp=? WHERE id=?', (dt, id))
@@ -957,7 +961,7 @@ class LibraryDatabase2(LibraryDatabase):
self.conn.commit()
if notify:
self.notify('metadata', [id])
-
+
def set_publisher(self, id, publisher, notify=True):
self.conn.execute('DELETE FROM books_publishers_link WHERE book=?',(id,))
self.conn.execute('DELETE FROM publishers WHERE (SELECT COUNT(id) FROM books_publishers_link WHERE publisher=publishers.id) < 1')
@@ -974,7 +978,7 @@ class LibraryDatabase2(LibraryDatabase):
self.data.set(id, FIELD_MAP['publisher'], publisher, row_is_id=True)
if notify:
self.notify('metadata', [id])
-
+
def set_tags(self, id, tags, append=False, notify=True):
'''
@param tags: list of strings
@@ -1018,7 +1022,7 @@ class LibraryDatabase2(LibraryDatabase):
self.data.set(id, FIELD_MAP['tags'], tags, row_is_id=True)
if notify:
self.notify('metadata', [id])
-
+
def unapply_tags(self, book_id, tags, notify=True):
for tag in tags:
id = self.conn.get('SELECT id FROM tags WHERE name=?', (tag,), all=False)
@@ -1028,7 +1032,7 @@ class LibraryDatabase2(LibraryDatabase):
self.data.refresh_ids(self.conn, [book_id])
if notify:
self.notify('metadata', [id])
-
+
def is_tag_used(self, tag):
existing_tags = self.all_tags()
lt = [t.lower() for t in existing_tags]
@@ -1037,7 +1041,7 @@ class LibraryDatabase2(LibraryDatabase):
return True
except ValueError:
return False
-
+
def delete_tag(self, tag):
existing_tags = self.all_tags()
lt = [t.lower() for t in existing_tags]
@@ -1052,7 +1056,7 @@ class LibraryDatabase2(LibraryDatabase):
self.conn.execute('DELETE FROM tags WHERE id=?', (id,))
self.conn.commit()
-
+
def set_series(self, id, series, notify=True):
self.conn.execute('DELETE FROM books_series_link WHERE book=?',(id,))
self.conn.execute('DELETE FROM series WHERE (SELECT COUNT(id) FROM books_series_link WHERE series=series.id) < 1')
@@ -1075,7 +1079,7 @@ class LibraryDatabase2(LibraryDatabase):
self.data.set(id, FIELD_MAP['series'], series, row_is_id=True)
if notify:
self.notify('metadata', [id])
-
+
def set_series_index(self, id, idx, notify=True):
if idx is None:
idx = 1
@@ -1091,7 +1095,7 @@ class LibraryDatabase2(LibraryDatabase):
self.data.set(id, FIELD_MAP['series_index'], int(idx), row_is_id=True)
if notify:
self.notify('metadata', [id])
-
+
def set_rating(self, id, rating, notify=True):
rating = int(rating)
self.conn.execute('DELETE FROM books_ratings_link WHERE book=?',(id,))
@@ -1102,7 +1106,7 @@ class LibraryDatabase2(LibraryDatabase):
self.data.set(id, FIELD_MAP['rating'], rating, row_is_id=True)
if notify:
self.notify('metadata', [id])
-
+
def set_comment(self, id, text, notify=True):
self.conn.execute('DELETE FROM comments WHERE book=?', (id,))
self.conn.execute('INSERT INTO comments(book,text) VALUES (?,?)', (id, text))
@@ -1110,21 +1114,21 @@ class LibraryDatabase2(LibraryDatabase):
self.data.set(id, FIELD_MAP['comments'], text, row_is_id=True)
if notify:
self.notify('metadata', [id])
-
+
def set_author_sort(self, id, sort, notify=True):
self.conn.execute('UPDATE books SET author_sort=? WHERE id=?', (sort, id))
self.conn.commit()
self.data.set(id, FIELD_MAP['author_sort'], sort, row_is_id=True)
if notify:
self.notify('metadata', [id])
-
+
def set_isbn(self, id, isbn, notify=True):
self.conn.execute('UPDATE books SET isbn=? WHERE id=?', (isbn, id))
self.conn.commit()
self.data.set(id, FIELD_MAP['isbn'], isbn, row_is_id=True)
if notify:
self.notify('metadata', [id])
-
+
def add_news(self, path, recipe):
format = os.path.splitext(path)[1][1:].lower()
stream = path if hasattr(path, 'read') else open(path, 'rb')
@@ -1133,21 +1137,21 @@ class LibraryDatabase2(LibraryDatabase):
stream.seek(0)
mi.series_index = 1
mi.tags = [_('News'), recipe.title]
- obj = self.conn.execute('INSERT INTO books(title, author_sort) VALUES (?, ?)',
+ obj = self.conn.execute('INSERT INTO books(title, author_sort) VALUES (?, ?)',
(mi.title, mi.authors[0]))
id = obj.lastrowid
self.data.books_added([id], self.conn)
self.set_path(id, index_is_id=True)
self.conn.commit()
self.set_metadata(id, mi)
-
+
self.add_format(id, format, stream, index_is_id=True)
if not hasattr(path, 'read'):
stream.close()
self.conn.commit()
self.data.refresh_ids(self.conn, [id]) # Needed to update format list and size
return id
-
+
def run_import_plugins(self, path_or_stream, format):
format = format.lower()
if hasattr(path_or_stream, 'seek'):
@@ -1159,7 +1163,7 @@ class LibraryDatabase2(LibraryDatabase):
else:
path = path_or_stream
return run_plugins_on_import(path, format)
-
+
def add_books(self, paths, formats, metadata, uris=[], add_duplicates=True):
'''
Add a book to the database. The result cache is not updated.
@@ -1185,7 +1189,7 @@ class LibraryDatabase2(LibraryDatabase):
aus = aus.decode(preferred_encoding, 'replace')
if isinstance(title, str):
title = title.decode(preferred_encoding)
- obj = self.conn.execute('INSERT INTO books(title, uri, series_index, author_sort) VALUES (?, ?, ?, ?)',
+ obj = self.conn.execute('INSERT INTO books(title, uri, series_index, author_sort) VALUES (?, ?, ?, ?)',
(title, uri, series_index, aus))
id = obj.lastrowid
self.data.books_added([id], self.conn)
@@ -1207,7 +1211,7 @@ class LibraryDatabase2(LibraryDatabase):
uris = list(duplicate[3] for duplicate in duplicates)
return (paths, formats, metadata, uris), len(ids)
return None, len(ids)
-
+
def import_book(self, mi, formats, notify=True):
series_index = 1 if mi.series_index is None else mi.series_index
if not mi.title:
@@ -1219,7 +1223,7 @@ class LibraryDatabase2(LibraryDatabase):
aus = aus.decode(preferred_encoding, 'replace')
title = mi.title if isinstance(mi.title, unicode) else \
mi.title.decode(preferred_encoding, 'replace')
- obj = self.conn.execute('INSERT INTO books(title, uri, series_index, author_sort) VALUES (?, ?, ?, ?)',
+ obj = self.conn.execute('INSERT INTO books(title, uri, series_index, author_sort) VALUES (?, ?, ?, ?)',
(title, None, series_index, aus))
id = obj.lastrowid
self.data.books_added([id], self.conn)
@@ -1234,7 +1238,7 @@ class LibraryDatabase2(LibraryDatabase):
self.data.refresh_ids(self.conn, [id]) # Needed to update format list and size
if notify:
self.notify('add', [id])
-
+
def move_library_to(self, newloc, progress=None):
header = _(u'Copying books to %s')%newloc
books = self.conn.get('SELECT id, path, title FROM books')
@@ -1263,7 +1267,7 @@ class LibraryDatabase2(LibraryDatabase):
old_dirs.add(srcdir)
if progress is not None:
progress.setValue(i+1)
-
+
dbpath = os.path.join(newloc, os.path.basename(self.dbpath))
shutil.copyfile(self.dbpath, dbpath)
opath = self.dbpath
@@ -1279,22 +1283,22 @@ class LibraryDatabase2(LibraryDatabase):
if progress is not None:
progress.reset()
progress.hide()
-
-
+
+
def __iter__(self):
for record in self.data._data:
if record is not None:
yield record
-
+
def all_ids(self):
for i in iter(self):
yield i['id']
-
+
def get_data_as_dict(self, prefix=None, authors_as_string=False):
'''
Return all metadata stored in the database as a dict. Includes paths to
the cover and each format.
-
+
:param prefix: The prefix for all paths. By default, the prefix is the absolute path
to the library folder.
'''
@@ -1325,9 +1329,9 @@ class LibraryDatabase2(LibraryDatabase):
x['formats'].append(path%fmt.lower())
x['fmt_'+fmt.lower()] = path%fmt.lower()
x['available_formats'] = [i.upper() for i in formats.split(',')]
-
+
return data
-
+
def migrate_old(self, db, progress):
header = _(u'Migrating old database to ebook library in %s')%self.library_path
progress.setValue(0)
@@ -1338,23 +1342,23 @@ class LibraryDatabase2(LibraryDatabase):
books = db.conn.get('SELECT id, title, sort, timestamp, uri, series_index, author_sort, isbn FROM books ORDER BY id ASC')
progress.setAutoReset(False)
progress.setRange(0, len(books))
-
+
for book in books:
self.conn.execute('INSERT INTO books(id, title, sort, timestamp, uri, series_index, author_sort, isbn) VALUES(?, ?, ?, ?, ?, ?, ?, ?);', book)
-
+
tables = '''
-authors ratings tags series books_tags_link
+authors ratings tags series books_tags_link
comments publishers
-books_authors_link conversion_options
-books_publishers_link
-books_ratings_link
+books_authors_link conversion_options
+books_publishers_link
+books_ratings_link
books_series_link feeds
'''.split()
for table in tables:
- rows = db.conn.get('SELECT * FROM %s ORDER BY id ASC'%table)
+ rows = db.conn.get('SELECT * FROM %s ORDER BY id ASC'%table)
for row in rows:
self.conn.execute('INSERT INTO %s VALUES(%s)'%(table, ','.join(repeat('?', len(row)))), row)
-
+
self.conn.commit()
self.refresh('timestamp', True)
for i, book in enumerate(books):
@@ -1379,7 +1383,7 @@ books_series_link feeds
self.vacuum()
progress.reset()
return len(books)
-
+
def export_to_dir(self, dir, indices, byauthor=False, single_dir=False,
index_is_id=False, callback=None):
if not os.path.exists(dir):
@@ -1425,7 +1429,7 @@ books_series_link feeds
opf = OPFCreator(base, mi)
opf.render(f)
f.close()
-
+
fmts = self.formats(idx, index_is_id=index_is_id)
if not fmts:
fmts = ''
@@ -1449,7 +1453,7 @@ books_series_link feeds
if not callback(count, mi.title):
return
- def export_single_format_to_dir(self, dir, indices, format,
+ def export_single_format_to_dir(self, dir, indices, format,
index_is_id=False, callback=None):
dir = os.path.abspath(dir)
if not index_is_id:
@@ -1476,7 +1480,7 @@ books_series_link feeds
f.write(data)
f.seek(0)
try:
- set_metadata(f, self.get_metadata(id, index_is_id=True, get_cover=True),
+ set_metadata(f, self.get_metadata(id, index_is_id=True, get_cover=True),
stream_type=format.lower())
except:
pass
@@ -1485,7 +1489,7 @@ books_series_link feeds
if not callback(count, title):
break
return failures
-
+
def find_books_in_directory(self, dirpath, single_book_per_directory):
dirpath = os.path.abspath(dirpath)
if single_book_per_directory:
@@ -1514,12 +1518,12 @@ books_series_link feeds
ext = ext[1:].lower()
if ext not in BOOK_EXTENSIONS:
continue
-
+
key = os.path.splitext(path)[0]
if not books.has_key(key):
books[key] = []
books[key].append(path)
-
+
for formats in books.values():
yield formats
@@ -1543,7 +1547,7 @@ books_series_link feeds
formats = self.find_books_in_directory(dirpath, True)
if not formats:
return
-
+
mi = metadata_from_formats(formats)
if mi.title is None:
return
@@ -1552,7 +1556,7 @@ books_series_link feeds
self.import_book(mi, formats)
if callable(callback):
callback(mi.title)
-
+
def recursive_import(self, root, single_book_per_directory=True, callback=None):
root = os.path.abspath(root)
duplicates = []
@@ -1565,8 +1569,8 @@ books_series_link feeds
if callable(callback):
if callback(''):
break
-
+
return duplicates
-
+
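
Note on the refresh_ids() change in database2.py above: when a book id no longer has a row in the meta view, conn.get(...) returns an empty list and indexing [0] raises IndexError; the method now returns None in that case instead of letting the exception propagate. A small sketch of the guarded lookup (refresh_one is a hypothetical helper; conn.get is calibre's own wrapper as used in the diff):

    def refresh_one(conn, data, book_id):
        # Equivalent to the try/except IndexError added in refresh_ids():
        # bail out with None when the id has vanished from the meta view.
        rows = conn.get('SELECT * from meta WHERE id=?', (book_id,))
        if not rows:
            return None
        data[book_id] = rows[0]
        return book_id
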
diff --git a/src/calibre/www/apps/feedjack/fjlib.py b/src/calibre/www/apps/feedjack/fjlib.py
index e13fd5e5af..2801d59a70 100644
--- a/src/calibre/www/apps/feedjack/fjlib.py
+++ b/src/calibre/www/apps/feedjack/fjlib.py
@@ -128,7 +128,7 @@ def get_extra_content(site, sfeeds_ids, ctx):
def get_posts_tags(object_list, sfeeds_obj, user_id, tag_name):
""" Adds a qtags property in every post object in a page.
- Use "qtags" instead of "tags" in templates to avoid innecesary DB hits.
+ Use "qtags" instead of "tags" in templates to avoid unnecessary DB hits.
"""
tagd = {}
user_obj = None