Mirror of https://github.com/kovidgoyal/calibre.git (synced 2025-07-09 03:04:10 -04:00)
IGN:Misc. minor fixes. Also insert <mbp:section> tags around the content of every individual file when it is merged into the MOBI stream
This commit is contained in:
parent 01b1a28392
commit a40d47956f
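The <mbp:section> part of this commit lands in the MOBI serializer (see the Serializer.serialize_item hunk below): each spine item merged into the single MOBI text stream is now wrapped in its own section. A minimal sketch of that wrapping idea, assuming a simple (href, markup) item shape rather than calibre's real OEB objects (names here are illustrative only):

    from io import BytesIO

    def serialize_items(items):
        """Sketch: concatenate per-file markup into one MOBI text stream,
        wrapping each file in <mbp:section> tags and remembering the byte
        offset where each file's content starts (used to resolve links)."""
        buf = BytesIO()
        id_offsets = {}
        for href, body in items:           # illustrative item shape, not calibre's
            buf.write(b'<mbp:section>')
            id_offsets[href] = buf.tell()  # offset of this file inside the stream
            buf.write(body.encode('utf-8'))
            buf.write(b'</mbp:section><mbp:pagebreak/>')
        return buf.getvalue(), id_offsets

    stream, offsets = serialize_items([('ch1.html', '<p>One</p>'),
                                       ('ch2.html', '<p>Two</p>')])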
@@ -24,7 +24,7 @@ class File(object):
            path = path[:-1]
        self.path = path
        self.name = os.path.basename(path)


class PRS505(Device):
    VENDOR_ID = 0x054c #: SONY Vendor Id
@@ -33,17 +33,17 @@ class PRS505(Device):
    PRODUCT_NAME = 'PRS-505'
    VENDOR_NAME = 'SONY'
    FORMATS = ['epub', 'lrf', 'lrx', 'rtf', 'pdf', 'txt']

    MEDIA_XML = 'database/cache/media.xml'
    CACHE_XML = 'Sony Reader/database/cache.xml'

    MAIN_MEMORY_VOLUME_LABEL = 'Sony Reader Main Memory'
    STORAGE_CARD_VOLUME_LABEL = 'Sony Reader Storage Card'

    OSX_NAME = 'Sony PRS-505'

    CARD_PATH_PREFIX = __appname__

    FDI_TEMPLATE = \
'''
  <device>
@@ -75,11 +75,11 @@ class PRS505(Device):
      </match>
  </device>
'''.replace('%(app)s', __appname__)


    def __init__(self, log_packets=False):
        self._main_prefix = self._card_prefix = None

    @classmethod
    def get_fdi(cls):
        return cls.FDI_TEMPLATE%dict(
@@ -90,7 +90,7 @@ class PRS505(Device):
                main_memory=cls.MAIN_MEMORY_VOLUME_LABEL,
                storage_card=cls.STORAGE_CARD_VOLUME_LABEL,
                )

    @classmethod
    def is_device(cls, device_id):
        device_id = device_id.upper()
@@ -104,7 +104,7 @@ class PRS505(Device):
               'PID_'+pid in device_id:
                return True
        return False

    @classmethod
    def get_osx_mountpoints(cls, raw=None):
        if raw is None:
@@ -112,7 +112,7 @@ class PRS505(Device):
            if not os.access(ioreg, os.X_OK):
                ioreg = 'ioreg'
            raw = subprocess.Popen((ioreg+' -w 0 -S -c IOMedia').split(),
-                                  stdout=subprocess.PIPE).stdout.read()
+                                  stdout=subprocess.PIPE).communicate()[0]
        lines = raw.splitlines()
        names = {}
        for i, line in enumerate(lines):
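A note on the communicate() change above (the rationale is not stated in the commit, so this is an assumption): reading proc.stdout directly returns the output but does not wait for or reap the child process, and it can deadlock when more than one pipe is attached; Popen.communicate() collects the output and waits for the process to exit. A minimal sketch of the pattern (ioreg exists only on macOS):

    import subprocess

    # Run ioreg and capture its output; communicate() reads stdout to EOF
    # and waits for the child to terminate, so no zombie process is left.
    proc = subprocess.Popen('ioreg -w 0 -S -c IOMedia'.split(),
                            stdout=subprocess.PIPE)
    raw = proc.communicate()[0]   # bytes of stdout
    for line in raw.splitlines():
        pass  # parse mount point names here, as get_osx_mountpoints() does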
@@ -130,9 +130,9 @@ class PRS505(Device):
                break
        return names


    def open_osx(self):
        mount = subprocess.Popen('mount', shell=True,
                                 stdout=subprocess.PIPE).stdout.read()
        names = self.get_osx_mountpoints()
        dev_pat = r'/dev/%s(\w*)\s+on\s+([^\(]+)\s+'
@@ -144,12 +144,12 @@ class PRS505(Device):
        if card_pat is not None:
            card_pat = dev_pat%card_pat
            self._card_prefix = re.search(card_pat, mount).group(2) + os.sep


    def open_windows(self):
        time.sleep(6)
        drives = []
        wmi = __import__('wmi', globals(), locals(), [], -1)
        c = wmi.WMI()
        for drive in c.Win32_DiskDrive():
            if self.__class__.is_device(str(drive.PNPDeviceID)):
@@ -162,22 +162,22 @@ class PRS505(Device):
                    drives.append((drive.Index, prefix))
                except IndexError:
                    continue


        if not drives:
            raise DeviceError(_('Unable to detect the %s disk drive. Try rebooting.')%self.__class__.__name__)

        drives.sort(cmp=lambda a, b: cmp(a[0], b[0]))
        self._main_prefix = drives[0][1]
        if len(drives) > 1:
            self._card_prefix = drives[1][1]


    def open_linux(self):
        import dbus
        bus = dbus.SystemBus()
        hm = dbus.Interface(bus.get_object("org.freedesktop.Hal", "/org/freedesktop/Hal/Manager"), "org.freedesktop.Hal.Manager")

        def conditional_mount(dev, main_mem=True):
            mmo = bus.get_object("org.freedesktop.Hal", dev)
            label = mmo.GetPropertyString('volume.label', dbus_interface='org.freedesktop.Hal.Device')
@@ -186,11 +186,11 @@ class PRS505(Device):
            fstype = mmo.GetPropertyString('volume.fstype', dbus_interface='org.freedesktop.Hal.Device')
            if is_mounted:
                return str(mount_point)
            mmo.Mount(label, fstype, ['umask=077', 'uid='+str(os.getuid()), 'sync'],
                      dbus_interface='org.freedesktop.Hal.Device.Volume')
            return os.path.normpath('/media/'+label)+'/'


        mm = hm.FindDeviceStringMatch(__appname__+'.mainvolume', self.__class__.__name__)
        if not mm:
            raise DeviceError(_('Unable to detect the %s disk drive. Try rebooting.')%(self.__class__.__name__,))
@@ -201,21 +201,21 @@ class PRS505(Device):
                break
            except dbus.exceptions.DBusException:
                continue


        if not self._main_prefix:
            raise DeviceError('Could not open device for reading. Try a reboot.')

        self._card_prefix = None
        cards = hm.FindDeviceStringMatch(__appname__+'.cardvolume', self.__class__.__name__)
        keys = []
        for card in cards:
            keys.append(int('UC_SD' in bus.get_object("org.freedesktop.Hal", card).GetPropertyString('info.parent', dbus_interface='org.freedesktop.Hal.Device')))

        cards = zip(cards, keys)
        cards.sort(cmp=lambda x, y: cmp(x[1], y[1]))
        cards = [i[0] for i in cards]

        for dev in cards:
            try:
                self._card_prefix = conditional_mount(dev, False)+os.sep
@@ -224,8 +224,8 @@ class PRS505(Device):
                import traceback
                print traceback
                continue


    def open(self):
        time.sleep(5)
        self._main_prefix = self._card_prefix = None
@@ -262,16 +262,16 @@ class PRS505(Device):
            self._card_prefix = None
            import traceback
            traceback.print_exc()

    def set_progress_reporter(self, pr):
        self.report_progress = pr

    def get_device_information(self, end_session=True):
        return (self.__class__.__name__, '', '', '')

    def card_prefix(self, end_session=True):
        return self._card_prefix

    @classmethod
    def _windows_space(cls, prefix):
        if prefix is None:
@@ -288,7 +288,7 @@ class PRS505(Device):
            else: raise
        mult = sectors_per_cluster * bytes_per_sector
        return total_clusters * mult, free_clusters * mult

    def total_space(self, end_session=True):
        msz = csz = 0
        if not iswindows:
@@ -301,9 +301,9 @@ class PRS505(Device):
        else:
            msz = self._windows_space(self._main_prefix)[0]
            csz = self._windows_space(self._card_prefix)[0]

        return (msz, 0, csz)

    def free_space(self, end_session=True):
        msz = csz = 0
        if not iswindows:
@@ -316,9 +316,9 @@ class PRS505(Device):
        else:
            msz = self._windows_space(self._main_prefix)[1]
            csz = self._windows_space(self._card_prefix)[1]

        return (msz, 0, csz)

    def books(self, oncard=False, end_session=True):
        if oncard and self._card_prefix is None:
            return []
@@ -331,7 +331,7 @@ class PRS505(Device):
            if os.path.exists(path):
                os.unlink(path)
        return bl

    def munge_path(self, path):
        if path.startswith('/') and not (path.startswith(self._main_prefix) or \
            (self._card_prefix and path.startswith(self._card_prefix))):
@@ -339,12 +339,12 @@ class PRS505(Device):
        elif path.startswith('card:'):
            path = path.replace('card:', self._card_prefix[:-1])
        return path

    def mkdir(self, path, end_session=True):
        """ Make directory """
        path = self.munge_path(path)
        os.mkdir(path)

    def list(self, path, recurse=False, end_session=True, munge=True):
        if munge:
            path = self.munge_path(path)
@@ -356,12 +356,12 @@ class PRS505(Device):
            if recurse and _file.is_dir:
                dirs[len(dirs):] = self.list(_file.path, recurse=True, munge=False)
        return dirs

    def get_file(self, path, outfile, end_session=True):
        path = self.munge_path(path)
        src = open(path, 'rb')
        shutil.copyfileobj(src, outfile, 10*1024*1024)

    def put_file(self, infile, path, replace_file=False, end_session=True):
        path = self.munge_path(path)
        if os.path.isdir(path):
@@ -372,25 +372,25 @@ class PRS505(Device):
        shutil.copyfileobj(infile, dest, 10*1024*1024)
        dest.flush()
        dest.close()

    def rm(self, path, end_session=True):
        path = self.munge_path(path)
        os.unlink(path)

    def touch(self, path, end_session=True):
        path = self.munge_path(path)
        if not os.path.exists(path):
            open(path, 'w').close()
        if not os.path.isdir(path):
            os.utime(path, None)

    def upload_books(self, files, names, on_card=False, end_session=True,
                     metadata=None):
        if on_card and not self._card_prefix:
            raise ValueError(_('The reader has no storage card connected.'))
        path = os.path.join(self._card_prefix, self.CARD_PATH_PREFIX) if on_card \
            else os.path.join(self._main_prefix, 'database', 'media', 'books')

        def get_size(obj):
            if hasattr(obj, 'seek'):
                obj.seek(0, 2)
@@ -398,27 +398,27 @@ class PRS505(Device):
                obj.seek(0)
                return size
            return os.path.getsize(obj)

        sizes = map(get_size, files)
        size = sum(sizes)
        space = self.free_space()
        mspace = space[0]
        cspace = space[2]
        if on_card and size > cspace - 1024*1024:
            raise FreeSpaceError("There is insufficient free space "+\
                                 "on the storage card")
        if not on_card and size > mspace - 2*1024*1024:
            raise FreeSpaceError("There is insufficient free space " +\
                                 "in main memory")

        paths, ctimes = [], []

        names = iter(names)
        for infile in files:
            close = False
            if not hasattr(infile, 'read'):
                infile, close = open(infile, 'rb'), True
            infile.seek(0)
            name = names.next()
            paths.append(os.path.join(path, name))
            if not os.path.exists(os.path.dirname(paths[-1])):
@@ -428,7 +428,7 @@ class PRS505(Device):
                infile.close()
            ctimes.append(os.path.getctime(paths[-1]))
        return zip(paths, sizes, ctimes, cycle([on_card]))

    @classmethod
    def add_books_to_metadata(cls, locations, metadata, booklists):
        metadata = iter(metadata)
@@ -441,12 +441,12 @@ class PRS505(Device):
            name = name.replace('//', '/')
            booklists[on_card].add_book(info, name, *location[1:-1])
        fix_ids(*booklists)

    def delete_books(self, paths, end_session=True):
        for path in paths:
            if os.path.exists(path):
                os.unlink(path)

    @classmethod
    def remove_books_from_metadata(cls, paths, booklists):
        for path in paths:
@@ -454,7 +454,7 @@ class PRS505(Device):
            if hasattr(bl, 'remove_book'):
                bl.remove_book(path)
        fix_ids(*booklists)

    def sync_booklists(self, booklists, end_session=True):
        fix_ids(*booklists)
        if not os.path.exists(self._main_prefix):
@@ -468,9 +468,9 @@ class PRS505(Device):
            f = open(self._card_prefix + self.__class__.CACHE_XML, 'wb')
            booklists[1].write(f)
            f.close()




def main(args=sys.argv):
    return 0
@@ -190,7 +190,7 @@ class Device(_Device):

        self._main_prefix = drives.get('main')
        self._card_prefix = drives.get('card')

        if not self._main_prefix:
            raise DeviceError(_('Unable to detect the %s disk drive. Try rebooting.') % self.__class__.__name__)

@@ -200,7 +200,7 @@ class Device(_Device):
            if not os.access(ioreg, os.X_OK):
                ioreg = 'ioreg'
            raw = subprocess.Popen((ioreg+' -w 0 -S -c IOMedia').split(),
-                                  stdout=subprocess.PIPE).stdout.read()
+                                  stdout=subprocess.PIPE).communicate()[0]
        lines = raw.splitlines()
        names = {}

@@ -79,7 +79,7 @@ class FormatState(object):
class MobiMLizer(object):
    def __init__(self, ignore_tables=False):
        self.ignore_tables = ignore_tables

    def transform(self, oeb, context):
        oeb.logger.info('Converting XHTML to Mobipocket markup...')
        self.oeb = oeb
@@ -98,10 +98,10 @@ class MobiMLizer(object):
                del oeb.guide['cover']
            item = oeb.manifest.hrefs[href]
            if item.spine_position is not None:
                oeb.spine.remove(item)
            if item.media_type in OEB_DOCS:
                self.oeb.manifest.remove(item)

    def mobimlize_spine(self):
        for item in self.oeb.spine:
            stylizer = Stylizer(item.data, item.href, self.oeb, self.profile)
@@ -134,7 +134,7 @@ class MobiMLizer(object):
            if line:
                result.append(line)
        return result

    def mobimlize_content(self, tag, text, bstate, istates):
        if text or tag != 'br':
            bstate.content = True
@@ -239,7 +239,7 @@ class MobiMLizer(object):
                last.tail = (last.tail or '') + item
            else:
                inline.append(item)

    def mobimlize_elem(self, elem, stylizer, bstate, istates):
        if not isinstance(elem.tag, basestring) \
           or namespace(elem.tag) != XHTML_NS:
@@ -211,12 +211,13 @@ class Serializer(object):

    def serialize_item(self, item):
        buffer = self.buffer
+        buffer.write('<mbp:section>')
        if not item.linear:
            self.breaks.append(buffer.tell() - 1)
        self.id_offsets[item.href] = buffer.tell()
        for elem in item.data.find(XHTML('body')):
            self.serialize_elem(elem, item)
-        buffer.write('<mbp:pagebreak/>')
+        buffer.write('</mbp:section></mbp:pagebreak>')

    def serialize_elem(self, elem, item, nsrmap=NSRMAP):
        buffer = self.buffer
@@ -93,7 +93,7 @@ class DateDelegate(QStyledItemDelegate):

    def createEditor(self, parent, option, index):
        qde = QStyledItemDelegate.createEditor(self, parent, option, index)
-        qde.setDisplayFormat('MM/dd/yyyy')
+        qde.setDisplayFormat(unicode(qde.displayFormat()).replace('yy', 'yyyy'))
        qde.setMinimumDate(QDate(101,1,1))
        qde.setCalendarPopup(True)
        return qde
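The change above stops hard-coding the US-style 'MM/dd/yyyy' editor format: it keeps whatever display format the default QDateEdit editor was created with (i.e. the user's locale) and only widens a two-digit year to four digits. A small sketch of just the string transformation (plain Python, no Qt required; assumes the locale format uses 'yy' rather than 'yyyy'):

    def widen_year(fmt):
        """Widen a 2-digit year field ('yy') to 4 digits ('yyyy'), leaving
        the locale's day/month ordering untouched."""
        return fmt.replace('yy', 'yyyy')

    assert widen_year('M/d/yy') == 'M/d/yyyy'
    assert widen_year('dd.MM.yy') == 'dd.MM.yyyy'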
@@ -635,7 +635,8 @@ class BooksView(TableView):

    def columns_sorted(self, rating_col, timestamp_col):
        for i in range(self.model().columnCount(None)):
-            if self.itemDelegateForColumn(i) == self.rating_delegate:
+            if self.itemDelegateForColumn(i) in (self.rating_delegate,
+                                                 self.timestamp_delegate):
                self.setItemDelegateForColumn(i, self.itemDelegate())
        if rating_col > -1:
            self.setItemDelegateForColumn(rating_col, self.rating_delegate)
@@ -706,7 +707,7 @@ class BooksView(TableView):

    def close(self):
        self._model.close()

    def set_editable(self, editable):
        self._model.set_editable(editable)

@@ -999,10 +1000,10 @@ class DeviceBooksModel(BooksModel):
            self.sort(col, self.sorted_on[1])
            done = True
        return done

    def set_editable(self, editable):
        self.editable = editable


class SearchBox(QLineEdit):

@@ -33,14 +33,14 @@ from calibre.ebooks import BOOK_EXTENSIONS

copyfile = os.link if hasattr(os, 'link') else shutil.copyfile

FIELD_MAP = {'id':0, 'title':1, 'authors':2, 'publisher':3, 'rating':4, 'timestamp':5,
             'size':6, 'tags':7, 'comments':8, 'series':9, 'series_index':10,
             'sort':11, 'author_sort':12, 'formats':13, 'isbn':14, 'path':15}
INDEX_MAP = dict(zip(FIELD_MAP.values(), FIELD_MAP.keys()))


class CoverCache(QThread):

    def __init__(self, library_path, parent=None):
        QThread.__init__(self, parent)
        self.library_path = library_path
@@ -52,7 +52,7 @@ class CoverCache(QThread):
        self.cache_lock = QReadWriteLock()
        self.id_map_stale = True
        self.keep_running = True

    def build_id_map(self):
        self.id_map_lock.lockForWrite()
        self.id_map = {}
@ -65,8 +65,8 @@ class CoverCache(QThread):
|
|||||||
continue
|
continue
|
||||||
self.id_map_lock.unlock()
|
self.id_map_lock.unlock()
|
||||||
self.id_map_stale = False
|
self.id_map_stale = False
|
||||||
|
|
||||||
|
|
||||||
def set_cache(self, ids):
|
def set_cache(self, ids):
|
||||||
self.cache_lock.lockForWrite()
|
self.cache_lock.lockForWrite()
|
||||||
already_loaded = set([])
|
already_loaded = set([])
|
||||||
@ -80,8 +80,8 @@ class CoverCache(QThread):
|
|||||||
self.load_queue_lock.lockForWrite()
|
self.load_queue_lock.lockForWrite()
|
||||||
self.load_queue = collections.deque(ids)
|
self.load_queue = collections.deque(ids)
|
||||||
self.load_queue_lock.unlock()
|
self.load_queue_lock.unlock()
|
||||||
|
|
||||||
|
|
||||||
def run(self):
|
def run(self):
|
||||||
while self.keep_running:
|
while self.keep_running:
|
||||||
if self.id_map is None or self.id_map_stale:
|
if self.id_map is None or self.id_map_stale:
|
||||||
@ -94,7 +94,7 @@ class CoverCache(QThread):
|
|||||||
break
|
break
|
||||||
finally:
|
finally:
|
||||||
self.load_queue_lock.unlock()
|
self.load_queue_lock.unlock()
|
||||||
|
|
||||||
self.cache_lock.lockForRead()
|
self.cache_lock.lockForRead()
|
||||||
need = True
|
need = True
|
||||||
if id in self.cache.keys():
|
if id in self.cache.keys():
|
||||||
@ -121,19 +121,19 @@ class CoverCache(QThread):
|
|||||||
self.cache_lock.lockForWrite()
|
self.cache_lock.lockForWrite()
|
||||||
self.cache[id] = img
|
self.cache[id] = img
|
||||||
self.cache_lock.unlock()
|
self.cache_lock.unlock()
|
||||||
|
|
||||||
self.sleep(1)
|
self.sleep(1)
|
||||||
|
|
||||||
def stop(self):
|
def stop(self):
|
||||||
self.keep_running = False
|
self.keep_running = False
|
||||||
|
|
||||||
def cover(self, id):
|
def cover(self, id):
|
||||||
val = None
|
val = None
|
||||||
if self.cache_lock.tryLockForRead(50):
|
if self.cache_lock.tryLockForRead(50):
|
||||||
val = self.cache.get(id, None)
|
val = self.cache.get(id, None)
|
||||||
self.cache_lock.unlock()
|
self.cache_lock.unlock()
|
||||||
return val
|
return val
|
||||||
|
|
||||||
def clear_cache(self):
|
def clear_cache(self):
|
||||||
self.cache_lock.lockForWrite()
|
self.cache_lock.lockForWrite()
|
||||||
self.cache = {}
|
self.cache = {}
|
||||||
@ -148,24 +148,24 @@ class CoverCache(QThread):
|
|||||||
for id in ids:
|
for id in ids:
|
||||||
self.load_queue.appendleft(id)
|
self.load_queue.appendleft(id)
|
||||||
self.load_queue_lock.unlock()
|
self.load_queue_lock.unlock()
|
||||||
|
|
||||||
class ResultCache(SearchQueryParser):
|
class ResultCache(SearchQueryParser):
|
||||||
|
|
||||||
'''
|
'''
|
||||||
Stores sorted and filtered metadata in memory.
|
Stores sorted and filtered metadata in memory.
|
||||||
'''
|
'''
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
self._map = self._map_filtered = self._data = []
|
self._map = self._map_filtered = self._data = []
|
||||||
self.first_sort = True
|
self.first_sort = True
|
||||||
SearchQueryParser.__init__(self)
|
SearchQueryParser.__init__(self)
|
||||||
|
|
||||||
def __getitem__(self, row):
|
def __getitem__(self, row):
|
||||||
return self._data[self._map_filtered[row]]
|
return self._data[self._map_filtered[row]]
|
||||||
|
|
||||||
def __len__(self):
|
def __len__(self):
|
||||||
return len(self._map_filtered)
|
return len(self._map_filtered)
|
||||||
|
|
||||||
def __iter__(self):
|
def __iter__(self):
|
||||||
for id in self._map_filtered:
|
for id in self._map_filtered:
|
||||||
yield self._data[id]
|
yield self._data[id]
|
||||||
@ -194,45 +194,49 @@ class ResultCache(SearchQueryParser):
|
|||||||
matches.add(item[0])
|
matches.add(item[0])
|
||||||
break
|
break
|
||||||
return matches
|
return matches
|
||||||
|
|
||||||
def remove(self, id):
|
def remove(self, id):
|
||||||
self._data[id] = None
|
self._data[id] = None
|
||||||
if id in self._map:
|
if id in self._map:
|
||||||
self._map.remove(id)
|
self._map.remove(id)
|
||||||
if id in self._map_filtered:
|
if id in self._map_filtered:
|
||||||
self._map_filtered.remove(id)
|
self._map_filtered.remove(id)
|
||||||
|
|
||||||
def set(self, row, col, val, row_is_id=False):
|
def set(self, row, col, val, row_is_id=False):
|
||||||
id = row if row_is_id else self._map_filtered[row]
|
id = row if row_is_id else self._map_filtered[row]
|
||||||
self._data[id][col] = val
|
self._data[id][col] = val
|
||||||
|
|
||||||
def index(self, id, cache=False):
|
def index(self, id, cache=False):
|
||||||
x = self._map if cache else self._map_filtered
|
x = self._map if cache else self._map_filtered
|
||||||
return x.index(id)
|
return x.index(id)
|
||||||
|
|
||||||
def row(self, id):
|
def row(self, id):
|
||||||
return self.index(id)
|
return self.index(id)
|
||||||
|
|
||||||
def has_id(self, id):
|
def has_id(self, id):
|
||||||
try:
|
try:
|
||||||
return self._data[id] is not None
|
return self._data[id] is not None
|
||||||
except IndexError:
|
except IndexError:
|
||||||
pass
|
pass
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def refresh_ids(self, conn, ids):
|
def refresh_ids(self, conn, ids):
|
||||||
'''
|
'''
|
||||||
Refresh the data in the cache for books identified by ids.
|
Refresh the data in the cache for books identified by ids.
|
||||||
Returns a list of affected rows or None if the rows are filtered.
|
Returns a list of affected rows or None if the rows are filtered.
|
||||||
'''
|
'''
|
||||||
for id in ids:
|
for id in ids:
|
||||||
self._data[id] = conn.get('SELECT * from meta WHERE id=?', (id,))[0]
|
try:
|
||||||
|
self._data[id] = conn.get('SELECT * from meta WHERE id=?',
|
||||||
|
(id,))[0]
|
||||||
|
except IndexError:
|
||||||
|
return None
|
||||||
try:
|
try:
|
||||||
return map(self.row, ids)
|
return map(self.row, ids)
|
||||||
except ValueError:
|
except ValueError:
|
||||||
pass
|
pass
|
||||||
return None
|
return None
|
||||||
|
|
||||||
def books_added(self, ids, conn):
|
def books_added(self, ids, conn):
|
||||||
if not ids:
|
if not ids:
|
||||||
return
|
return
|
||||||
@ -241,16 +245,16 @@ class ResultCache(SearchQueryParser):
|
|||||||
self._data[id] = conn.get('SELECT * from meta WHERE id=?', (id,))[0]
|
self._data[id] = conn.get('SELECT * from meta WHERE id=?', (id,))[0]
|
||||||
self._map[0:0] = ids
|
self._map[0:0] = ids
|
||||||
self._map_filtered[0:0] = ids
|
self._map_filtered[0:0] = ids
|
||||||
|
|
||||||
def books_deleted(self, ids):
|
def books_deleted(self, ids):
|
||||||
for id in ids:
|
for id in ids:
|
||||||
self._data[id] = None
|
self._data[id] = None
|
||||||
if id in self._map: self._map.remove(id)
|
if id in self._map: self._map.remove(id)
|
||||||
if id in self._map_filtered: self._map_filtered.remove(id)
|
if id in self._map_filtered: self._map_filtered.remove(id)
|
||||||
|
|
||||||
def count(self):
|
def count(self):
|
||||||
return len(self._map)
|
return len(self._map)
|
||||||
|
|
||||||
def refresh(self, db, field=None, ascending=True):
|
def refresh(self, db, field=None, ascending=True):
|
||||||
temp = db.conn.get('SELECT * FROM meta')
|
temp = db.conn.get('SELECT * FROM meta')
|
||||||
self._data = list(itertools.repeat(None, temp[-1][0]+2)) if temp else []
|
self._data = list(itertools.repeat(None, temp[-1][0]+2)) if temp else []
|
||||||
@ -260,7 +264,7 @@ class ResultCache(SearchQueryParser):
|
|||||||
if field is not None:
|
if field is not None:
|
||||||
self.sort(field, ascending)
|
self.sort(field, ascending)
|
||||||
self._map_filtered = list(self._map)
|
self._map_filtered = list(self._map)
|
||||||
|
|
||||||
def seriescmp(self, x, y):
|
def seriescmp(self, x, y):
|
||||||
try:
|
try:
|
||||||
ans = cmp(self._data[x][9].lower(), self._data[y][9].lower()) if str else\
|
ans = cmp(self._data[x][9].lower(), self._data[y][9].lower()) if str else\
|
||||||
@ -269,7 +273,7 @@ class ResultCache(SearchQueryParser):
|
|||||||
ans = cmp(self._data[x][9], self._data[y][9])
|
ans = cmp(self._data[x][9], self._data[y][9])
|
||||||
if ans != 0: return ans
|
if ans != 0: return ans
|
||||||
return cmp(self._data[x][10], self._data[y][10])
|
return cmp(self._data[x][10], self._data[y][10])
|
||||||
|
|
||||||
def cmp(self, loc, x, y, str=True, subsort=False):
|
def cmp(self, loc, x, y, str=True, subsort=False):
|
||||||
try:
|
try:
|
||||||
ans = cmp(self._data[x][loc].lower(), self._data[y][loc].lower()) if str else\
|
ans = cmp(self._data[x][loc].lower(), self._data[y][loc].lower()) if str else\
|
||||||
@ -279,7 +283,7 @@ class ResultCache(SearchQueryParser):
|
|||||||
if subsort and ans == 0:
|
if subsort and ans == 0:
|
||||||
return cmp(self._data[x][11].lower(), self._data[y][11].lower())
|
return cmp(self._data[x][11].lower(), self._data[y][11].lower())
|
||||||
return ans
|
return ans
|
||||||
|
|
||||||
def sort(self, field, ascending, subsort=False):
|
def sort(self, field, ascending, subsort=False):
|
||||||
field = field.lower().strip()
|
field = field.lower().strip()
|
||||||
if field in ('author', 'tag', 'comment'):
|
if field in ('author', 'tag', 'comment'):
|
||||||
@ -291,28 +295,28 @@ class ResultCache(SearchQueryParser):
|
|||||||
subsort = True
|
subsort = True
|
||||||
self.first_sort = False
|
self.first_sort = False
|
||||||
fcmp = self.seriescmp if field == 'series' else \
|
fcmp = self.seriescmp if field == 'series' else \
|
||||||
functools.partial(self.cmp, FIELD_MAP[field], subsort=subsort,
|
functools.partial(self.cmp, FIELD_MAP[field], subsort=subsort,
|
||||||
str=field not in ('size', 'rating', 'timestamp'))
|
str=field not in ('size', 'rating', 'timestamp'))
|
||||||
|
|
||||||
self._map.sort(cmp=fcmp, reverse=not ascending)
|
self._map.sort(cmp=fcmp, reverse=not ascending)
|
||||||
self._map_filtered = [id for id in self._map if id in self._map_filtered]
|
self._map_filtered = [id for id in self._map if id in self._map_filtered]
|
||||||
|
|
||||||
def search(self, query):
|
def search(self, query):
|
||||||
if not query or not query.strip():
|
if not query or not query.strip():
|
||||||
self._map_filtered = list(self._map)
|
self._map_filtered = list(self._map)
|
||||||
return
|
return
|
||||||
matches = sorted(self.parse(query))
|
matches = sorted(self.parse(query))
|
||||||
self._map_filtered = [id for id in self._map if id in matches]
|
self._map_filtered = [id for id in self._map if id in matches]
|
||||||
|
|
||||||
|
|
||||||
class Tag(unicode):
|
class Tag(unicode):
|
||||||
|
|
||||||
def __new__(cls, *args):
|
def __new__(cls, *args):
|
||||||
obj = super(Tag, cls).__new__(cls, *args)
|
obj = super(Tag, cls).__new__(cls, *args)
|
||||||
obj.count = 0
|
obj.count = 0
|
||||||
obj.state = 0
|
obj.state = 0
|
||||||
return obj
|
return obj
|
||||||
|
|
||||||
def as_string(self):
|
def as_string(self):
|
||||||
return u'[%d] %s'%(self.count, self)
|
return u'[%d] %s'%(self.count, self)
|
||||||
|
|
||||||
@ -324,16 +328,16 @@ class LibraryDatabase2(LibraryDatabase):
|
|||||||
@apply
|
@apply
|
||||||
def user_version():
|
def user_version():
|
||||||
doc = 'The user version of this database'
|
doc = 'The user version of this database'
|
||||||
|
|
||||||
def fget(self):
|
def fget(self):
|
||||||
return self.conn.get('pragma user_version;', all=False)
|
return self.conn.get('pragma user_version;', all=False)
|
||||||
|
|
||||||
def fset(self, val):
|
def fset(self, val):
|
||||||
self.conn.execute('pragma user_version=%d'%int(val))
|
self.conn.execute('pragma user_version=%d'%int(val))
|
||||||
self.conn.commit()
|
self.conn.commit()
|
||||||
|
|
||||||
return property(doc=doc, fget=fget, fset=fset)
|
return property(doc=doc, fget=fget, fset=fset)
|
||||||
|
|
||||||
def connect(self):
|
def connect(self):
|
||||||
if 'win32' in sys.platform and len(self.library_path) + 4*self.PATH_LIMIT + 10 > 259:
|
if 'win32' in sys.platform and len(self.library_path) + 4*self.PATH_LIMIT + 10 > 259:
|
||||||
raise ValueError('Path to library too long. Must be less than %d characters.'%(259-4*self.PATH_LIMIT-10))
|
raise ValueError('Path to library too long. Must be less than %d characters.'%(259-4*self.PATH_LIMIT-10))
|
||||||
@ -343,9 +347,9 @@ class LibraryDatabase2(LibraryDatabase):
|
|||||||
self.conn.close()
|
self.conn.close()
|
||||||
os.remove(self.dbpath)
|
os.remove(self.dbpath)
|
||||||
self.conn = connect(self.dbpath, self.row_factory)
|
self.conn = connect(self.dbpath, self.row_factory)
|
||||||
if self.user_version == 0:
|
if self.user_version == 0:
|
||||||
self.initialize_database()
|
self.initialize_database()
|
||||||
|
|
||||||
def __init__(self, library_path, row_factory=False):
|
def __init__(self, library_path, row_factory=False):
|
||||||
if not os.path.exists(library_path):
|
if not os.path.exists(library_path):
|
||||||
os.makedirs(library_path)
|
os.makedirs(library_path)
|
||||||
@ -358,7 +362,7 @@ class LibraryDatabase2(LibraryDatabase):
|
|||||||
self.connect()
|
self.connect()
|
||||||
self.is_case_sensitive = not iswindows and not isosx and \
|
self.is_case_sensitive = not iswindows and not isosx and \
|
||||||
not os.path.exists(self.dbpath.replace('metadata.db', 'MeTAdAtA.dB'))
|
not os.path.exists(self.dbpath.replace('metadata.db', 'MeTAdAtA.dB'))
|
||||||
# Upgrade database
|
# Upgrade database
|
||||||
while True:
|
while True:
|
||||||
meth = getattr(self, 'upgrade_version_%d'%self.user_version, None)
|
meth = getattr(self, 'upgrade_version_%d'%self.user_version, None)
|
||||||
if meth is None:
|
if meth is None:
|
||||||
@ -368,7 +372,7 @@ class LibraryDatabase2(LibraryDatabase):
|
|||||||
meth()
|
meth()
|
||||||
self.conn.commit()
|
self.conn.commit()
|
||||||
self.user_version += 1
|
self.user_version += 1
|
||||||
|
|
||||||
self.data = ResultCache()
|
self.data = ResultCache()
|
||||||
self.search = self.data.search
|
self.search = self.data.search
|
||||||
self.refresh = functools.partial(self.data.refresh, self)
|
self.refresh = functools.partial(self.data.refresh, self)
|
||||||
@ -378,24 +382,24 @@ class LibraryDatabase2(LibraryDatabase):
|
|||||||
self.row = self.data.row
|
self.row = self.data.row
|
||||||
self.has_id = self.data.has_id
|
self.has_id = self.data.has_id
|
||||||
self.count = self.data.count
|
self.count = self.data.count
|
||||||
|
|
||||||
self.refresh()
|
self.refresh()
|
||||||
|
|
||||||
def get_property(idx, index_is_id=False, loc=-1):
|
def get_property(idx, index_is_id=False, loc=-1):
|
||||||
row = self.data._data[idx] if index_is_id else self.data[idx]
|
row = self.data._data[idx] if index_is_id else self.data[idx]
|
||||||
return row[loc]
|
return row[loc]
|
||||||
|
|
||||||
for prop in ('author_sort', 'authors', 'comment', 'comments', 'isbn',
|
for prop in ('author_sort', 'authors', 'comment', 'comments', 'isbn',
|
||||||
'publisher', 'rating', 'series', 'series_index', 'tags',
|
'publisher', 'rating', 'series', 'series_index', 'tags',
|
||||||
'title', 'timestamp'):
|
'title', 'timestamp'):
|
||||||
setattr(self, prop, functools.partial(get_property,
|
setattr(self, prop, functools.partial(get_property,
|
||||||
loc=FIELD_MAP['comments' if prop == 'comment' else prop]))
|
loc=FIELD_MAP['comments' if prop == 'comment' else prop]))
|
||||||
|
|
||||||
def initialize_database(self):
|
def initialize_database(self):
|
||||||
from calibre.resources import metadata_sqlite
|
from calibre.resources import metadata_sqlite
|
||||||
self.conn.executescript(metadata_sqlite)
|
self.conn.executescript(metadata_sqlite)
|
||||||
self.user_version = 1
|
self.user_version = 1
|
||||||
|
|
||||||
def upgrade_version_1(self):
|
def upgrade_version_1(self):
|
||||||
'''
|
'''
|
||||||
Normalize indices.
|
Normalize indices.
|
||||||
@ -407,7 +411,7 @@ class LibraryDatabase2(LibraryDatabase):
|
|||||||
CREATE INDEX series_idx ON series (name COLLATE NOCASE);
|
CREATE INDEX series_idx ON series (name COLLATE NOCASE);
|
||||||
CREATE INDEX series_sort_idx ON books (series_index, id);
|
CREATE INDEX series_sort_idx ON books (series_index, id);
|
||||||
'''))
|
'''))
|
||||||
|
|
||||||
def upgrade_version_2(self):
|
def upgrade_version_2(self):
|
||||||
''' Fix Foreign key constraints for deleting from link tables. '''
|
''' Fix Foreign key constraints for deleting from link tables. '''
|
||||||
script = textwrap.dedent('''\
|
script = textwrap.dedent('''\
|
||||||
@ -426,7 +430,7 @@ class LibraryDatabase2(LibraryDatabase):
|
|||||||
self.conn.executescript(script%dict(ltable='publishers', table='publishers', ltable_col='publisher'))
|
self.conn.executescript(script%dict(ltable='publishers', table='publishers', ltable_col='publisher'))
|
||||||
self.conn.executescript(script%dict(ltable='tags', table='tags', ltable_col='tag'))
|
self.conn.executescript(script%dict(ltable='tags', table='tags', ltable_col='tag'))
|
||||||
self.conn.executescript(script%dict(ltable='series', table='series', ltable_col='series'))
|
self.conn.executescript(script%dict(ltable='series', table='series', ltable_col='series'))
|
||||||
|
|
||||||
def upgrade_version_3(self):
|
def upgrade_version_3(self):
|
||||||
' Add path to result cache '
|
' Add path to result cache '
|
||||||
self.conn.executescript('''
|
self.conn.executescript('''
|
||||||
@ -450,25 +454,25 @@ class LibraryDatabase2(LibraryDatabase):
|
|||||||
FROM books;
|
FROM books;
|
||||||
''')
|
''')
|
||||||
|
|
||||||
|
|
||||||
def last_modified(self):
|
def last_modified(self):
|
||||||
''' Return last modified time as a UTC datetime object'''
|
''' Return last modified time as a UTC datetime object'''
|
||||||
return datetime.utcfromtimestamp(os.stat(self.dbpath).st_mtime)
|
return datetime.utcfromtimestamp(os.stat(self.dbpath).st_mtime)
|
||||||
|
|
||||||
def path(self, index, index_is_id=False):
|
def path(self, index, index_is_id=False):
|
||||||
'Return the relative path to the directory containing this books files as a unicode string.'
|
'Return the relative path to the directory containing this books files as a unicode string.'
|
||||||
row = self.data._data[index] if index_is_id else self.data[index]
|
row = self.data._data[index] if index_is_id else self.data[index]
|
||||||
return row[FIELD_MAP['path']].replace('/', os.sep)
|
return row[FIELD_MAP['path']].replace('/', os.sep)
|
||||||
|
|
||||||
|
|
||||||
def abspath(self, index, index_is_id=False):
|
def abspath(self, index, index_is_id=False):
|
||||||
'Return the absolute path to the directory containing this books files as a unicode string.'
|
'Return the absolute path to the directory containing this books files as a unicode string.'
|
||||||
path = os.path.join(self.library_path, self.path(index, index_is_id=index_is_id))
|
path = os.path.join(self.library_path, self.path(index, index_is_id=index_is_id))
|
||||||
if not os.path.exists(path):
|
if not os.path.exists(path):
|
||||||
os.makedirs(path)
|
os.makedirs(path)
|
||||||
return path
|
return path
|
||||||
|
|
||||||
|
|
||||||
def construct_path_name(self, id):
|
def construct_path_name(self, id):
|
||||||
'''
|
'''
|
||||||
Construct the directory name for this book based on its metadata.
|
Construct the directory name for this book based on its metadata.
|
||||||
@ -480,7 +484,7 @@ class LibraryDatabase2(LibraryDatabase):
|
|||||||
title = sanitize_file_name(self.title(id, index_is_id=True)[:self.PATH_LIMIT]).decode(filesystem_encoding, 'ignore')
|
title = sanitize_file_name(self.title(id, index_is_id=True)[:self.PATH_LIMIT]).decode(filesystem_encoding, 'ignore')
|
||||||
path = author + '/' + title + ' (%d)'%id
|
path = author + '/' + title + ' (%d)'%id
|
||||||
return path
|
return path
|
||||||
|
|
||||||
def construct_file_name(self, id):
|
def construct_file_name(self, id):
|
||||||
'''
|
'''
|
||||||
Construct the file name for this book based on its metadata.
|
Construct the file name for this book based on its metadata.
|
||||||
@ -492,17 +496,17 @@ class LibraryDatabase2(LibraryDatabase):
|
|||||||
title = sanitize_file_name(self.title(id, index_is_id=True)[:self.PATH_LIMIT]).decode(filesystem_encoding, 'replace')
|
title = sanitize_file_name(self.title(id, index_is_id=True)[:self.PATH_LIMIT]).decode(filesystem_encoding, 'replace')
|
||||||
name = title + ' - ' + author
|
name = title + ' - ' + author
|
||||||
return name
|
return name
|
||||||
|
|
||||||
def rmtree(self, path):
|
def rmtree(self, path):
|
||||||
if not self.normpath(self.library_path).startswith(self.normpath(path)):
|
if not self.normpath(self.library_path).startswith(self.normpath(path)):
|
||||||
shutil.rmtree(path)
|
shutil.rmtree(path)
|
||||||
|
|
||||||
def normpath(self, path):
|
def normpath(self, path):
|
||||||
path = os.path.abspath(os.path.realpath(path))
|
path = os.path.abspath(os.path.realpath(path))
|
||||||
if not self.is_case_sensitive:
|
if not self.is_case_sensitive:
|
||||||
path = path.lower()
|
path = path.lower()
|
||||||
return path
|
return path
|
||||||
|
|
||||||
def set_path(self, index, index_is_id=False):
|
def set_path(self, index, index_is_id=False):
|
||||||
'''
|
'''
|
||||||
Set the path to the directory containing this books files based on its
|
Set the path to the directory containing this books files based on its
|
||||||
@ -524,12 +528,12 @@ class LibraryDatabase2(LibraryDatabase):
|
|||||||
break
|
break
|
||||||
if path == current_path and not changed:
|
if path == current_path and not changed:
|
||||||
return
|
return
|
||||||
|
|
||||||
tpath = os.path.join(self.library_path, *path.split('/'))
|
tpath = os.path.join(self.library_path, *path.split('/'))
|
||||||
if not os.path.exists(tpath):
|
if not os.path.exists(tpath):
|
||||||
os.makedirs(tpath)
|
os.makedirs(tpath)
|
||||||
spath = os.path.join(self.library_path, *current_path.split('/'))
|
spath = os.path.join(self.library_path, *current_path.split('/'))
|
||||||
|
|
||||||
if current_path and os.path.exists(spath): # Migrate existing files
|
if current_path and os.path.exists(spath): # Migrate existing files
|
||||||
cdata = self.cover(id, index_is_id=True)
|
cdata = self.cover(id, index_is_id=True)
|
||||||
if cdata is not None:
|
if cdata is not None:
|
||||||
@ -551,14 +555,14 @@ class LibraryDatabase2(LibraryDatabase):
|
|||||||
parent = os.path.dirname(spath)
|
parent = os.path.dirname(spath)
|
||||||
if len(os.listdir(parent)) == 0:
|
if len(os.listdir(parent)) == 0:
|
||||||
self.rmtree(parent)
|
self.rmtree(parent)
|
||||||
|
|
||||||
def add_listener(self, listener):
|
def add_listener(self, listener):
|
||||||
'''
|
'''
|
||||||
Add a listener. Will be called on change events with two arguments.
|
Add a listener. Will be called on change events with two arguments.
|
||||||
Event name and list of affected ids.
|
Event name and list of affected ids.
|
||||||
'''
|
'''
|
||||||
self.listeners.add(listener)
|
self.listeners.add(listener)
|
||||||
|
|
||||||
def notify(self, event, ids=[]):
|
def notify(self, event, ids=[]):
|
||||||
'Notify all listeners'
|
'Notify all listeners'
|
||||||
for listener in self.listeners:
|
for listener in self.listeners:
|
||||||
@ -567,12 +571,12 @@ class LibraryDatabase2(LibraryDatabase):
|
|||||||
except:
|
except:
|
||||||
traceback.print_exc()
|
traceback.print_exc()
|
||||||
continue
|
continue
|
||||||
|
|
||||||
def cover(self, index, index_is_id=False, as_file=False, as_image=False,
|
def cover(self, index, index_is_id=False, as_file=False, as_image=False,
|
||||||
as_path=False):
|
as_path=False):
|
||||||
'''
|
'''
|
||||||
Return the cover image as a bytestring (in JPEG format) or None.
|
Return the cover image as a bytestring (in JPEG format) or None.
|
||||||
|
|
||||||
`as_file` : If True return the image as an open file object
|
`as_file` : If True return the image as an open file object
|
||||||
`as_image`: If True return the image as a QImage object
|
`as_image`: If True return the image as a QImage object
|
||||||
'''
|
'''
|
||||||
@ -587,7 +591,7 @@ class LibraryDatabase2(LibraryDatabase):
|
|||||||
img.loadFromData(f.read())
|
img.loadFromData(f.read())
|
||||||
return img
|
return img
|
||||||
return f if as_file else f.read()
|
return f if as_file else f.read()
|
||||||
|
|
||||||
def get_metadata(self, idx, index_is_id=False, get_cover=False):
|
def get_metadata(self, idx, index_is_id=False, get_cover=False):
|
||||||
'''
|
'''
|
||||||
Convenience method to return metadata as a L{MetaInformation} object.
|
Convenience method to return metadata as a L{MetaInformation} object.
|
||||||
@ -612,7 +616,7 @@ class LibraryDatabase2(LibraryDatabase):
|
|||||||
if get_cover:
|
if get_cover:
|
||||||
mi.cover = self.cover(id, index_is_id=True, as_path=True)
|
mi.cover = self.cover(id, index_is_id=True, as_path=True)
|
||||||
return mi
|
return mi
|
||||||
|
|
||||||
def has_book(self, mi):
|
def has_book(self, mi):
|
||||||
title = mi.title
|
title = mi.title
|
||||||
if title:
|
if title:
|
||||||
@ -620,16 +624,16 @@ class LibraryDatabase2(LibraryDatabase):
|
|||||||
title = title.decode(preferred_encoding, 'replace')
|
title = title.decode(preferred_encoding, 'replace')
|
||||||
return bool(self.conn.get('SELECT id FROM books where title=?', (title,), all=False))
|
return bool(self.conn.get('SELECT id FROM books where title=?', (title,), all=False))
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def has_cover(self, index, index_is_id=False):
|
def has_cover(self, index, index_is_id=False):
|
||||||
id = index if index_is_id else self.id(index)
|
id = index if index_is_id else self.id(index)
|
||||||
path = os.path.join(self.library_path, self.path(id, index_is_id=True), 'cover.jpg')
|
path = os.path.join(self.library_path, self.path(id, index_is_id=True), 'cover.jpg')
|
||||||
return os.access(path, os.R_OK)
|
return os.access(path, os.R_OK)
|
||||||
|
|
||||||
def set_cover(self, id, data):
|
def set_cover(self, id, data):
|
||||||
'''
|
'''
|
||||||
Set the cover for this book.
|
Set the cover for this book.
|
||||||
|
|
||||||
`data`: Can be either a QImage, QPixmap, file object or bytestring
|
`data`: Can be either a QImage, QPixmap, file object or bytestring
|
||||||
'''
|
'''
|
||||||
path = os.path.join(self.library_path, self.path(id, index_is_id=True), 'cover.jpg')
|
path = os.path.join(self.library_path, self.path(id, index_is_id=True), 'cover.jpg')
|
||||||
@ -644,13 +648,13 @@ class LibraryDatabase2(LibraryDatabase):
|
|||||||
data = data.read()
|
data = data.read()
|
||||||
p.loadFromData(data)
|
p.loadFromData(data)
|
||||||
p.save(path)
|
p.save(path)
|
||||||
|
|
||||||
def all_formats(self):
formats = self.conn.get('SELECT format from data')
if not formats:
return set([])
return set([f[0] for f in formats])

def formats(self, index, index_is_id=False):
''' Return available formats as a comma separated list or None if there are no available formats '''
id = index if index_is_id else self.id(index)
@ -667,7 +671,7 @@ class LibraryDatabase2(LibraryDatabase):
if os.access(os.path.join(path, name+_format), os.R_OK|os.W_OK):
ans.append(format)
return ','.join(ans)

def has_format(self, index, format, index_is_id=False):
id = index if index_is_id else self.id(index)
name = self.conn.get('SELECT name FROM data WHERE book=? AND format=?', (id, format), all=False)
@ -677,7 +681,7 @@ class LibraryDatabase2(LibraryDatabase):
path = os.path.join(path, name+format)
return os.access(path, os.R_OK|os.W_OK)
return False

def format_abspath(self, index, format, index_is_id=False):
'Return absolute path to the ebook file of format `format`'
id = index if index_is_id else self.id(index)
@ -688,13 +692,13 @@ class LibraryDatabase2(LibraryDatabase):
path = os.path.join(path, name+format)
if os.access(path, os.R_OK|os.W_OK):
return path

def format(self, index, format, index_is_id=False, as_file=False, mode='r+b'):
'''
Return the ebook format as a bytestring or `None` if the format doesn't exist,
or we don't have permission to write to the ebook file.

`as_file`: If True the ebook format is returned as a file object opened in `mode`
'''
path = self.format_abspath(index, format, index_is_id=index_is_id)
if path is not None:
@ -702,14 +706,14 @@ class LibraryDatabase2(LibraryDatabase):
return f if as_file else f.read()
if self.has_format(index, format, index_is_id):
self.remove_format(id, format, index_is_id=True)

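A minimal sketch of how the format accessors above fit together (illustration only, not part of the commit; `db` and `book_id` are assumed, as is the presence of an EPUB file for that book):

    # format_abspath() resolves the on-disk file, format() returns its contents.
    path = db.format_abspath(book_id, 'EPUB', index_is_id=True)
    if path is not None:
        raw  = db.format(book_id, 'EPUB', index_is_id=True)                 # bytestring
        fobj = db.format(book_id, 'EPUB', index_is_id=True, as_file=True)   # open file object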
def add_format_with_hooks(self, index, format, fpath, index_is_id=False,
path=None, notify=True):
npath = self.run_import_plugins(fpath, format)
format = os.path.splitext(npath)[-1].lower().replace('.', '').upper()
return self.add_format(index, format, open(npath, 'rb'),
index_is_id=index_is_id, path=path, notify=notify)

def add_format(self, index, format, stream, index_is_id=False, path=None, notify=True):
id = index if index_is_id else self.id(index)
if path is None:
@ -733,7 +737,7 @@ class LibraryDatabase2(LibraryDatabase):
self.refresh_ids([id])
if notify:
self.notify('metadata', [id])

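A hedged sketch of the two ways to attach a file shown above (not part of the commit; `db`, `book_id` and '/tmp/book.epub' are assumptions):

    # add_format() takes an already-open stream; add_format_with_hooks() takes a path
    # and runs the import plugins on it before adding the resulting file.
    db.add_format(book_id, 'EPUB', open('/tmp/book.epub', 'rb'), index_is_id=True)
    db.add_format_with_hooks(book_id, 'EPUB', '/tmp/book.epub', index_is_id=True)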
def delete_book(self, id, notify=True):
'''
Removes book from the result cache and the underlying database.
@ -751,7 +755,7 @@ class LibraryDatabase2(LibraryDatabase):
self.data.books_deleted([id])
if notify:
self.notify('delete', [id])

def remove_format(self, index, format, index_is_id=False, notify=True):
id = index if index_is_id else self.id(index)
path = os.path.join(self.library_path, *self.path(id, index_is_id=True).split(os.sep))
@ -768,7 +772,7 @@ class LibraryDatabase2(LibraryDatabase):
self.refresh_ids([id])
if notify:
self.notify('metadata', [id])

def clean(self):
'''
Remove orphaned entries.
@ -779,13 +783,13 @@ class LibraryDatabase2(LibraryDatabase):
self.conn.execute(st%dict(ltable='tags', table='tags', ltable_col='tag'))
self.conn.execute(st%dict(ltable='series', table='series', ltable_col='series'))
self.conn.commit()

def get_recipes(self):
return self.conn.get('SELECT id, script FROM feeds')

def get_recipe(self, id):
return self.conn.get('SELECT script FROM feeds WHERE id=?', (id,), all=False)

def get_categories(self, sort_on_count=False):
categories = {}
def get(name, category, field='name'):
@ -807,11 +811,11 @@ class LibraryDatabase2(LibraryDatabase):
for tag in tags:
tag.count = self.conn.get('SELECT COUNT(format) FROM data WHERE format=?', (tag,), all=False)
tags.sort(reverse=sort_on_count, cmp=(lambda x,y:cmp(x.count,y.count)) if sort_on_count else cmp)
for x in (('authors', 'author'), ('tags', 'tag'), ('publishers', 'publisher'),
('series', 'series')):
get(*x)
get('data', 'format', 'format')

categories['news'] = []
newspapers = self.conn.get('SELECT name FROM tags WHERE id IN (SELECT DISTINCT tag FROM books_tags_link WHERE book IN (select book from books_tags_link where tag IN (SELECT id FROM tags WHERE name=?)))', (_('News'),))
if newspapers:
@ -823,10 +827,10 @@ class LibraryDatabase2(LibraryDatabase):
categories['news'] = list(map(Tag, newspapers))
for tag in categories['news']:
tag.count = self.conn.get('SELECT COUNT(id) FROM books_tags_link WHERE tag IN (SELECT DISTINCT id FROM tags WHERE name=?)', (tag,), all=False)

return categories


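The dictionary returned by get_categories() maps a category name to a list of Tag objects carrying a count attribute; a minimal sketch of consuming it (not part of the commit; `db` is assumed, and Tag is treated as string-like here, which is an assumption):

    # Print every category with the per-tag counts computed above
    # (Python 2 print statement, matching the rest of the codebase).
    for name, tags in db.get_categories(sort_on_count=True).items():
        for tag in tags:
            print name, tag, tag.count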
def tags_older_than(self, tag, delta):
tag = tag.lower().strip()
now = datetime.now()
@ -836,9 +840,9 @@ class LibraryDatabase2(LibraryDatabase):
tags = r[FIELD_MAP['tags']]
if tags and tag in tags.lower():
yield r[FIELD_MAP['id']]



def set(self, row, column, val):
'''
Convenience method for setting the title, authors, publisher or rating
@ -861,10 +865,10 @@ class LibraryDatabase2(LibraryDatabase):
self.data.refresh_ids(self.conn, [id])
self.set_path(id, True)
self.notify('metadata', [id])

def set_metadata(self, id, mi):
'''
Set metadata for the book `id` from the `MetaInformation` object `mi`
'''
if mi.title:
self.set_title(id, mi.title)
@ -898,7 +902,7 @@ class LibraryDatabase2(LibraryDatabase):
self.set_timestamp(id, mi.timestamp, notify=False)
self.set_path(id, True)
self.notify('metadata', [id])

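A sketch of driving set_metadata() from a metadata object (illustration only; the MetaInformation import path is taken from the rest of calibre and should be treated as an assumption here, as are `db` and `book_id`):

    from calibre.ebooks.metadata import MetaInformation

    mi = MetaInformation('A Title', ['Some Author'])   # made-up values
    mi.publisher = 'Some Publisher'
    mi.tags = ['Fiction']
    db.set_metadata(book_id, mi)   # updates title, authors, publisher, tags, ... in one call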
def set_authors(self, id, authors, notify=True):
'''
`authors`: A list of authors.
@ -925,18 +929,18 @@ class LibraryDatabase2(LibraryDatabase):
(id, aid))
except IntegrityError: # Sometimes books specify the same author twice in their metadata
pass
ss = authors_to_sort_string(authors)
self.conn.execute('UPDATE books SET author_sort=? WHERE id=?',
(ss, id))
self.conn.commit()
self.data.set(id, FIELD_MAP['authors'],
','.join([a.replace(',', '|') for a in authors]),
row_is_id=True)
self.data.set(id, FIELD_MAP['author_sort'], ss, row_is_id=True)
self.set_path(id, True)
if notify:
self.notify('metadata', [id])

def set_title(self, id, title, notify=True):
if not title:
return
@ -949,7 +953,7 @@ class LibraryDatabase2(LibraryDatabase):
self.conn.commit()
if notify:
self.notify('metadata', [id])

def set_timestamp(self, id, dt, notify=True):
if dt:
self.conn.execute('UPDATE books SET timestamp=? WHERE id=?', (dt, id))
@ -957,7 +961,7 @@ class LibraryDatabase2(LibraryDatabase):
self.conn.commit()
if notify:
self.notify('metadata', [id])

def set_publisher(self, id, publisher, notify=True):
self.conn.execute('DELETE FROM books_publishers_link WHERE book=?',(id,))
self.conn.execute('DELETE FROM publishers WHERE (SELECT COUNT(id) FROM books_publishers_link WHERE publisher=publishers.id) < 1')
@ -974,7 +978,7 @@ class LibraryDatabase2(LibraryDatabase):
self.data.set(id, FIELD_MAP['publisher'], publisher, row_is_id=True)
if notify:
self.notify('metadata', [id])

def set_tags(self, id, tags, append=False, notify=True):
'''
@param tags: list of strings
@ -1018,7 +1022,7 @@ class LibraryDatabase2(LibraryDatabase):
self.data.set(id, FIELD_MAP['tags'], tags, row_is_id=True)
if notify:
self.notify('metadata', [id])

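A short sketch of the tag setters above (not part of the commit; `db` and `book_id` are assumed):

    # Replace the tag list, append one more tag without clobbering the rest,
    # then remove it again.
    db.set_tags(book_id, ['Fiction', 'Science Fiction'])
    db.set_tags(book_id, ['Unread'], append=True)
    db.unapply_tags(book_id, ['Unread'])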
def unapply_tags(self, book_id, tags, notify=True):
for tag in tags:
id = self.conn.get('SELECT id FROM tags WHERE name=?', (tag,), all=False)
@ -1028,7 +1032,7 @@ class LibraryDatabase2(LibraryDatabase):
self.data.refresh_ids(self.conn, [book_id])
if notify:
self.notify('metadata', [id])

def is_tag_used(self, tag):
existing_tags = self.all_tags()
lt = [t.lower() for t in existing_tags]
@ -1037,7 +1041,7 @@ class LibraryDatabase2(LibraryDatabase):
return True
except ValueError:
return False

def delete_tag(self, tag):
existing_tags = self.all_tags()
lt = [t.lower() for t in existing_tags]
@ -1052,7 +1056,7 @@ class LibraryDatabase2(LibraryDatabase):
self.conn.execute('DELETE FROM tags WHERE id=?', (id,))
self.conn.commit()


def set_series(self, id, series, notify=True):
self.conn.execute('DELETE FROM books_series_link WHERE book=?',(id,))
self.conn.execute('DELETE FROM series WHERE (SELECT COUNT(id) FROM books_series_link WHERE series=series.id) < 1')
@ -1075,7 +1079,7 @@ class LibraryDatabase2(LibraryDatabase):
self.data.set(id, FIELD_MAP['series'], series, row_is_id=True)
if notify:
self.notify('metadata', [id])

def set_series_index(self, id, idx, notify=True):
if idx is None:
idx = 1
@ -1091,7 +1095,7 @@ class LibraryDatabase2(LibraryDatabase):
self.data.set(id, FIELD_MAP['series_index'], int(idx), row_is_id=True)
if notify:
self.notify('metadata', [id])

def set_rating(self, id, rating, notify=True):
rating = int(rating)
self.conn.execute('DELETE FROM books_ratings_link WHERE book=?',(id,))
@ -1102,7 +1106,7 @@ class LibraryDatabase2(LibraryDatabase):
self.data.set(id, FIELD_MAP['rating'], rating, row_is_id=True)
if notify:
self.notify('metadata', [id])

def set_comment(self, id, text, notify=True):
self.conn.execute('DELETE FROM comments WHERE book=?', (id,))
self.conn.execute('INSERT INTO comments(book,text) VALUES (?,?)', (id, text))
@ -1110,21 +1114,21 @@ class LibraryDatabase2(LibraryDatabase):
self.data.set(id, FIELD_MAP['comments'], text, row_is_id=True)
if notify:
self.notify('metadata', [id])

def set_author_sort(self, id, sort, notify=True):
self.conn.execute('UPDATE books SET author_sort=? WHERE id=?', (sort, id))
self.conn.commit()
self.data.set(id, FIELD_MAP['author_sort'], sort, row_is_id=True)
if notify:
self.notify('metadata', [id])

def set_isbn(self, id, isbn, notify=True):
self.conn.execute('UPDATE books SET isbn=? WHERE id=?', (isbn, id))
self.conn.commit()
self.data.set(id, FIELD_MAP['isbn'], isbn, row_is_id=True)
if notify:
self.notify('metadata', [id])

def add_news(self, path, recipe):
format = os.path.splitext(path)[1][1:].lower()
stream = path if hasattr(path, 'read') else open(path, 'rb')
@ -1133,21 +1137,21 @@ class LibraryDatabase2(LibraryDatabase):
stream.seek(0)
mi.series_index = 1
mi.tags = [_('News'), recipe.title]
obj = self.conn.execute('INSERT INTO books(title, author_sort) VALUES (?, ?)',
(mi.title, mi.authors[0]))
id = obj.lastrowid
self.data.books_added([id], self.conn)
self.set_path(id, index_is_id=True)
self.conn.commit()
self.set_metadata(id, mi)

self.add_format(id, format, stream, index_is_id=True)
if not hasattr(path, 'read'):
stream.close()
self.conn.commit()
self.data.refresh_ids(self.conn, [id]) # Needed to update format list and size
return id

def run_import_plugins(self, path_or_stream, format):
format = format.lower()
if hasattr(path_or_stream, 'seek'):
@ -1159,7 +1163,7 @@ class LibraryDatabase2(LibraryDatabase):
else:
path = path_or_stream
return run_plugins_on_import(path, format)

def add_books(self, paths, formats, metadata, uris=[], add_duplicates=True):
'''
Add a book to the database. The result cache is not updated.
@ -1185,7 +1189,7 @@ class LibraryDatabase2(LibraryDatabase):
aus = aus.decode(preferred_encoding, 'replace')
if isinstance(title, str):
title = title.decode(preferred_encoding)
obj = self.conn.execute('INSERT INTO books(title, uri, series_index, author_sort) VALUES (?, ?, ?, ?)',
(title, uri, series_index, aus))
id = obj.lastrowid
self.data.books_added([id], self.conn)
@ -1207,7 +1211,7 @@ class LibraryDatabase2(LibraryDatabase):
uris = list(duplicate[3] for duplicate in duplicates)
return (paths, formats, metadata, uris), len(ids)
return None, len(ids)

def import_book(self, mi, formats, notify=True):
series_index = 1 if mi.series_index is None else mi.series_index
if not mi.title:
@ -1219,7 +1223,7 @@ class LibraryDatabase2(LibraryDatabase):
aus = aus.decode(preferred_encoding, 'replace')
title = mi.title if isinstance(mi.title, unicode) else \
mi.title.decode(preferred_encoding, 'replace')
obj = self.conn.execute('INSERT INTO books(title, uri, series_index, author_sort) VALUES (?, ?, ?, ?)',
(title, None, series_index, aus))
id = obj.lastrowid
self.data.books_added([id], self.conn)
@ -1234,7 +1238,7 @@ class LibraryDatabase2(LibraryDatabase):
self.data.refresh_ids(self.conn, [id]) # Needed to update format list and size
if notify:
self.notify('add', [id])

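A minimal sketch of import_book(), the simpler of the two insertion paths above (illustration only; `db` and `mi` are assumed, and treating `formats` as a list of file paths is an assumption):

    # One entry per file that holds the same book in a different format.
    db.import_book(mi, ['/tmp/book.epub', '/tmp/book.lrf'])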
def move_library_to(self, newloc, progress=None):
header = _(u'<p>Copying books to %s<br><center>')%newloc
books = self.conn.get('SELECT id, path, title FROM books')
@ -1263,7 +1267,7 @@ class LibraryDatabase2(LibraryDatabase):
old_dirs.add(srcdir)
if progress is not None:
progress.setValue(i+1)

dbpath = os.path.join(newloc, os.path.basename(self.dbpath))
shutil.copyfile(self.dbpath, dbpath)
opath = self.dbpath
@ -1279,22 +1283,22 @@ class LibraryDatabase2(LibraryDatabase):
if progress is not None:
progress.reset()
progress.hide()


def __iter__(self):
for record in self.data._data:
if record is not None:
yield record

def all_ids(self):
for i in iter(self):
yield i['id']

def get_data_as_dict(self, prefix=None, authors_as_string=False):
'''
Return all metadata stored in the database as a dict. Includes paths to
the cover and each format.

:param prefix: The prefix for all paths. By default, the prefix is the absolute path
to the library folder.
'''
@ -1325,9 +1329,9 @@ class LibraryDatabase2(LibraryDatabase):
x['formats'].append(path%fmt.lower())
x['fmt_'+fmt.lower()] = path%fmt.lower()
x['available_formats'] = [i.upper() for i in formats.split(',')]

return data

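A sketch of what get_data_as_dict() is typically used for (not part of the commit; `db` and the prefix path are assumed, and only the 'formats' and 'available_formats' keys are taken from the hunk above):

    # Dump the library as plain dicts, with all paths rewritten under a custom prefix.
    for x in db.get_data_as_dict(prefix='/exported/library', authors_as_string=True):
        print x['available_formats'], x['formats']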
def migrate_old(self, db, progress):
header = _(u'<p>Migrating old database to ebook library in %s<br><center>')%self.library_path
progress.setValue(0)
@ -1338,23 +1342,23 @@ class LibraryDatabase2(LibraryDatabase):
books = db.conn.get('SELECT id, title, sort, timestamp, uri, series_index, author_sort, isbn FROM books ORDER BY id ASC')
progress.setAutoReset(False)
progress.setRange(0, len(books))

for book in books:
self.conn.execute('INSERT INTO books(id, title, sort, timestamp, uri, series_index, author_sort, isbn) VALUES(?, ?, ?, ?, ?, ?, ?, ?);', book)

tables = '''
authors ratings tags series books_tags_link
comments publishers
books_authors_link conversion_options
books_publishers_link
books_ratings_link
books_series_link feeds
'''.split()
for table in tables:
rows = db.conn.get('SELECT * FROM %s ORDER BY id ASC'%table)
for row in rows:
self.conn.execute('INSERT INTO %s VALUES(%s)'%(table, ','.join(repeat('?', len(row)))), row)

self.conn.commit()
self.refresh('timestamp', True)
for i, book in enumerate(books):
@ -1379,7 +1383,7 @@ books_series_link feeds
self.vacuum()
progress.reset()
return len(books)

def export_to_dir(self, dir, indices, byauthor=False, single_dir=False,
index_is_id=False, callback=None):
if not os.path.exists(dir):
@ -1425,7 +1429,7 @@ books_series_link feeds
opf = OPFCreator(base, mi)
opf.render(f)
f.close()

fmts = self.formats(idx, index_is_id=index_is_id)
if not fmts:
fmts = ''
@ -1449,7 +1453,7 @@ books_series_link feeds
if not callback(count, mi.title):
return

def export_single_format_to_dir(self, dir, indices, format,
index_is_id=False, callback=None):
dir = os.path.abspath(dir)
if not index_is_id:
@ -1476,7 +1480,7 @@ books_series_link feeds
f.write(data)
f.seek(0)
try:
set_metadata(f, self.get_metadata(id, index_is_id=True, get_cover=True),
stream_type=format.lower())
except:
pass
@ -1485,7 +1489,7 @@ books_series_link feeds
if not callback(count, title):
break
return failures

def find_books_in_directory(self, dirpath, single_book_per_directory):
dirpath = os.path.abspath(dirpath)
if single_book_per_directory:
@ -1514,12 +1518,12 @@ books_series_link feeds
ext = ext[1:].lower()
if ext not in BOOK_EXTENSIONS:
continue

key = os.path.splitext(path)[0]
if not books.has_key(key):
books[key] = []
books[key].append(path)

for formats in books.values():
yield formats

@ -1543,7 +1547,7 @@ books_series_link feeds
formats = self.find_books_in_directory(dirpath, True)
if not formats:
return

mi = metadata_from_formats(formats)
if mi.title is None:
return
@ -1552,7 +1556,7 @@ books_series_link feeds
self.import_book(mi, formats)
if callable(callback):
callback(mi.title)

def recursive_import(self, root, single_book_per_directory=True, callback=None):
root = os.path.abspath(root)
duplicates = []
@ -1565,8 +1569,8 @@ books_series_link feeds
if callable(callback):
if callback(''):
break

return duplicates


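Finally, a sketch of the directory-import entry point defined above (illustration only; `db` and the directory path are assumptions):

    # Walk a directory tree, importing one book per directory, and report how many
    # candidates were skipped as duplicates.
    duplicates = db.recursive_import('/home/user/ebooks', single_book_per_directory=True)
    print len(duplicates), 'duplicate(s) were not imported'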
@ -128,7 +128,7 @@ def get_extra_content(site, sfeeds_ids, ctx):
def get_posts_tags(object_list, sfeeds_obj, user_id, tag_name):
""" Adds a qtags property in every post object in a page.

Use "qtags" instead of "tags" in templates to avoid innecesary DB hits.
Use "qtags" instead of "tags" in templates to avoid unnecessary DB hits.
"""
tagd = {}
user_obj = None