KG updates

Commit eb97c9d963 by GRiker, 2010-06-08 03:37:06 -06:00
18 changed files with 412 additions and 193 deletions

[Binary image file changed; new version is 739 B. Image preview not included.]

@ -16,7 +16,7 @@ class NYTimes(BasicNewsRecipe):
title = 'New York Times Top Stories' title = 'New York Times Top Stories'
__author__ = 'GRiker' __author__ = 'GRiker'
language = _('English') language = 'en'
description = 'Top Stories from the New York Times' description = 'Top Stories from the New York Times'
# List of sections typically included in Top Stories. Use a keyword from the # List of sections typically included in Top Stories. Use a keyword from the


@ -1,9 +1,7 @@
#!/usr/bin/env python
__license__ = 'GPL v3' __license__ = 'GPL v3'
__copyright__ = '2009, Darko Miletic <darko.miletic at gmail.com>' __copyright__ = '2009-2010, Darko Miletic <darko.miletic at gmail.com>'
''' '''
rbc.org odb.org
''' '''
from calibre.web.feeds.news import BasicNewsRecipe from calibre.web.feeds.news import BasicNewsRecipe
@ -11,27 +9,29 @@ from calibre.web.feeds.news import BasicNewsRecipe
class OurDailyBread(BasicNewsRecipe): class OurDailyBread(BasicNewsRecipe):
title = 'Our Daily Bread' title = 'Our Daily Bread'
__author__ = 'Darko Miletic and Sujata Raman' __author__ = 'Darko Miletic and Sujata Raman'
description = 'Religion' description = "Our Daily Bread is a daily devotional from RBC Ministries which helps readers spend time each day in God's Word."
oldest_article = 15 oldest_article = 15
language = 'en' language = 'en'
lang = 'en'
max_articles_per_feed = 100 max_articles_per_feed = 100
no_stylesheets = True no_stylesheets = True
use_embedded_content = False use_embedded_content = False
category = 'religion' category = 'ODB, Daily Devotional, Bible, Christian Devotional, Devotional, RBC Ministries, Our Daily Bread, Devotionals, Daily Devotionals, Christian Devotionals, Faith, Bible Study, Bible Studies, Scripture, RBC, religion'
encoding = 'utf-8' encoding = 'utf-8'
conversion_options = { conversion_options = {
'comments' : description 'comments' : description
,'tags' : category ,'tags' : category
,'language' : 'en' ,'language' : language
,'linearize_tables' : True
} }
keep_only_tags = [dict(name='div', attrs={'class':['altbg','text']})] keep_only_tags = [dict(attrs={'class':'module-content'})]
remove_tags = [
dict(attrs={'id':'article-zoom'})
,dict(attrs={'class':'listen-now-box'})
]
remove_tags_after = dict(attrs={'class':'readable-area'})
remove_tags = [dict(name='div', attrs={'id':['ctl00_cphPrimary_pnlBookCover']}),
]
extra_css = ''' extra_css = '''
.text{font-family:Arial,Helvetica,sans-serif;font-size:x-small;} .text{font-family:Arial,Helvetica,sans-serif;font-size:x-small;}
.devotionalTitle{font-family:Arial,Helvetica,sans-serif; font-size:large; font-weight: bold;} .devotionalTitle{font-family:Arial,Helvetica,sans-serif; font-size:large; font-weight: bold;}
@ -40,14 +40,9 @@ class OurDailyBread(BasicNewsRecipe):
a{color:#000000;font-family:Arial,Helvetica,sans-serif; font-size:x-small;} a{color:#000000;font-family:Arial,Helvetica,sans-serif; font-size:x-small;}
''' '''
feeds = [(u'Our Daily Bread', u'http://www.rbc.org/rss.ashx?id=50398')] feeds = [(u'Our Daily Bread', u'http://odb.org/feed/')]
def preprocess_html(self, soup): def preprocess_html(self, soup):
soup.html['xml:lang'] = self.lang
soup.html['lang'] = self.lang
mtag = '<meta http-equiv="Content-Type" content="text/html; charset=' + self.encoding + '">'
soup.head.insert(0,mtag)
return self.adeify_images(soup) return self.adeify_images(soup)
def get_cover_url(self): def get_cover_url(self):
@ -61,3 +56,4 @@ class OurDailyBread(BasicNewsRecipe):
cover_url = a.img['src'] cover_url = a.img['src']
return cover_url return cover_url


@ -14,7 +14,6 @@ from calibre.devices.interface import DevicePlugin
from calibre.ebooks.BeautifulSoup import BeautifulSoup from calibre.ebooks.BeautifulSoup import BeautifulSoup
from calibre.ebooks.metadata import MetaInformation from calibre.ebooks.metadata import MetaInformation
from calibre.library.server.utils import strftime from calibre.library.server.utils import strftime
from calibre.ptempfile import PersistentTemporaryFile
from calibre.utils.config import Config, config_dir from calibre.utils.config import Config, config_dir
from calibre.utils.date import parse_date from calibre.utils.date import parse_date
from calibre.utils.logging import Log from calibre.utils.logging import Log
@ -781,10 +780,12 @@ class ITUNES(DevicePlugin):
self._remove_from_iTunes(self.cached_books[path]) self._remove_from_iTunes(self.cached_books[path])
# Add to iTunes Library|Books # Add to iTunes Library|Books
if isinstance(file,PersistentTemporaryFile): fpath = file
added = self.iTunes.add(appscript.mactypes.File(file._name)) if getattr(file, 'orig_file_path', None) is not None:
else: fpath = file.orig_file_path
added = self.iTunes.add(appscript.mactypes.File(file)) elif getattr(file, 'name', None) is not None:
fpath = file.name
added = self.iTunes.add(appscript.mactypes.File(fpath))
thumb = None thumb = None
if metadata[i].cover: if metadata[i].cover:
@ -824,7 +825,7 @@ class ITUNES(DevicePlugin):
this_book.device_collections = [] this_book.device_collections = []
this_book.library_id = added this_book.library_id = added
this_book.path = path this_book.path = path
this_book.size = self._get_device_book_size(file, added.size()) this_book.size = self._get_device_book_size(fpath, added.size())
this_book.thumbnail = thumb this_book.thumbnail = thumb
this_book.iTunes_id = added this_book.iTunes_id = added
@ -932,14 +933,15 @@ class ITUNES(DevicePlugin):
self.log.info(" '%s' not in cached_books" % metadata[i].title) self.log.info(" '%s' not in cached_books" % metadata[i].title)
# Add to iTunes Library|Books # Add to iTunes Library|Books
if isinstance(file,PersistentTemporaryFile): fpath = file
op_status = lib_books.AddFile(file._name) if getattr(file, 'orig_file_path', None) is not None:
if DEBUG: fpath = file.orig_file_path
self.log.info("ITUNES.upload_books():\n iTunes adding '%s'" % file._name) elif getattr(file, 'name', None) is not None:
else: fpath = file.name
op_status = lib_books.AddFile(file)
if DEBUG: op_status = lib_books.AddFile(fpath)
self.log.info(" iTunes adding '%s'" % file) self.log.info("ITUNES.upload_books():\n iTunes adding '%s'"
% fpath)
if DEBUG: if DEBUG:
sys.stdout.write(" iTunes copying '%s' ..." % metadata[i].title) sys.stdout.write(" iTunes copying '%s' ..." % metadata[i].title)
@ -1509,7 +1511,7 @@ class ITUNES(DevicePlugin):
# Read the current storage path for iTunes media # Read the current storage path for iTunes media
cmd = "defaults read com.apple.itunes NSNavLastRootDirectory" cmd = "defaults read com.apple.itunes NSNavLastRootDirectory"
proc = subprocess.Popen( cmd, shell=True, cwd=os.curdir, stdout=subprocess.PIPE) proc = subprocess.Popen( cmd, shell=True, cwd=os.curdir, stdout=subprocess.PIPE)
retcode = proc.wait() proc.wait()
media_dir = os.path.abspath(proc.communicate()[0].strip()) media_dir = os.path.abspath(proc.communicate()[0].strip())
if os.path.exists(media_dir): if os.path.exists(media_dir):
self.iTunes_media = media_dir self.iTunes_media = media_dir


@ -123,5 +123,12 @@ class BOOX(HANLINV3):
EBOOK_DIR_MAIN = 'MyBooks' EBOOK_DIR_MAIN = 'MyBooks'
EBOOK_DIR_CARD_A = 'MyBooks' EBOOK_DIR_CARD_A = 'MyBooks'
def windows_sort_drives(self, drives):
return drives
def osx_sort_names(self, names):
return names
def linux_swap_drives(self, drives):
return drives
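
(Illustration, not part of the commit.) The three no-op overrides above simply disable any drive reordering for the BOOX. To show what such a hook can do when it is not a no-op, here is a hypothetical subclass that swaps main memory and card A on Windows; the class name, the dict shape and the drive letters are assumptions, not calibre code:

    class ExampleDevice(object):
        # Hypothetical driver using the same hook the BOOX overrides above.
        # The base class is assumed to pass a dict such as
        # {'main': 'F:\\', 'carda': 'G:\\', 'cardb': None}.
        def windows_sort_drives(self, drives):
            # Treat the card as main memory when both are present
            if drives.get('carda') and drives.get('main'):
                drives['main'], drives['carda'] = drives['carda'], drives['main']
            return drives

    d = ExampleDevice()
    print(d.windows_sort_drives({'main': 'F:\\', 'carda': 'G:\\'}))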


@ -287,7 +287,9 @@ class DevicePlugin(Plugin):
This method should raise a L{FreeSpaceError} if there is not enough This method should raise a L{FreeSpaceError} if there is not enough
free space on the device. The text of the FreeSpaceError must contain the free space on the device. The text of the FreeSpaceError must contain the
word "card" if C{on_card} is not None otherwise it must contain the word "memory". word "card" if C{on_card} is not None otherwise it must contain the word "memory".
:files: A list of paths and/or file-like objects. :files: A list of paths and/or file-like objects. If they are paths and
the paths point to temporary files, they may have an additional
attribute, original_file_path pointing to the originals.
:names: A list of file names that the books should have :names: A list of file names that the books should have
once uploaded to the device. len(names) == len(files) once uploaded to the device. len(names) == len(files)
:return: A list of 3-element tuples. The list is meant to be passed :return: A list of 3-element tuples. The list is meant to be passed
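
(Illustration, not part of the diff.) Given the docstring above, a driver receiving such a files list can recover the original library file with a small attribute chain; the fallbacks mirror the orig_file_path/name checks used in the Apple driver earlier in this commit, and the example paths are made up:

    import os, tempfile

    def resolve_source_path(entry):
        # entry may be a plain path or a file-like object; temporary files
        # handed over by the GUI may carry the library path in an extra
        # attribute (orig_file_path / original_file_path in this commit).
        for attr in ('orig_file_path', 'original_file_path', 'name'):
            val = getattr(entry, attr, None)
            if val:
                return val
        return entry          # already a filesystem path

    t = tempfile.NamedTemporaryFile()
    t.orig_file_path = os.path.abspath('mybook.epub')   # made-up example path
    print(resolve_source_path(t))                        # -> .../mybook.epub
    print(resolve_source_path('/tmp/other.epub'))        # plain path passes through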


@ -337,7 +337,7 @@ def main():
dev.touch(args[0]) dev.touch(args[0])
elif command == 'test_file': elif command == 'test_file':
parser = OptionParser(usage=("usage: %prog test_file path\n" parser = OptionParser(usage=("usage: %prog test_file path\n"
'Open device, copy file psecified by path to device and ' 'Open device, copy file specified by path to device and '
'then eject device.')) 'then eject device.'))
options, args = parser.parse_args(args) options, args = parser.parse_args(args)
if len(args) != 1: if len(args) != 1:


@ -8,7 +8,7 @@ Device driver for the SONY devices
import os, time, re import os, time, re
from calibre.devices.usbms.driver import USBMS from calibre.devices.usbms.driver import USBMS, debug_print
from calibre.devices.prs505 import MEDIA_XML from calibre.devices.prs505 import MEDIA_XML
from calibre.devices.prs505 import CACHE_XML from calibre.devices.prs505 import CACHE_XML
from calibre.devices.prs505.sony_cache import XMLCache from calibre.devices.prs505.sony_cache import XMLCache
@ -128,12 +128,15 @@ class PRS505(USBMS):
return XMLCache(paths, prefixes) return XMLCache(paths, prefixes)
def books(self, oncard=None, end_session=True): def books(self, oncard=None, end_session=True):
debug_print('PRS505: starting fetching books for card', oncard)
bl = USBMS.books(self, oncard=oncard, end_session=end_session) bl = USBMS.books(self, oncard=oncard, end_session=end_session)
c = self.initialize_XML_cache() c = self.initialize_XML_cache()
c.update_booklist(bl, {'carda':1, 'cardb':2}.get(oncard, 0)) c.update_booklist(bl, {'carda':1, 'cardb':2}.get(oncard, 0))
debug_print('PRS505: finished fetching books for card', oncard)
return bl return bl
def sync_booklists(self, booklists, end_session=True): def sync_booklists(self, booklists, end_session=True):
debug_print('PRS505: started sync_booklists')
c = self.initialize_XML_cache() c = self.initialize_XML_cache()
blists = {} blists = {}
for i in c.paths: for i in c.paths:
@ -144,10 +147,11 @@ class PRS505(USBMS):
if opts.extra_customization: if opts.extra_customization:
collections = [x.strip() for x in collections = [x.strip() for x in
opts.extra_customization.split(',')] opts.extra_customization.split(',')]
debug_print('PRS505: collection fields:', collections)
c.update(blists, collections) c.update(blists, collections)
c.write() c.write()
USBMS.sync_booklists(self, booklists, end_session=end_session) USBMS.sync_booklists(self, booklists, end_session=end_session)
debug_print('PRS505: finished sync_booklists')

View File

@ -14,6 +14,7 @@ from lxml import etree
from calibre import prints, guess_type from calibre import prints, guess_type
from calibre.devices.errors import DeviceError from calibre.devices.errors import DeviceError
from calibre.devices.usbms.driver import debug_print
from calibre.constants import DEBUG from calibre.constants import DEBUG
from calibre.ebooks.chardet import xml_to_unicode from calibre.ebooks.chardet import xml_to_unicode
from calibre.ebooks.metadata import authors_to_string, title_sort from calibre.ebooks.metadata import authors_to_string, title_sort
@ -61,7 +62,7 @@ class XMLCache(object):
def __init__(self, paths, prefixes): def __init__(self, paths, prefixes):
if DEBUG: if DEBUG:
prints('Building XMLCache...') debug_print('Building XMLCache...')
pprint(paths) pprint(paths)
self.paths = paths self.paths = paths
self.prefixes = prefixes self.prefixes = prefixes
@ -97,16 +98,17 @@ class XMLCache(object):
self.record_roots[0] = recs[0] self.record_roots[0] = recs[0]
self.detect_namespaces() self.detect_namespaces()
debug_print('Done building XMLCache...')
# Playlist management {{{ # Playlist management {{{
def purge_broken_playlist_items(self, root): def purge_broken_playlist_items(self, root):
id_map = self.build_id_map(root)
for pl in root.xpath('//*[local-name()="playlist"]'): for pl in root.xpath('//*[local-name()="playlist"]'):
seen = set([]) seen = set([])
for item in list(pl): for item in list(pl):
id_ = item.get('id', None) id_ = item.get('id', None)
if id_ is None or id_ in seen or not root.xpath( if id_ is None or id_ in seen or id_map.get(id_, None) is None:
'//*[local-name()!="item" and @id="%s"]'%id_):
if DEBUG: if DEBUG:
if id_ is None: if id_ is None:
cause = 'invalid id' cause = 'invalid id'
@ -127,7 +129,7 @@ class XMLCache(object):
for playlist in root.xpath('//*[local-name()="playlist"]'): for playlist in root.xpath('//*[local-name()="playlist"]'):
if len(playlist) == 0 or not playlist.get('title', None): if len(playlist) == 0 or not playlist.get('title', None):
if DEBUG: if DEBUG:
prints('Removing playlist id:', playlist.get('id', None), debug_print('Removing playlist id:', playlist.get('id', None),
playlist.get('title', None)) playlist.get('title', None))
playlist.getparent().remove(playlist) playlist.getparent().remove(playlist)
@ -149,20 +151,25 @@ class XMLCache(object):
seen.add(title) seen.add(title)
def get_playlist_map(self): def get_playlist_map(self):
debug_print('Start get_playlist_map')
ans = {} ans = {}
self.ensure_unique_playlist_titles() self.ensure_unique_playlist_titles()
debug_print('after ensure_unique_playlist_titles')
self.prune_empty_playlists() self.prune_empty_playlists()
debug_print('get_playlist_map loop')
for i, root in self.record_roots.items(): for i, root in self.record_roots.items():
debug_print('get_playlist_map loop', i)
id_map = self.build_id_map(root)
ans[i] = [] ans[i] = []
for playlist in root.xpath('//*[local-name()="playlist"]'): for playlist in root.xpath('//*[local-name()="playlist"]'):
items = [] items = []
for item in playlist: for item in playlist:
id_ = item.get('id', None) id_ = item.get('id', None)
records = root.xpath( record = id_map.get(id_, None)
'//*[local-name()="text" and @id="%s"]'%id_) if record is not None:
if records: items.append(record)
items.append(records[0])
ans[i].append((playlist.get('title'), items)) ans[i].append((playlist.get('title'), items))
debug_print('end get_playlist_map')
return ans return ans
def get_or_create_playlist(self, bl_idx, title): def get_or_create_playlist(self, bl_idx, title):
@ -171,7 +178,7 @@ class XMLCache(object):
if playlist.get('title', None) == title: if playlist.get('title', None) == title:
return playlist return playlist
if DEBUG: if DEBUG:
prints('Creating playlist:', title) debug_print('Creating playlist:', title)
ans = root.makeelement('{%s}playlist'%self.namespaces[bl_idx], ans = root.makeelement('{%s}playlist'%self.namespaces[bl_idx],
nsmap=root.nsmap, attrib={ nsmap=root.nsmap, attrib={
'uuid' : uuid(), 'uuid' : uuid(),
@ -185,7 +192,7 @@ class XMLCache(object):
def fix_ids(self): # {{{ def fix_ids(self): # {{{
if DEBUG: if DEBUG:
prints('Running fix_ids()') debug_print('Running fix_ids()')
def ensure_numeric_ids(root): def ensure_numeric_ids(root):
idmap = {} idmap = {}
@ -198,8 +205,8 @@ class XMLCache(object):
idmap[id_] = '-1' idmap[id_] = '-1'
if DEBUG and idmap: if DEBUG and idmap:
prints('Found non numeric ids:') debug_print('Found non numeric ids:')
prints(list(idmap.keys())) debug_print(list(idmap.keys()))
return idmap return idmap
def remap_playlist_references(root, idmap): def remap_playlist_references(root, idmap):
@ -210,7 +217,7 @@ class XMLCache(object):
if id_ in idmap: if id_ in idmap:
item.set('id', idmap[id_]) item.set('id', idmap[id_])
if DEBUG: if DEBUG:
prints('Remapping id %s to %s'%(id_, idmap[id_])) debug_print('Remapping id %s to %s'%(id_, idmap[id_]))
def ensure_media_xml_base_ids(root): def ensure_media_xml_base_ids(root):
for num, tag in enumerate(('library', 'watchSpecial')): for num, tag in enumerate(('library', 'watchSpecial')):
@ -260,6 +267,8 @@ class XMLCache(object):
last_bl = max(self.roots.keys()) last_bl = max(self.roots.keys())
max_id = self.max_id(self.roots[last_bl]) max_id = self.max_id(self.roots[last_bl])
self.roots[0].set('nextID', str(max_id+1)) self.roots[0].set('nextID', str(max_id+1))
debug_print('Finished running fix_ids()')
# }}} # }}}
# Update JSON from XML {{{ # Update JSON from XML {{{
@ -267,7 +276,7 @@ class XMLCache(object):
if bl_index not in self.record_roots: if bl_index not in self.record_roots:
return return
if DEBUG: if DEBUG:
prints('Updating JSON cache:', bl_index) debug_print('Updating JSON cache:', bl_index)
root = self.record_roots[bl_index] root = self.record_roots[bl_index]
pmap = self.get_playlist_map()[bl_index] pmap = self.get_playlist_map()[bl_index]
playlist_map = {} playlist_map = {}
@ -279,13 +288,14 @@ class XMLCache(object):
playlist_map[path] = [] playlist_map[path] = []
playlist_map[path].append(title) playlist_map[path].append(title)
lpath_map = self.build_lpath_map(root)
for book in bl: for book in bl:
record = self.book_by_lpath(book.lpath, root) record = lpath_map.get(book.lpath, None)
if record is not None: if record is not None:
title = record.get('title', None) title = record.get('title', None)
if title is not None and title != book.title: if title is not None and title != book.title:
if DEBUG: if DEBUG:
prints('Renaming title', book.title, 'to', title) debug_print('Renaming title', book.title, 'to', title)
book.title = title book.title = title
# We shouldn't do this for Sonys, because the reader strips # We shouldn't do this for Sonys, because the reader strips
# all but the first author. # all but the first author.
@ -310,20 +320,24 @@ class XMLCache(object):
if book.lpath in playlist_map: if book.lpath in playlist_map:
tags = playlist_map[book.lpath] tags = playlist_map[book.lpath]
book.device_collections = tags book.device_collections = tags
debug_print('Finished updating JSON cache:', bl_index)
# }}} # }}}
# Update XML from JSON {{{ # Update XML from JSON {{{
def update(self, booklists, collections_attributes): def update(self, booklists, collections_attributes):
debug_print('Starting update XML from JSON')
playlist_map = self.get_playlist_map() playlist_map = self.get_playlist_map()
for i, booklist in booklists.items(): for i, booklist in booklists.items():
if DEBUG: if DEBUG:
prints('Updating XML Cache:', i) debug_print('Updating XML Cache:', i)
root = self.record_roots[i] root = self.record_roots[i]
lpath_map = self.build_lpath_map(root)
for book in booklist: for book in booklist:
path = os.path.join(self.prefixes[i], *(book.lpath.split('/'))) path = os.path.join(self.prefixes[i], *(book.lpath.split('/')))
record = self.book_by_lpath(book.lpath, root) # record = self.book_by_lpath(book.lpath, root)
record = lpath_map.get(book.lpath, None)
if record is None: if record is None:
record = self.create_text_record(root, i, book.lpath) record = self.create_text_record(root, i, book.lpath)
self.update_text_record(record, book, path, i) self.update_text_record(record, book, path, i)
@ -337,16 +351,19 @@ class XMLCache(object):
# This is needed to update device_collections # This is needed to update device_collections
for i, booklist in booklists.items(): for i, booklist in booklists.items():
self.update_booklist(booklist, i) self.update_booklist(booklist, i)
debug_print('Finished update XML from JSON')
def update_playlists(self, bl_index, root, booklist, playlist_map, def update_playlists(self, bl_index, root, booklist, playlist_map,
collections_attributes): collections_attributes):
debug_print('Starting update_playlists')
collections = booklist.get_collections(collections_attributes) collections = booklist.get_collections(collections_attributes)
lpath_map = self.build_lpath_map(root)
for category, books in collections.items(): for category, books in collections.items():
records = [self.book_by_lpath(b.lpath, root) for b in books] records = [lpath_map.get(b.lpath, None) for b in books]
# Remove any books that were not found, although this # Remove any books that were not found, although this
# *should* never happen # *should* never happen
if DEBUG and None in records: if DEBUG and None in records:
prints('WARNING: Some elements in the JSON cache were not' debug_print('WARNING: Some elements in the JSON cache were not'
' found in the XML cache') ' found in the XML cache')
records = [x for x in records if x is not None] records = [x for x in records if x is not None]
for rec in records: for rec in records:
@ -355,7 +372,7 @@ class XMLCache(object):
ids = [x.get('id', None) for x in records] ids = [x.get('id', None) for x in records]
if None in ids: if None in ids:
if DEBUG: if DEBUG:
prints('WARNING: Some <text> elements do not have ids') debug_print('WARNING: Some <text> elements do not have ids')
ids = [x for x in ids if x is not None] ids = [x for x in ids if x is not None]
playlist = self.get_or_create_playlist(bl_index, category) playlist = self.get_or_create_playlist(bl_index, category)
@ -379,20 +396,21 @@ class XMLCache(object):
title = playlist.get('title', None) title = playlist.get('title', None)
if title not in collections: if title not in collections:
if DEBUG: if DEBUG:
prints('Deleting playlist:', playlist.get('title', '')) debug_print('Deleting playlist:', playlist.get('title', ''))
playlist.getparent().remove(playlist) playlist.getparent().remove(playlist)
continue continue
books = collections[title] books = collections[title]
records = [self.book_by_lpath(b.lpath, root) for b in books] records = [lpath_map.get(b.lpath, None) for b in books]
records = [x for x in records if x is not None] records = [x for x in records if x is not None]
ids = [x.get('id', None) for x in records] ids = [x.get('id', None) for x in records]
ids = [x for x in ids if x is not None] ids = [x for x in ids if x is not None]
for item in list(playlist): for item in list(playlist):
if item.get('id', None) not in ids: if item.get('id', None) not in ids:
if DEBUG: if DEBUG:
prints('Deleting item:', item.get('id', ''), debug_print('Deleting item:', item.get('id', ''),
'from playlist:', playlist.get('title', '')) 'from playlist:', playlist.get('title', ''))
playlist.remove(item) playlist.remove(item)
debug_print('Finishing update_playlists')
def create_text_record(self, root, bl_id, lpath): def create_text_record(self, root, bl_id, lpath):
namespace = self.namespaces[bl_id] namespace = self.namespaces[bl_id]
@ -408,11 +426,6 @@ class XMLCache(object):
timestamp = os.path.getctime(path) timestamp = os.path.getctime(path)
date = strftime(timestamp) date = strftime(timestamp)
if date != record.get('date', None): if date != record.get('date', None):
if DEBUG:
prints('Changing date of', path, 'from',
record.get('date', ''), 'to', date)
prints('\tctime', strftime(os.path.getctime(path)))
prints('\tmtime', strftime(os.path.getmtime(path)))
record.set('date', date) record.set('date', date)
record.set('size', str(os.stat(path).st_size)) record.set('size', str(os.stat(path).st_size))
title = book.title if book.title else _('Unknown') title = book.title if book.title else _('Unknown')
@ -475,12 +488,24 @@ class XMLCache(object):
# }}} # }}}
# Utility methods {{{ # Utility methods {{{
def build_lpath_map(self, root):
m = {}
for bk in root.xpath('//*[local-name()="text"]'):
m[bk.get('path')] = bk
return m
def build_id_map(self, root):
m = {}
for bk in root.xpath('//*[local-name()="text"]'):
m[bk.get('id')] = bk
return m
def book_by_lpath(self, lpath, root): def book_by_lpath(self, lpath, root):
matches = root.xpath(u'//*[local-name()="text" and @path="%s"]'%lpath) matches = root.xpath(u'//*[local-name()="text" and @path="%s"]'%lpath)
if matches: if matches:
return matches[0] return matches[0]
def max_id(self, root): def max_id(self, root):
ans = -1 ans = -1
for x in root.xpath('//*[@id]'): for x in root.xpath('//*[@id]'):
@ -515,10 +540,10 @@ class XMLCache(object):
break break
self.namespaces[i] = ns self.namespaces[i] = ns
if DEBUG: # if DEBUG:
prints('Found nsmaps:') # debug_print('Found nsmaps:')
pprint(self.nsmaps) # pprint(self.nsmaps)
prints('Found namespaces:') # debug_print('Found namespaces:')
pprint(self.namespaces) # pprint(self.namespaces)
# }}} # }}}
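
(Illustration, not part of the diff.) The new build_lpath_map/build_id_map helpers trade one pass over the XML for O(1) dictionary lookups instead of running an XPath query per book or per playlist item. A self-contained sketch of the same pattern, with a made-up namespace and records:

    from lxml import etree

    xml = '''<cache xmlns="http://example.com/cache">
      <text id="1" path="books/alpha.epub"/>
      <text id="2" path="books/beta.epub"/>
    </cache>'''
    root = etree.fromstring(xml)

    # A single pass over the document builds both lookup tables
    records = root.xpath('//*[local-name()="text"]')
    lpath_map = dict((r.get('path'), r) for r in records)
    id_map = dict((r.get('id'), r) for r in records)

    # O(1) lookups replace per-item queries such as
    #   root.xpath('//*[local-name()="text" and @path="%s"]' % lpath)
    print(lpath_map.get('books/alpha.epub').get('id'))   # -> 1
    print(id_map.get('2').get('path'))                   # -> books/beta.epub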


@ -46,7 +46,8 @@ class Book(MetaInformation):
self.smart_update(other) self.smart_update(other)
def __eq__(self, other): def __eq__(self, other):
return self.path == getattr(other, 'path', None) # use lpath because the prefix can change, changing path
return self.path == getattr(other, 'lpath', None)
@dynamic_property @dynamic_property
def db_id(self): def db_id(self):
@ -97,13 +98,24 @@ class Book(MetaInformation):
class BookList(_BookList): class BookList(_BookList):
def __init__(self, oncard, prefix, settings):
_BookList.__init__(self, oncard, prefix, settings)
self._bookmap = {}
def supports_collections(self): def supports_collections(self):
return False return False
def add_book(self, book, replace_metadata): def add_book(self, book, replace_metadata):
if book not in self: try:
b = self.index(book)
except (ValueError, IndexError):
b = None
if b is None:
self.append(book) self.append(book)
return True return True
if replace_metadata:
self[b].smart_update(book)
return True
return False return False
def remove_book(self, book): def remove_book(self, book):
@ -112,7 +124,6 @@ class BookList(_BookList):
def get_collections(self): def get_collections(self):
return {} return {}
class CollectionsBookList(BookList): class CollectionsBookList(BookList):
def supports_collections(self): def supports_collections(self):
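
(Illustration, not part of the diff.) add_book now updates an existing entry in place when replace_metadata is set, instead of silently keeping the stale record. The same add-or-update pattern on a plain list, with a toy Book class standing in for the real one:

    class Book(object):
        def __init__(self, lpath, title):
            self.lpath, self.title = lpath, title
        def __eq__(self, other):
            # as in the diff above: compare on lpath, not on path
            return self.lpath == getattr(other, 'lpath', None)

    def add_book(booklist, book, replace_metadata):
        try:
            i = booklist.index(book)
        except ValueError:
            booklist.append(book)
            return True
        if replace_metadata:
            booklist[i].title = book.title   # stand-in for smart_update()
            return True
        return False

    bl = []
    add_book(bl, Book('a.epub', 'Old Title'), replace_metadata=True)
    add_book(bl, Book('a.epub', 'New Title'), replace_metadata=True)
    print('%d %s' % (len(bl), bl[0].title))  # -> 1 New Title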


@ -765,12 +765,8 @@ class Device(DeviceConfig, DevicePlugin):
path = existing[0] path = existing[0]
def get_size(obj): def get_size(obj):
if hasattr(obj, 'seek'): path = getattr(obj, 'name', obj)
obj.seek(0, os.SEEK_END) return os.path.getsize(path)
size = obj.tell()
obj.seek(0)
return size
return os.path.getsize(obj)
sizes = [get_size(f) for f in files] sizes = [get_size(f) for f in files]
size = sum(sizes) size = sum(sizes)
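
(Illustration, not part of the diff.) The rewritten get_size asks the filesystem for the size of the file's path (obj.name) rather than seeking to the end of an open file object, and the same call also handles the plain path strings now being passed in:

    import os, tempfile

    def get_size(obj):
        # obj is either a path string or an open file object with a .name
        path = getattr(obj, 'name', obj)
        return os.path.getsize(path)

    f = tempfile.NamedTemporaryFile(delete=False)
    f.write(b'12345')
    f.flush()
    print(get_size(f) == get_size(f.name) == 5)   # both report 5 bytes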


@ -12,15 +12,24 @@ for a particular device.
import os import os
import re import re
import time
import json import json
from itertools import cycle from itertools import cycle
from calibre import prints, isbytestring from calibre import prints, isbytestring
from calibre.constants import filesystem_encoding from calibre.constants import filesystem_encoding, DEBUG
from calibre.devices.usbms.cli import CLI from calibre.devices.usbms.cli import CLI
from calibre.devices.usbms.device import Device from calibre.devices.usbms.device import Device
from calibre.devices.usbms.books import BookList, Book from calibre.devices.usbms.books import BookList, Book
BASE_TIME = None
def debug_print(*args):
global BASE_TIME
if BASE_TIME is None:
BASE_TIME = time.time()
if DEBUG:
prints('DEBUG: %6.1f'%(time.time()-BASE_TIME), *args)
# CLI must come before Device as it implements the CLI functions that # CLI must come before Device as it implements the CLI functions that
# are inherited from the device interface in Device. # are inherited from the device interface in Device.
class USBMS(CLI, Device): class USBMS(CLI, Device):
@ -47,6 +56,8 @@ class USBMS(CLI, Device):
def books(self, oncard=None, end_session=True): def books(self, oncard=None, end_session=True):
from calibre.ebooks.metadata.meta import path_to_ext from calibre.ebooks.metadata.meta import path_to_ext
debug_print ('USBMS: Fetching list of books from device. oncard=', oncard)
dummy_bl = BookList(None, None, None) dummy_bl = BookList(None, None, None)
if oncard == 'carda' and not self._card_a_prefix: if oncard == 'carda' and not self._card_a_prefix:
@ -136,8 +147,8 @@ class USBMS(CLI, Device):
need_sync = True need_sync = True
del bl[idx] del bl[idx]
#print "count found in cache: %d, count of files in metadata: %d, need_sync: %s" % \ debug_print('USBMS: count found in cache: %d, count of files in metadata: %d, need_sync: %s' % \
# (len(bl_cache), len(bl), need_sync) (len(bl_cache), len(bl), need_sync))
if need_sync: #self.count_found_in_bl != len(bl) or need_sync: if need_sync: #self.count_found_in_bl != len(bl) or need_sync:
if oncard == 'cardb': if oncard == 'cardb':
self.sync_booklists((None, None, bl)) self.sync_booklists((None, None, bl))
@ -147,10 +158,13 @@ class USBMS(CLI, Device):
self.sync_booklists((bl, None, None)) self.sync_booklists((bl, None, None))
self.report_progress(1.0, _('Getting list of books on device...')) self.report_progress(1.0, _('Getting list of books on device...'))
debug_print('USBMS: Finished fetching list of books from device. oncard=', oncard)
return bl return bl
def upload_books(self, files, names, on_card=None, end_session=True, def upload_books(self, files, names, on_card=None, end_session=True,
metadata=None): metadata=None):
debug_print('USBMS: uploading %d books'%(len(files)))
path = self._sanity_check(on_card, files) path = self._sanity_check(on_card, files)
paths = [] paths = []
@ -174,6 +188,7 @@ class USBMS(CLI, Device):
self.report_progress((i+1) / float(len(files)), _('Transferring books to device...')) self.report_progress((i+1) / float(len(files)), _('Transferring books to device...'))
self.report_progress(1.0, _('Transferring books to device...')) self.report_progress(1.0, _('Transferring books to device...'))
debug_print('USBMS: finished uploading %d books'%(len(files)))
return zip(paths, cycle([on_card])) return zip(paths, cycle([on_card]))
def upload_cover(self, path, filename, metadata): def upload_cover(self, path, filename, metadata):
@ -186,6 +201,8 @@ class USBMS(CLI, Device):
pass pass
def add_books_to_metadata(self, locations, metadata, booklists): def add_books_to_metadata(self, locations, metadata, booklists):
debug_print('USBMS: adding metadata for %d books'%(len(metadata)))
metadata = iter(metadata) metadata = iter(metadata)
for i, location in enumerate(locations): for i, location in enumerate(locations):
self.report_progress((i+1) / float(len(locations)), _('Adding books to device metadata listing...')) self.report_progress((i+1) / float(len(locations)), _('Adding books to device metadata listing...'))
@ -218,8 +235,10 @@ class USBMS(CLI, Device):
book.size = os.stat(self.normalize_path(path)).st_size book.size = os.stat(self.normalize_path(path)).st_size
booklists[blist].add_book(book, replace_metadata=True) booklists[blist].add_book(book, replace_metadata=True)
self.report_progress(1.0, _('Adding books to device metadata listing...')) self.report_progress(1.0, _('Adding books to device metadata listing...'))
debug_print('USBMS: finished adding metadata')
def delete_books(self, paths, end_session=True): def delete_books(self, paths, end_session=True):
debug_print('USBMS: deleting %d books'%(len(paths)))
for i, path in enumerate(paths): for i, path in enumerate(paths):
self.report_progress((i+1) / float(len(paths)), _('Removing books from device...')) self.report_progress((i+1) / float(len(paths)), _('Removing books from device...'))
path = self.normalize_path(path) path = self.normalize_path(path)
@ -240,8 +259,11 @@ class USBMS(CLI, Device):
except: except:
pass pass
self.report_progress(1.0, _('Removing books from device...')) self.report_progress(1.0, _('Removing books from device...'))
debug_print('USBMS: finished deleting %d books'%(len(paths)))
def remove_books_from_metadata(self, paths, booklists): def remove_books_from_metadata(self, paths, booklists):
debug_print('USBMS: removing metadata for %d books'%(len(paths)))
for i, path in enumerate(paths): for i, path in enumerate(paths):
self.report_progress((i+1) / float(len(paths)), _('Removing books from device metadata listing...')) self.report_progress((i+1) / float(len(paths)), _('Removing books from device metadata listing...'))
for bl in booklists: for bl in booklists:
@ -249,8 +271,11 @@ class USBMS(CLI, Device):
if path.endswith(book.path): if path.endswith(book.path):
bl.remove_book(book) bl.remove_book(book)
self.report_progress(1.0, _('Removing books from device metadata listing...')) self.report_progress(1.0, _('Removing books from device metadata listing...'))
debug_print('USBMS: finished removing metadata for %d books'%(len(paths)))
def sync_booklists(self, booklists, end_session=True): def sync_booklists(self, booklists, end_session=True):
debug_print('USBMS: starting sync_booklists')
if not os.path.exists(self.normalize_path(self._main_prefix)): if not os.path.exists(self.normalize_path(self._main_prefix)):
os.makedirs(self.normalize_path(self._main_prefix)) os.makedirs(self.normalize_path(self._main_prefix))
@ -267,6 +292,7 @@ class USBMS(CLI, Device):
write_prefix(self._card_b_prefix, 2) write_prefix(self._card_b_prefix, 2)
self.report_progress(1.0, _('Sending metadata to device...')) self.report_progress(1.0, _('Sending metadata to device...'))
debug_print('USBMS: finished sync_booklists')
@classmethod @classmethod
def path_to_unicode(cls, path): def path_to_unicode(cls, path):
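
(Illustration, not part of the diff.) The new debug_print helper prefixes every message with the seconds elapsed since the first call, so the timing of a device session can be read straight from the log when calibre runs in debug mode. A standalone sketch of the same idea (it always prints, whereas the real helper checks DEBUG):

    import time

    BASE_TIME = None

    def debug_print(*args):
        global BASE_TIME
        if BASE_TIME is None:
            BASE_TIME = time.time()
        print('DEBUG: %6.1f %s' % (time.time() - BASE_TIME,
                                   ' '.join(str(a) for a in args)))

    debug_print('USBMS: Fetching list of books from device. oncard=', None)
    time.sleep(0.2)
    debug_print('USBMS: finished uploading %d books' % 3)
    # Typical output:
    #   DEBUG:    0.0 USBMS: Fetching list of books from device. oncard= None
    #   DEBUG:    0.2 USBMS: finished uploading 3 books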


@ -1,6 +1,8 @@
from __future__ import with_statement from __future__ import with_statement
__license__ = 'GPL v3' __license__ = 'GPL v3'
__copyright__ = '2008, Kovid Goyal <kovid at kovidgoyal.net>' __copyright__ = '2008, Kovid Goyal <kovid at kovidgoyal.net>'
# Imports {{{
import os, traceback, Queue, time, socket, cStringIO, re import os, traceback, Queue, time, socket, cStringIO, re
from threading import Thread, RLock from threading import Thread, RLock
from itertools import repeat from itertools import repeat
@ -27,7 +29,9 @@ from calibre.utils.smtp import compose_mail, sendmail, extract_email_address, \
config as email_config config as email_config
from calibre.devices.folder_device.driver import FOLDER_DEVICE from calibre.devices.folder_device.driver import FOLDER_DEVICE
class DeviceJob(BaseJob): # }}}
class DeviceJob(BaseJob): # {{{
def __init__(self, func, done, job_manager, args=[], kwargs={}, def __init__(self, func, done, job_manager, args=[], kwargs={},
description=''): description=''):
@ -78,8 +82,9 @@ class DeviceJob(BaseJob):
def log_file(self): def log_file(self):
return cStringIO.StringIO(self._details.encode('utf-8')) return cStringIO.StringIO(self._details.encode('utf-8'))
# }}}
class DeviceManager(Thread): class DeviceManager(Thread): # {{{
def __init__(self, connected_slot, job_manager, open_feedback_slot, sleep_time=2): def __init__(self, connected_slot, job_manager, open_feedback_slot, sleep_time=2):
''' '''
@ -122,7 +127,7 @@ class DeviceManager(Thread):
try: try:
dev.open() dev.open()
except: except:
print 'Unable to open device', dev prints('Unable to open device', str(dev))
traceback.print_exc() traceback.print_exc()
continue continue
self.connected_device = dev self.connected_device = dev
@ -168,11 +173,11 @@ class DeviceManager(Thread):
if possibly_connected_devices: if possibly_connected_devices:
if not self.do_connect(possibly_connected_devices, if not self.do_connect(possibly_connected_devices,
is_folder_device=False): is_folder_device=False):
print 'Connect to device failed, retrying in 5 seconds...' prints('Connect to device failed, retrying in 5 seconds...')
time.sleep(5) time.sleep(5)
if not self.do_connect(possibly_connected_devices, if not self.do_connect(possibly_connected_devices,
is_folder_device=False): is_folder_device=False):
print 'Device connect failed again, giving up' prints('Device connect failed again, giving up')
def umount_device(self, *args): def umount_device(self, *args):
if self.is_device_connected and not self.job_manager.has_device_jobs(): if self.is_device_connected and not self.job_manager.has_device_jobs():
@ -317,7 +322,7 @@ class DeviceManager(Thread):
def _save_books(self, paths, target): def _save_books(self, paths, target):
'''Copy books from device to disk''' '''Copy books from device to disk'''
for path in paths: for path in paths:
name = path.rpartition(getattr(self.device, 'path_sep', '/'))[2] name = path.rpartition(os.sep)[2]
dest = os.path.join(target, name) dest = os.path.join(target, name)
if os.path.abspath(dest) != os.path.abspath(path): if os.path.abspath(dest) != os.path.abspath(path):
f = open(dest, 'wb') f = open(dest, 'wb')
@ -338,8 +343,9 @@ class DeviceManager(Thread):
return self.create_job(self._view_book, done, args=[path, target], return self.create_job(self._view_book, done, args=[path, target],
description=_('View book on device')) description=_('View book on device'))
# }}}
class DeviceAction(QAction): class DeviceAction(QAction): # {{{
a_s = pyqtSignal(object) a_s = pyqtSignal(object)
@ -356,9 +362,9 @@ class DeviceAction(QAction):
def __repr__(self): def __repr__(self):
return self.__class__.__name__ + ':%s:%s:%s'%(self.dest, self.delete, return self.__class__.__name__ + ':%s:%s:%s'%(self.dest, self.delete,
self.specific) self.specific)
# }}}
class DeviceMenu(QMenu): # {{{
class DeviceMenu(QMenu):
fetch_annotations = pyqtSignal() fetch_annotations = pyqtSignal()
connect_to_folder = pyqtSignal() connect_to_folder = pyqtSignal()
@ -532,8 +538,9 @@ class DeviceMenu(QMenu):
annot_enable = enable and getattr(device, 'SUPPORTS_ANNOTATIONS', False) annot_enable = enable and getattr(device, 'SUPPORTS_ANNOTATIONS', False)
self.annotation_action.setEnabled(annot_enable) self.annotation_action.setEnabled(annot_enable)
# }}}
class Emailer(Thread): class Emailer(Thread): # {{{
def __init__(self, timeout=60): def __init__(self, timeout=60):
Thread.__init__(self) Thread.__init__(self)
@ -590,6 +597,7 @@ class Emailer(Thread):
results.append([jobname, e, traceback.format_exc()]) results.append([jobname, e, traceback.format_exc()])
callback(results) callback(results)
# }}}
class DeviceGUI(object): class DeviceGUI(object):
@ -637,7 +645,7 @@ class DeviceGUI(object):
if not ids or len(ids) == 0: if not ids or len(ids) == 0:
return return
files, _auto_ids = self.library_view.model().get_preferred_formats_from_ids(ids, files, _auto_ids = self.library_view.model().get_preferred_formats_from_ids(ids,
fmts, paths=True, set_metadata=True, fmts, set_metadata=True,
specific_format=specific_format, specific_format=specific_format,
exclude_auto=do_auto_convert) exclude_auto=do_auto_convert)
if do_auto_convert: if do_auto_convert:
@ -647,7 +655,6 @@ class DeviceGUI(object):
_auto_ids = [] _auto_ids = []
full_metadata = self.library_view.model().metadata_for(ids) full_metadata = self.library_view.model().metadata_for(ids)
files = [getattr(f, 'name', None) for f in files]
bad, remove_ids, jobnames = [], [], [] bad, remove_ids, jobnames = [], [], []
texts, subjects, attachments, attachment_names = [], [], [], [] texts, subjects, attachments, attachment_names = [], [], [], []
@ -760,7 +767,7 @@ class DeviceGUI(object):
for account, fmts in accounts: for account, fmts in accounts:
files, auto = self.library_view.model().\ files, auto = self.library_view.model().\
get_preferred_formats_from_ids([id], fmts) get_preferred_formats_from_ids([id], fmts)
files = [f.name for f in files if f is not None] files = [f for f in files if f is not None]
if not files: if not files:
continue continue
attachment = files[0] attachment = files[0]
@ -824,7 +831,7 @@ class DeviceGUI(object):
prefix = prefix.decode(preferred_encoding, 'replace') prefix = prefix.decode(preferred_encoding, 'replace')
prefix = ascii_filename(prefix) prefix = ascii_filename(prefix)
names.append('%s_%d%s'%(prefix, id, names.append('%s_%d%s'%(prefix, id,
os.path.splitext(f.name)[1])) os.path.splitext(f)[1]))
if mi.cover and os.access(mi.cover, os.R_OK): if mi.cover and os.access(mi.cover, os.R_OK):
mi.thumbnail = self.cover_to_thumbnail(open(mi.cover, mi.thumbnail = self.cover_to_thumbnail(open(mi.cover,
'rb').read()) 'rb').read())
@ -837,7 +844,7 @@ class DeviceGUI(object):
on_card = space.get(sorted(space.keys(), reverse=True)[0], None) on_card = space.get(sorted(space.keys(), reverse=True)[0], None)
self.upload_books(files, names, metadata, self.upload_books(files, names, metadata,
on_card=on_card, on_card=on_card,
memory=[[f.name for f in files], remove]) memory=[files, remove])
self.status_bar.showMessage(_('Sending catalogs to device.'), 5000) self.status_bar.showMessage(_('Sending catalogs to device.'), 5000)
@ -884,7 +891,7 @@ class DeviceGUI(object):
prefix = prefix.decode(preferred_encoding, 'replace') prefix = prefix.decode(preferred_encoding, 'replace')
prefix = ascii_filename(prefix) prefix = ascii_filename(prefix)
names.append('%s_%d%s'%(prefix, id, names.append('%s_%d%s'%(prefix, id,
os.path.splitext(f.name)[1])) os.path.splitext(f)[1]))
if mi.cover and os.access(mi.cover, os.R_OK): if mi.cover and os.access(mi.cover, os.R_OK):
mi.thumbnail = self.cover_to_thumbnail(open(mi.cover, mi.thumbnail = self.cover_to_thumbnail(open(mi.cover,
'rb').read()) 'rb').read())
@ -898,7 +905,7 @@ class DeviceGUI(object):
on_card = space.get(sorted(space.keys(), reverse=True)[0], None) on_card = space.get(sorted(space.keys(), reverse=True)[0], None)
self.upload_books(files, names, metadata, self.upload_books(files, names, metadata,
on_card=on_card, on_card=on_card,
memory=[[f.name for f in files], remove]) memory=[files, remove])
self.status_bar.showMessage(_('Sending news to device.'), 5000) self.status_bar.showMessage(_('Sending news to device.'), 5000)
@ -914,7 +921,7 @@ class DeviceGUI(object):
_files, _auto_ids = self.library_view.model().get_preferred_formats_from_ids(ids, _files, _auto_ids = self.library_view.model().get_preferred_formats_from_ids(ids,
settings.format_map, settings.format_map,
paths=True, set_metadata=True, set_metadata=True,
specific_format=specific_format, specific_format=specific_format,
exclude_auto=do_auto_convert) exclude_auto=do_auto_convert)
if do_auto_convert: if do_auto_convert:
@ -930,9 +937,8 @@ class DeviceGUI(object):
mi.thumbnail = self.cover_to_thumbnail(open(mi.cover, 'rb').read()) mi.thumbnail = self.cover_to_thumbnail(open(mi.cover, 'rb').read())
imetadata = iter(metadata) imetadata = iter(metadata)
files = [getattr(f, 'name', None) for f in _files]
bad, good, gf, names, remove_ids = [], [], [], [], [] bad, good, gf, names, remove_ids = [], [], [], [], []
for f in files: for f in _files:
mi = imetadata.next() mi = imetadata.next()
id = ids.next() id = ids.next()
if f is None: if f is None:


@ -21,7 +21,8 @@ from calibre.utils.date import dt_factory, qt_to_dt, isoformat
from calibre.ebooks.metadata.meta import set_metadata as _set_metadata from calibre.ebooks.metadata.meta import set_metadata as _set_metadata
from calibre.utils.search_query_parser import SearchQueryParser from calibre.utils.search_query_parser import SearchQueryParser
from calibre.library.caches import _match, CONTAINS_MATCH, EQUALS_MATCH, REGEXP_MATCH from calibre.library.caches import _match, CONTAINS_MATCH, EQUALS_MATCH, REGEXP_MATCH
from calibre import strftime from calibre import strftime, isbytestring
from calibre.constants import filesystem_encoding
from calibre.gui2.library import DEFAULT_SORT from calibre.gui2.library import DEFAULT_SORT
def human_readable(size, precision=1): def human_readable(size, precision=1):
@ -33,6 +34,13 @@ TIME_FMT = '%d %b %Y'
ALIGNMENT_MAP = {'left': Qt.AlignLeft, 'right': Qt.AlignRight, 'center': ALIGNMENT_MAP = {'left': Qt.AlignLeft, 'right': Qt.AlignRight, 'center':
Qt.AlignHCenter} Qt.AlignHCenter}
class FormatPath(unicode):
def __new__(cls, path, orig_file_path):
ans = unicode.__new__(cls, path)
ans.orig_file_path = orig_file_path
return ans
class BooksModel(QAbstractTableModel): # {{{ class BooksModel(QAbstractTableModel): # {{{
about_to_be_sorted = pyqtSignal(object, name='aboutToBeSorted') about_to_be_sorted = pyqtSignal(object, name='aboutToBeSorted')
@ -379,7 +387,7 @@ class BooksModel(QAbstractTableModel): # {{{
else: else:
return metadata return metadata
def get_preferred_formats_from_ids(self, ids, formats, paths=False, def get_preferred_formats_from_ids(self, ids, formats,
set_metadata=False, specific_format=None, set_metadata=False, specific_format=None,
exclude_auto=False, mode='r+b'): exclude_auto=False, mode='r+b'):
ans = [] ans = []
@ -404,12 +412,20 @@ class BooksModel(QAbstractTableModel): # {{{
as_file=True)) as src: as_file=True)) as src:
shutil.copyfileobj(src, pt) shutil.copyfileobj(src, pt)
pt.flush() pt.flush()
if getattr(src, 'name', None):
pt.orig_file_path = os.path.abspath(src.name)
pt.seek(0) pt.seek(0)
if set_metadata: if set_metadata:
_set_metadata(pt, self.db.get_metadata(id, get_cover=True, index_is_id=True), _set_metadata(pt, self.db.get_metadata(id, get_cover=True, index_is_id=True),
format) format)
pt.close() if paths else pt.seek(0) pt.close()
ans.append(pt) def to_uni(x):
if isbytestring(x):
x = x.decode(filesystem_encoding)
return x
name, op = map(to_uni, map(os.path.abspath, (pt.name,
pt.orig_file_path)))
ans.append(FormatPath(name, op))
else: else:
need_auto.append(id) need_auto.append(id)
if not exclude_auto: if not exclude_auto:
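
(Illustration, not part of the diff.) get_preferred_formats_from_ids now hands back plain path strings, yet the GUI still needs to know where the original library copy lives; FormatPath does this by subclassing unicode and carrying the extra attribute along. A minimal sketch of the same trick (str is used here so the snippet runs on either Python 2 or 3; the paths are made up):

    class FormatPath(str):
        # A path that behaves exactly like a string but remembers where
        # the original library copy of the file lives.
        def __new__(cls, path, orig_file_path):
            ans = str.__new__(cls, path)
            ans.orig_file_path = orig_file_path
            return ans

    p = FormatPath('/tmp/calibre_abc123.epub', '/library/Author/Title.epub')
    print(p.upper().startswith('/TMP'))   # still usable as an ordinary string
    print(p.orig_file_path)               # the extra attribute survives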


@ -138,7 +138,8 @@ class TagsView(QTreeView): # {{{
# the possibility of renaming that item # the possibility of renaming that item
if tag_name and \ if tag_name and \
(key in ['authors', 'tags', 'series', 'publisher', 'search'] or \ (key in ['authors', 'tags', 'series', 'publisher', 'search'] or \
self.db.field_metadata[key]['is_custom']): self.db.field_metadata[key]['is_custom'] and \
self.db.field_metadata[key]['datatype'] != 'rating'):
self.context_menu.addAction(_('Rename') + " '" + tag_name + "'", self.context_menu.addAction(_('Rename') + " '" + tag_name + "'",
partial(self.context_menu_handler, action='edit_item', partial(self.context_menu_handler, action='edit_item',
category=tag_item, index=index)) category=tag_item, index=index))
@ -184,11 +185,17 @@ class TagsView(QTreeView): # {{{
if self.model(): if self.model():
self.model().clear_state() self.model().clear_state()
def is_visible(self, idx):
item = idx.internalPointer()
if getattr(item, 'type', None) == TagTreeItem.TAG:
idx = idx.parent()
return self.isExpanded(idx)
def recount(self, *args): def recount(self, *args):
ci = self.currentIndex() ci = self.currentIndex()
if not ci.isValid(): if not ci.isValid():
ci = self.indexAt(QPoint(10, 10)) ci = self.indexAt(QPoint(10, 10))
path = self.model().path_for_index(ci) path = self.model().path_for_index(ci) if self.is_visible(ci) else None
try: try:
self.model().refresh() self.model().refresh()
except: #Database connection could be closed if an integrity check is happening except: #Database connection could be closed if an integrity check is happening
@ -359,12 +366,8 @@ class TagsModel(QAbstractItemModel): # {{{
data = self.db.get_categories(sort_on_count=sort, icon_map=self.category_icon_map) data = self.db.get_categories(sort_on_count=sort, icon_map=self.category_icon_map)
tb_categories = self.db.field_metadata tb_categories = self.db.field_metadata
self.category_items = {}
for category in tb_categories: for category in tb_categories:
if category in data: # They should always be there, but ... if category in data: # They should always be there, but ...
# make a map of sets of names per category for duplicate
# checking when editing
self.category_items[category] = set([tag.name for tag in data[category]])
self.row_map.append(category) self.row_map.append(category)
self.categories.append(tb_categories[category]['name']) self.categories.append(tb_categories[category]['name'])
return data return data
@ -412,15 +415,14 @@ class TagsModel(QAbstractItemModel): # {{{
return False return False
item = index.internalPointer() item = index.internalPointer()
key = item.parent.category_key key = item.parent.category_key
# make certain we know about the category # make certain we know about the item's category
if key not in self.db.field_metadata: if key not in self.db.field_metadata:
return return
if val in self.category_items[key]:
error_dialog(self.tags_view, 'Duplicate item',
_('The name %s is already used.')%val).exec_()
return False
oldval = item.tag.name
if key == 'search': if key == 'search':
if val in saved_searches.names():
error_dialog(self.tags_view, _('Duplicate search name'),
_('The saved search name %s is already used.')%val).exec_()
return False
saved_searches.rename(unicode(item.data(role).toString()), val) saved_searches.rename(unicode(item.data(role).toString()), val)
self.tags_view.search_item_renamed.emit() self.tags_view.search_item_renamed.emit()
else: else:
@ -437,10 +439,7 @@ class TagsModel(QAbstractItemModel): # {{{
label=self.db.field_metadata[key]['label']) label=self.db.field_metadata[key]['label'])
self.tags_view.tag_item_renamed.emit() self.tags_view.tag_item_renamed.emit()
item.tag.name = val item.tag.name = val
self.dataChanged.emit(index, index) self.refresh()
# replace the old value in the duplicate detection map with the new one
self.category_items[key].discard(oldval)
self.category_items[key].add(val)
return True return True
def headerData(self, *args): def headerData(self, *args):


@ -183,15 +183,30 @@ class CustomColumns(object):
ans = self.conn.get('SELECT id, value FROM %s'%table) ans = self.conn.get('SELECT id, value FROM %s'%table)
return ans return ans
def rename_custom_item(self, id, new_name, label=None, num=None): def rename_custom_item(self, old_id, new_name, label=None, num=None):
if id: if label is not None:
if label is not None: data = self.custom_column_label_map[label]
data = self.custom_column_label_map[label] if num is not None:
if num is not None: data = self.custom_column_num_map[num]
data = self.custom_column_num_map[num] table,lt = self.custom_table_names(data['num'])
table,lt = self.custom_table_names(data['num']) # check if item exists
self.conn.execute('UPDATE %s SET value=? WHERE id=?'%table, (new_name, id)) new_id = self.conn.get(
self.conn.commit() 'SELECT id FROM %s WHERE value=?'%table, (new_name,), all=False)
if new_id is None:
self.conn.execute('UPDATE %s SET value=? WHERE id=?'%table, (new_name, old_id))
else:
# New id exists. If the column is_multiple, then process like
# tags, otherwise process like publishers (see database2)
if data['is_multiple']:
books = self.conn.get('''SELECT book from %s
WHERE value=?'''%lt, (old_id,))
for (book_id,) in books:
self.conn.execute('''DELETE FROM %s
WHERE book=? and value=?'''%lt, (book_id, new_id))
self.conn.execute('''UPDATE %s SET value=?
WHERE value=?'''%lt, (new_id, old_id,))
self.conn.execute('DELETE FROM %s WHERE id=?'%table, (old_id,))
self.conn.commit()
def delete_custom_item_using_id(self, id, label=None, num=None): def delete_custom_item_using_id(self, id, label=None, num=None):
if id: if id:


@ -999,16 +999,37 @@ class LibraryDatabase2(LibraryDatabase, SchemaUpgrade, CustomColumns):
return [] return []
return result return result
def rename_tag(self, id, new_name): def rename_tag(self, old_id, new_name):
if id: new_id = self.conn.get(
self.conn.execute('UPDATE tags SET name=? WHERE id=?', (new_name, id)) '''SELECT id from tags
self.conn.commit() WHERE name=?''', (new_name,), all=False)
if new_id is None:
# easy case. Simply rename the tag
self.conn.execute('''UPDATE tags SET name=?
WHERE id=?''', (new_name, old_id))
else:
# It is possible that by renaming a tag, the tag will appear
# twice on a book. This will throw an integrity error, aborting
# all the changes. To get around this, we first delete any links
# to the new_id from books referencing the old_id, so that
# renaming old_id to new_id will be unique on the book
books = self.conn.get('''SELECT book from books_tags_link
WHERE tag=?''', (old_id,))
for (book_id,) in books:
self.conn.execute('''DELETE FROM books_tags_link
WHERE book=? and tag=?''', (book_id, new_id))
# Change the link table to point at the new tag
self.conn.execute('''UPDATE books_tags_link SET tag=?
WHERE tag=?''',(new_id, old_id,))
# Get rid of the no-longer used publisher
self.conn.execute('DELETE FROM tags WHERE id=?', (old_id,))
self.conn.commit()
def delete_tag_using_id(self, id): def delete_tag_using_id(self, id):
if id: self.conn.execute('DELETE FROM books_tags_link WHERE tag=?', (id,))
self.conn.execute('DELETE FROM books_tags_link WHERE tag=?', (id,)) self.conn.execute('DELETE FROM tags WHERE id=?', (id,))
self.conn.execute('DELETE FROM tags WHERE id=?', (id,)) self.conn.commit()
self.conn.commit()
def get_series_with_ids(self): def get_series_with_ids(self):
result = self.conn.get('SELECT id,name FROM series') result = self.conn.get('SELECT id,name FROM series')
@ -1016,19 +1037,44 @@ class LibraryDatabase2(LibraryDatabase, SchemaUpgrade, CustomColumns):
return [] return []
return result return result
def rename_series(self, id, new_name): def rename_series(self, old_id, new_name):
if id: new_id = self.conn.get(
self.conn.execute('UPDATE series SET name=? WHERE id=?', (new_name, id)) '''SELECT id from series
self.conn.commit() WHERE name=?''', (new_name,), all=False)
if new_id is None:
self.conn.execute('UPDATE series SET name=? WHERE id=?',
(new_name, old_id))
else:
# New series exists. Must update the link, then assign a
# new series index to each of the books.
# Get the list of books where we must update the series index
books = self.conn.get('''SELECT books.id
FROM books, books_series_link as lt
WHERE books.id = lt.book AND lt.series=?
ORDER BY books.series_index''', (old_id,))
# Get the next series index
index = self.get_next_series_num_for(new_name)
# Now update the link table
self.conn.execute('''UPDATE books_series_link
SET series=?
WHERE series=?''',(new_id, old_id,))
# Now set the indices
for (book_id,) in books:
self.conn.execute('''UPDATE books
SET series_index=?
WHERE id=?''',(index, book_id,))
index = index + 1
self.conn.commit()
def delete_series_using_id(self, id): def delete_series_using_id(self, id):
if id: books = self.conn.get('SELECT book from books_series_link WHERE series=?', (id,))
books = self.conn.get('SELECT book from books_series_link WHERE series=?', (id,)) self.conn.execute('DELETE FROM books_series_link WHERE series=?', (id,))
self.conn.execute('DELETE FROM books_series_link WHERE series=?', (id,)) self.conn.execute('DELETE FROM series WHERE id=?', (id,))
self.conn.execute('DELETE FROM series WHERE id=?', (id,)) self.conn.commit()
self.conn.commit() for (book_id,) in books:
for (book_id,) in books: self.conn.execute('UPDATE books SET series_index=1.0 WHERE id=?', (book_id,))
self.conn.execute('UPDATE books SET series_index=1.0 WHERE id=?', (book_id,))
def get_publishers_with_ids(self): def get_publishers_with_ids(self):
result = self.conn.get('SELECT id,name FROM publishers') result = self.conn.get('SELECT id,name FROM publishers')
@ -1036,43 +1082,103 @@ class LibraryDatabase2(LibraryDatabase, SchemaUpgrade, CustomColumns):
return [] return []
return result return result
def rename_publisher(self, id, new_name): def rename_publisher(self, old_id, new_name):
if id: new_id = self.conn.get(
self.conn.execute('UPDATE publishers SET name=? WHERE id=?', (new_name, id)) '''SELECT id from publishers
self.conn.commit() WHERE name=?''', (new_name,), all=False)
if new_id is None:
# New name doesn't exist. Simply change the old name
self.conn.execute('UPDATE publishers SET name=? WHERE id=?', \
(new_name, old_id))
else:
# Change the link table to point at the new one
self.conn.execute('''UPDATE books_publishers_link
SET publisher=?
WHERE publisher=?''',(new_id, old_id,))
# Get rid of the no-longer used publisher
self.conn.execute('DELETE FROM publishers WHERE id=?', (old_id,))
self.conn.commit()
def delete_publisher_using_id(self, id): def delete_publisher_using_id(self, old_id):
if id: self.conn.execute('''DELETE FROM books_publishers_link
self.conn.execute('DELETE FROM books_publishers_link WHERE publisher=?', (id,)) WHERE publisher=?''', (old_id,))
self.conn.execute('DELETE FROM publishers WHERE id=?', (id,)) self.conn.execute('DELETE FROM publishers WHERE id=?', (old_id,))
self.conn.commit() self.conn.commit()
# There is no editor for author, so we do not need get_authors_with_ids or # There is no editor for author, so we do not need get_authors_with_ids or
# delete_author_using_id. # delete_author_using_id.
def rename_author(self, id, new_name):
if id: def rename_author(self, old_id, new_name):
# Make sure that any commas in new_name are changed to '|'! # Make sure that any commas in new_name are changed to '|'!
new_name = new_name.replace(',', '|') new_name = new_name.replace(',', '|')
self.conn.execute('UPDATE authors SET name=? WHERE id=?', (new_name, id))
self.conn.commit() # Get the list of books we must fix up, one way or the other
# now must fix up the books books = self.conn.get('SELECT book from books_authors_link WHERE author=?', (old_id,))
books = self.conn.get('SELECT book from books_authors_link WHERE author=?', (id,))
# check if the new author already exists
new_id = self.conn.get('SELECT id from authors WHERE name=?',
(new_name,), all=False)
if new_id is None:
# No name clash. Go ahead and update the author's name
self.conn.execute('UPDATE authors SET name=? WHERE id=?',
(new_name, old_id))
else:
# Author exists. To fix this, we must replace all the authors
# instead of replacing the one. Reason: db integrity checks can stop
# the rename process, which would leave everything half-done. We
# can't do it the same way as tags (delete and add) because author
# order is important.
for (book_id,) in books: for (book_id,) in books:
# First, must refresh the cache to see the new authors # Get the existing list of authors
self.data.refresh_ids(self, [book_id])
# now fix the filesystem paths
self.set_path(book_id, index_is_id=True)
# Next fix the author sort. Reset it to the default
authors = self.conn.get(''' authors = self.conn.get('''
SELECT authors.name SELECT author from books_authors_link
FROM authors, books_authors_link as bl WHERE book=?
WHERE bl.book = ? and bl.author = authors.id ORDER BY id''',(book_id,))
''' , (book_id,))
# unpack the double-list structure # unpack the double-list structure, replacing the old author
# with the new one while we are at it
for i,aut in enumerate(authors): for i,aut in enumerate(authors):
authors[i] = aut[0] authors[i] = aut[0] if aut[0] != old_id else new_id
ss = authors_to_sort_string(authors)
self.conn.execute('UPDATE books SET author_sort=? WHERE id=?', (ss, id)) # Delete the existing authors list
self.conn.execute('''DELETE FROM books_authors_link
WHERE book=?''',(book_id,))
# Change the authors to the new list
for aid in authors:
try:
self.conn.execute('''
INSERT INTO books_authors_link(book, author)
VALUES (?,?)''', (book_id, aid))
except IntegrityError:
# Sometimes books specify the same author twice in their
# metadata. Ignore it.
pass
# Now delete the old author from the DB
self.conn.execute('DELETE FROM authors WHERE id=?', (old_id,))
self.conn.commit()
# the authors are now changed, either by changing the author's name
# or replacing the author in the list. Now must fix up the books.
for (book_id,) in books:
# First, must refresh the cache to see the new authors
self.data.refresh_ids(self, [book_id])
# now fix the filesystem paths
self.set_path(book_id, index_is_id=True)
# Next fix the author sort. Reset it to the default
authors = self.conn.get('''
SELECT authors.name
FROM authors, books_authors_link as bl
WHERE bl.book = ? and bl.author = authors.id
''' , (book_id,))
# unpack the double-list structure
for i,aut in enumerate(authors):
authors[i] = aut[0]
ss = authors_to_sort_string(authors)
self.conn.execute('''UPDATE books
SET author_sort=?
WHERE id=?''', (ss, old_id))
self.conn.commit()
# the caller will do a general refresh, so we don't need to
# do one here
# end convenience methods # end convenience methods
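
(Illustration, not part of the diff.) The reworked rename_tag/rename_series/rename_publisher all follow the same merge-on-rename recipe: if the target name already exists, first delete link rows that would collide, then repoint the remaining links and drop the old row. A toy sqlite3 version of the tag case, reusing the table names shown above but with made-up data:

    import sqlite3

    conn = sqlite3.connect(':memory:')
    conn.executescript('''
        CREATE TABLE tags (id INTEGER PRIMARY KEY, name TEXT UNIQUE);
        CREATE TABLE books_tags_link (book INTEGER, tag INTEGER, UNIQUE(book, tag));
        INSERT INTO tags VALUES (1, 'SciFi');
        INSERT INTO tags VALUES (2, 'Science Fiction');
        INSERT INTO books_tags_link VALUES (10, 1);
        INSERT INTO books_tags_link VALUES (10, 2);
        INSERT INTO books_tags_link VALUES (11, 1);
    ''')

    def rename_tag(conn, old_id, new_name):
        row = conn.execute('SELECT id FROM tags WHERE name=?', (new_name,)).fetchone()
        if row is None:
            # Easy case: no clash, simply rename
            conn.execute('UPDATE tags SET name=? WHERE id=?', (new_name, old_id))
            return
        new_id = row[0]
        # Remove links that would violate UNIQUE(book, tag) after the repoint
        for (book_id,) in conn.execute(
                'SELECT book FROM books_tags_link WHERE tag=?', (old_id,)).fetchall():
            conn.execute('DELETE FROM books_tags_link WHERE book=? AND tag=?',
                         (book_id, new_id))
        conn.execute('UPDATE books_tags_link SET tag=? WHERE tag=?', (new_id, old_id))
        conn.execute('DELETE FROM tags WHERE id=?', (old_id,))

    rename_tag(conn, 1, 'Science Fiction')   # merges 'SciFi' into 'Science Fiction'
    print(conn.execute('SELECT book, tag FROM books_tags_link').fetchall())
    # -> [(10, 2), (11, 2)]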


@ -19,12 +19,20 @@ use *plugins* to add funtionality to |app|.
Environment variables Environment variables
----------------------- -----------------------
* ``CALIBRE_CONFIG_DIRECTORY`` * ``CALIBRE_CONFIG_DIRECTORY`` - sets the directory where configuration files are stored/read.
* ``CALIBRE_OVERRIDE_DATABASE_PATH`` * ``CALIBRE_OVERRIDE_DATABASE_PATH`` - allows you to specify the full path to metadata.db. Using this variable you can have metadata.db be in a location other than the library folder. Useful if your library folder is on a networked drive that does not support file locking.
* ``CALIBRE_DEVELOP_FROM`` * ``CALIBRE_DEVELOP_FROM`` - Used to run from a calibre development environment. See :ref:`develop`.
* ``CALIBRE_OVERRIDE_LANG`` * ``CALIBRE_OVERRIDE_LANG`` - Used to force the language used by the interface (ISO 639 language code)
* ``SYSFS_PATH`` * ``SYSFS_PATH`` - Use if sysfs is mounted somewhere other than /sys
* ``http_proxy`` * ``http_proxy`` - Used on linux to specify an HTTP proxy
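
(Illustration, not part of the diff.) These variables are read at startup, so one way to point a script or a test run at a private configuration directory is to set them in the environment before launching calibre; the paths below are made up and the example assumes the calibre launcher is on PATH:

    import os, subprocess

    env = dict(os.environ)
    # Keep this run's settings and metadata.db away from the normal install
    env['CALIBRE_CONFIG_DIRECTORY'] = '/tmp/calibre-test-config'
    env['CALIBRE_OVERRIDE_DATABASE_PATH'] = '/tmp/test-library/metadata.db'

    subprocess.call(['calibre'], env=env)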
Tweaks
------------
Tweaks are small changes that you can specify to control various aspects of |app|'s behavior. You specify them by editing the 2tweaks.py file in the config directory.
The default tweaks.py file is reproduced below
.. literalinclude:: ../../../resources/default_tweaks.py
A Hello World plugin A Hello World plugin