Mirror of https://github.com/kovidgoyal/calibre.git

Commit 25fa58b65e: KG updates per 0.7.5

@@ -15,22 +15,22 @@ class ZAOBAO(BasicNewsRecipe):
     no_stylesheets = True
     recursions = 1
     language = 'zh'

     encoding = 'gbk'
     # multithreaded_fetch = True

     keep_only_tags = [
-        dict(name='table', attrs={'cellpadding':'9'}),
-        dict(name='table', attrs={'class':'cont'}),
-        dict(name='div', attrs={'id':'content'}),
+        dict(name='td', attrs={'class':'text'}),
         dict(name='span', attrs={'class':'page'}),
+        dict(name='div', attrs={'id':'content'})
     ]

     remove_tags = [
         dict(name='table', attrs={'cellspacing':'9'}),
+        dict(name='fieldset'),
+        dict(name='div', attrs={'width':'30%'}),
     ]

-    extra_css = '\
+    extra_css = '\n\
     @font-face {font-family: "serif1";src:url(res:///opt/sony/ebook/FONT/tt0011m_.ttf)}\n\
     body{font-family: serif1, serif}\n\
     .article_description{font-family: serif1, serif}\n\
@@ -41,7 +41,10 @@ class ZAOBAO(BasicNewsRecipe):
     .article {font-size:medium}\n\
     .navbar {font-size: small}\n\
     .feed{font-size: medium}\n\
-    .small{font-size: small; padding-right: 8%}\n'
+    .small{font-size: small;padding-right: 8pt}\n\
+    .text{padding-right: 8pt}\n\
+    p{text-indent: 0cm}\n\
+    div#content{padding-right: 10pt}'

     INDEXES = [
         (u'\u65b0\u95fb\u56fe\u7247', u'http://www.zaobao.com/photoweb/photoweb_idx.shtml')
@@ -51,27 +54,35 @@ class ZAOBAO(BasicNewsRecipe):
     DESC_SENSE = u'\u8054\u5408\u65e9\u62a5\u7f51'

     feeds = [
         (u'\u5373\u65f6\u62a5\u9053', u'http://realtime.zaobao.com/news.xml'),
         (u'\u4e2d\u56fd\u65b0\u95fb', u'http://www.zaobao.com/zg/zg.xml'),
         (u'\u56fd\u9645\u65b0\u95fb', u'http://www.zaobao.com/gj/gj.xml'),
         (u'\u4e16\u754c\u62a5\u520a\u6587\u8403', u'http://www.zaobao.com/wencui/wencui.xml'),
         (u'\u4e1c\u5357\u4e9a\u65b0\u95fb', u'http://www.zaobao.com/yx/yx.xml'),
         (u'\u65b0\u52a0\u5761\u65b0\u95fb', u'http://www.zaobao.com/sp/sp.xml'),
         (u'\u4eca\u65e5\u89c2\u70b9', u'http://www.zaobao.com/yl/yl.xml'),
         (u'\u4e2d\u56fd\u8d22\u7ecf', u'http://www.zaobao.com/cz/cz.xml'),
         (u'\u72ee\u57ce\u8d22\u7ecf', u'http://www.zaobao.com/cs/cs.xml'),
         (u'\u5168\u7403\u8d22\u7ecf', u'http://www.zaobao.com/cg/cg.xml'),
         (u'\u65e9\u62a5\u4f53\u80b2', u'http://www.zaobao.com/ty/ty.xml'),
         (u'\u65e9\u62a5\u526f\u520a', u'http://www.zaobao.com/fk/fk.xml'),
     ]

+    def preprocess_html(self, soup):
+        for tag in soup.findAll(name='a'):
+            if tag.has_key('href'):
+                tag_url = tag['href']
+                if tag_url.find('http://') != -1 and tag_url.find('zaobao.com') == -1:
+                    del tag['href']
+        return soup
+
     def postprocess_html(self, soup, first):
         for tag in soup.findAll(name=['table', 'tr', 'td']):
             tag.name = 'div'
         return soup

     def parse_feeds(self):
-        self.log.debug('ZAOBAO overrided parse_feeds()')
+        self.log_debug(_('ZAOBAO overrided parse_feeds()'))
         parsed_feeds = BasicNewsRecipe.parse_feeds(self)

         for id, obj in enumerate(self.INDEXES):
@@ -88,7 +99,7 @@ class ZAOBAO(BasicNewsRecipe):
                 a_title = self.tag_to_string(a)
                 date = ''
                 description = ''
-                self.log.debug('adding %s at %s'%(a_title,a_url))
+                self.log_debug(_('adding %s at %s')%(a_title,a_url))
                 articles.append({
                     'title':a_title,
                     'date':date,
@@ -97,26 +108,25 @@ class ZAOBAO(BasicNewsRecipe):
                 })

             pfeeds = feeds_from_index([(title, articles)], oldest_article=self.oldest_article,
-                max_articles_per_feed=self.max_articles_per_feed,
-                log=self.log)
+                max_articles_per_feed=self.max_articles_per_feed)

-            self.log.debug('adding %s to feed'%(title))
+            self.log_debug(_('adding %s to feed')%(title))
             for feed in pfeeds:
-                self.log.debug('adding feed: %s'%(feed.title))
+                self.log_debug(_('adding feed: %s')%(feed.title))
                 feed.description = self.DESC_SENSE
                 parsed_feeds.append(feed)
                 for a, article in enumerate(feed):
-                    self.log.debug('added article %s from %s'%(article.title, article.url))
+                    self.log_debug(_('added article %s from %s')%(article.title, article.url))
-                self.log.debug('added feed %s'%(feed.title))
+                self.log_debug(_('added feed %s')%(feed.title))

         for i, feed in enumerate(parsed_feeds):
             # workaorund a strange problem: Somethimes the xml encoding is not apllied correctly by parse()
             weired_encoding_detected = False
             if not isinstance(feed.description, unicode) and self.encoding and feed.description:
-                self.log.debug('Feed %s is not encoded correctly, manually replace it'%(feed.title))
+                self.log_debug(_('Feed %s is not encoded correctly, manually replace it')%(feed.title))
                 feed.description = feed.description.decode(self.encoding, 'replace')
             elif feed.description.find(self.DESC_SENSE) == -1 and self.encoding and feed.description:
-                self.log.debug('Feed %s is strangely encoded, manually redo all'%(feed.title))
+                self.log_debug(_('Feed %s is weired encoded, manually redo all')%(feed.title))
                 feed.description = feed.description.encode('cp1252', 'replace').decode(self.encoding, 'replace')
                 weired_encoding_detected = True

@@ -138,7 +148,7 @@ class ZAOBAO(BasicNewsRecipe):
                     article.text_summary = article.text_summary.encode('cp1252', 'replace').decode(self.encoding, 'replace')

                 if article.title == "Untitled article":
-                    self.log.debug('Removing empty article %s from %s'%(article.title, article.url))
+                    self.log_debug(_('Removing empty article %s from %s')%(article.title, article.url))
                     # remove the article
                     feed.articles[a:a+1] = []
         return parsed_feeds

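The new preprocess_html hook simply strips hrefs that point off zaobao.com, so the generated e-book carries no dead external links. A minimal standalone sketch of that behaviour, assuming the BeautifulSoup 3 style API (findAll, has_key) that calibre recipes used at the time:

from calibre.ebooks.BeautifulSoup import BeautifulSoup

html = '<p><a href="http://example.com/x">off-site</a> <a href="http://www.zaobao.com/gj/a.shtml">on-site</a></p>'
soup = BeautifulSoup(html)
for tag in soup.findAll(name='a'):
    if tag.has_key('href'):
        tag_url = tag['href']
        # drop links that are absolute and do not point at zaobao.com
        if tag_url.find('http://') != -1 and tag_url.find('zaobao.com') == -1:
            del tag['href']
print soup   # the off-site anchor keeps its text but loses its href
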
@@ -406,3 +406,8 @@ img, object, svg|svg {
     width: auto;
     height: auto;
 }
+
+/* These are needed because ADE renders anchors the same as links */
+
+a { text-decoration: inherit; color: inherit; cursor: inherit }
+a[href] { text-decoration: underline; color: blue; cursor: pointer }

@@ -43,6 +43,7 @@ mimetypes.add_type('application/x-mobipocket-ebook', '.prc')
 mimetypes.add_type('application/x-mobipocket-ebook', '.azw')
 mimetypes.add_type('application/x-cbz', '.cbz')
 mimetypes.add_type('application/x-cbr', '.cbr')
+mimetypes.add_type('application/x-koboreader-ebook', '.kobo')
 mimetypes.add_type('image/wmf', '.wmf')
 guess_type = mimetypes.guess_type
 import cssutils

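The new '.kobo' registration flows straight into guess_type, which this module re-exports for the rest of calibre. A quick stdlib-only check of the effect:

import mimetypes

mimetypes.add_type('application/x-koboreader-ebook', '.kobo')

print mimetypes.guess_type('some_book.kobo')[0]
# -> application/x-koboreader-ebook
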
@@ -436,7 +436,7 @@ from calibre.devices.blackberry.driver import BLACKBERRY
 from calibre.devices.cybook.driver import CYBOOK
 from calibre.devices.eb600.driver import EB600, COOL_ER, SHINEBOOK, \
                 POCKETBOOK360, GER2, ITALICA, ECLICTO, DBOOK, INVESBOOK, \
-                BOOQ, ELONEX, POCKETBOOK301
+                BOOQ, ELONEX, POCKETBOOK301, MENTOR
 from calibre.devices.iliad.driver import ILIAD
 from calibre.devices.irexdr.driver import IREXDR1000, IREXDR800
 from calibre.devices.jetbook.driver import JETBOOK
@@ -550,6 +550,7 @@ plugins += [
         AZBOOKA,
         FOLDER_DEVICE_FOR_CONFIG,
         AVANT,
+        MENTOR,
         ]
 plugins += [x for x in list(locals().values()) if isinstance(x, type) and \
                                         x.__name__.endswith('MetadataReader')]

@@ -34,6 +34,9 @@ class ANDROID(USBMS):

             # Acer
             0x502 : { 0x3203 : [0x0100]},

+            # Dell
+            0x413c : { 0xb007 : [0x0100]},
+
             }
     EBOOK_DIR_MAIN = ['wordplayer/calibretransfer', 'eBooks/import', 'Books']
     EXTRA_CUSTOMIZATION_MESSAGE = _('Comma separated list of directories to '
@@ -42,7 +45,7 @@ class ANDROID(USBMS):
     EXTRA_CUSTOMIZATION_DEFAULT = ', '.join(EBOOK_DIR_MAIN)

     VENDOR_NAME = ['HTC', 'MOTOROLA', 'GOOGLE_', 'ANDROID', 'ACER',
-            'GT-I5700', 'SAMSUNG']
+            'GT-I5700', 'SAMSUNG', 'DELL']
     WINDOWS_MAIN_MEM = ['ANDROID_PHONE', 'A855', 'A853', 'INC.NEXUS_ONE',
             '__UMS_COMPOSITE', '_MB200', 'MASS_STORAGE', '_-_CARD',
             'PROD_GT-I9000']

@@ -2721,11 +2721,11 @@ class ITUNES_ASYNC(ITUNES):
         else:
             return BookList(self.log)

-    def disconnect_from_folder(self):
+    def unmount_device(self):
         '''
         '''
         if DEBUG:
-            self.log.info("ITUNES_ASYNC:disconnect_from_folder()")
+            self.log.info("ITUNES_ASYNC:unmount_device()")
         self.connected = False

     def eject(self):

@@ -186,6 +186,15 @@ class BOOQ(EB600):
     WINDOWS_MAIN_MEM = 'EB600'
     WINDOWS_CARD_A_MEM = 'EB600'

+class MENTOR(EB600):
+
+    name = 'Astak Mentor EB600'
+    gui_name = 'Mentor'
+    description = _('Communicate with the Astak Mentor EB600')
+    FORMATS = ['epub', 'fb2', 'mobi', 'prc', 'pdf', 'txt']
+
+    WINDOWS_MAIN_MEM = WINDOWS_CARD_A_MEM = 'MENTOR'
+
 class ELONEX(EB600):

     name = 'Elonex 600EB'

@@ -66,7 +66,7 @@ class FOLDER_DEVICE(USBMS):
                 detected_device=None):
         pass

-    def disconnect_from_folder(self):
+    def unmount_device(self):
         self._main_prefix = ''
         self.is_connected = False

src/calibre/devices/kobo/books.py (new file, 114 lines)

@@ -0,0 +1,114 @@
+__license__ = 'GPL v3'
+__copyright__ = '2010, Timothy Legge <timlegge at gmail.com>'
+'''
+'''
+
+import os
+import re
+import time
+
+from calibre.ebooks.metadata import MetaInformation
+from calibre.constants import filesystem_encoding, preferred_encoding
+from calibre import isbytestring
+
+class Book(MetaInformation):
+
+    BOOK_ATTRS = ['lpath', 'size', 'mime', 'device_collections']
+
+    JSON_ATTRS = [
+        'lpath', 'title', 'authors', 'mime', 'size', 'tags', 'author_sort',
+        'title_sort', 'comments', 'category', 'publisher', 'series',
+        'series_index', 'rating', 'isbn', 'language', 'application_id',
+        'book_producer', 'lccn', 'lcc', 'ddc', 'rights', 'publication_type',
+        'uuid',
+    ]
+
+    def __init__(self, prefix, lpath, title, authors, mime, date, ContentType, thumbnail_name, other=None):
+
+        MetaInformation.__init__(self, '')
+        self.device_collections = []
+
+        self.path = os.path.join(prefix, lpath)
+        if os.sep == '\\':
+            self.path = self.path.replace('/', '\\')
+            self.lpath = lpath.replace('\\', '/')
+        else:
+            self.lpath = lpath
+
+        self.title = title
+        if not authors:
+            self.authors = ['']
+        else:
+            self.authors = [authors]
+        self.mime = mime
+        try:
+            self.size = os.path.getsize(self.path)
+        except OSError:
+            self.size = 0
+        try:
+            if ContentType == '6':
+                self.datetime = time.strptime(date, "%Y-%m-%dT%H:%M:%S.%f")
+            else:
+                self.datetime = time.gmtime(os.path.getctime(self.path))
+        except:
+            self.datetime = time.gmtime()
+
+        self.thumbnail = ImageWrapper(thumbnail_name)
+        self.tags = []
+        if other:
+            self.smart_update(other)
+
+    def __eq__(self, other):
+        return self.path == getattr(other, 'path', None)
+
+    @dynamic_property
+    def db_id(self):
+        doc = '''The database id in the application database that this file corresponds to'''
+        def fget(self):
+            match = re.search(r'_(\d+)$', self.lpath.rpartition('.')[0])
+            if match:
+                return int(match.group(1))
+            return None
+        return property(fget=fget, doc=doc)
+
+    @dynamic_property
+    def title_sorter(self):
+        doc = '''String to sort the title. If absent, title is returned'''
+        def fget(self):
+            return re.sub('^\s*A\s+|^\s*The\s+|^\s*An\s+', '', self.title).rstrip()
+        return property(doc=doc, fget=fget)
+
+    @dynamic_property
+    def thumbnail(self):
+        return None
+
+    def smart_update(self, other):
+        '''
+        Merge the information in C{other} into self. In case of conflicts, the information
+        in C{other} takes precedence, unless the information in C{other} is NULL.
+        '''
+
+        MetaInformation.smart_update(self, other)
+
+        for attr in self.BOOK_ATTRS:
+            if hasattr(other, attr):
+                val = getattr(other, attr, None)
+                setattr(self, attr, val)
+
+    def to_json(self):
+        json = {}
+        for attr in self.JSON_ATTRS:
+            val = getattr(self, attr)
+            if isbytestring(val):
+                enc = filesystem_encoding if attr == 'lpath' else preferred_encoding
+                val = val.decode(enc, 'replace')
+            elif isinstance(val, (list, tuple)):
+                val = [x.decode(preferred_encoding, 'replace') if
+                        isbytestring(x) else x for x in val]
+            json[attr] = val
+        return json
+
+class ImageWrapper(object):
+    def __init__(self, image_path):
+        self.image_path = image_path

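A hedged sketch of how the new driver constructs one of these Book objects (see books() and add_books_to_metadata() in the Kobo driver changes below); every value here is illustrative, and the snippet only makes sense inside a calibre environment of this vintage:

from calibre.devices.kobo.books import Book

b = Book('/media/KOBOeReader/',        # hypothetical device mount prefix
         'books/example.epub',          # lpath, relative to the prefix
         'Example Title', 'Some Author',
         'application/epub+zip',        # mime
         '2010-06-01T12:00:00.000',     # DateCreated value from the Kobo database
         '16',                          # ContentType: '6' means a Kobo store book, otherwise sideloaded
         '/tmp/no-such-thumbnail')      # thumbnail path, stored via ImageWrapper

print b.title, b.authors, b.size   # size falls back to 0 when the file does not exist
print b.to_json()['lpath']         # JSON_ATTRS control what the device metadata cache persists
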
@@ -2,17 +2,26 @@
 # vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai

 __license__ = 'GPL v3'
-__copyright__ = '2010, Kovid Goyal <kovid@kovidgoyal.net>'
+__copyright__ = '2010, Timothy Legge <timlegge at gmail.com> and Kovid Goyal <kovid@kovidgoyal.net>'
 __docformat__ = 'restructuredtext en'

+import os
+import sqlite3 as sqlite
+
+from calibre.devices.usbms.books import BookList
+from calibre.devices.kobo.books import Book
+from calibre.devices.kobo.books import ImageWrapper
+from calibre.devices.mime import mime_type_ext
 from calibre.devices.usbms.driver import USBMS
+from calibre import prints

 class KOBO(USBMS):

     name = 'Kobo Reader Device Interface'
     gui_name = 'Kobo Reader'
     description = _('Communicate with the Kobo Reader')
-    author = 'Kovid Goyal'
+    author = 'Timothy Legge and Kovid Goyal'
+    version = (1, 0, 4)

     supported_platforms = ['windows', 'osx', 'linux']

@@ -29,3 +38,309 @@ class KOBO(USBMS):
     EBOOK_DIR_MAIN = ''
     SUPPORTS_SUB_DIRS = True

+    def initialize(self):
+        USBMS.initialize(self)
+        self.book_class = Book
+
+    def books(self, oncard=None, end_session=True):
+        from calibre.ebooks.metadata.meta import path_to_ext
+
+        dummy_bl = BookList(None, None, None)
+
+        if oncard == 'carda' and not self._card_a_prefix:
+            self.report_progress(1.0, _('Getting list of books on device...'))
+            return dummy_bl
+        elif oncard == 'cardb' and not self._card_b_prefix:
+            self.report_progress(1.0, _('Getting list of books on device...'))
+            return dummy_bl
+        elif oncard and oncard != 'carda' and oncard != 'cardb':
+            self.report_progress(1.0, _('Getting list of books on device...'))
+            return dummy_bl
+
+        prefix = self._card_a_prefix if oncard == 'carda' else \
+                 self._card_b_prefix if oncard == 'cardb' \
+                 else self._main_prefix
+
+        # get the metadata cache
+        bl = self.booklist_class(oncard, prefix, self.settings)
+        need_sync = self.parse_metadata_cache(bl, prefix, self.METADATA_CACHE)
+
+        # make a dict cache of paths so the lookup in the loop below is faster.
+        bl_cache = {}
+        for idx,b in enumerate(bl):
+            bl_cache[b.lpath] = idx
+
+        def update_booklist(prefix, path, title, authors, mime, date, ContentType, ImageID):
+            changed = False
+            # if path_to_ext(path) in self.FORMATS:
+            try:
+                lpath = path.partition(self.normalize_path(prefix))[2]
+                if lpath.startswith(os.sep):
+                    lpath = lpath[len(os.sep):]
+                lpath = lpath.replace('\\', '/')
+                # print "LPATH: " + lpath
+
+                path = self.normalize_path(path)
+                # print "Normalized FileName: " + path
+
+                idx = bl_cache.get(lpath, None)
+                if idx is not None:
+                    imagename = self.normalize_path(prefix + '.kobo/images/' + ImageID + ' - NickelBookCover.parsed')
+                    #print "Image name Normalized: " + imagename
+                    bl[idx].thumbnail = ImageWrapper(imagename)
+                    bl_cache[lpath] = None
+                    if ContentType != '6':
+                        if self.update_metadata_item(bl[idx]):
+                            # print 'update_metadata_item returned true'
+                            changed = True
+                else:
+                    book = Book(prefix, lpath, title, authors, mime, date, ContentType, ImageID)
+                    # print 'Update booklist'
+                    if bl.add_book(book, replace_metadata=False):
+                        changed = True
+            except: # Probably a path encoding error
+                import traceback
+                traceback.print_exc()
+            return changed
+
+        connection = sqlite.connect(self._main_prefix + '.kobo/KoboReader.sqlite')
+        cursor = connection.cursor()
+
+        #query = 'select count(distinct volumeId) from volume_shortcovers'
+        #cursor.execute(query)
+        #for row in (cursor):
+        #    numrows = row[0]
+        #cursor.close()
+
+        query= 'select Title, Attribution, DateCreated, ContentID, MimeType, ContentType, ' \
+               'ImageID from content where BookID is Null'
+
+        cursor.execute (query)
+
+        changed = False
+        for i, row in enumerate(cursor):
+            # self.report_progress((i+1) / float(numrows), _('Getting list of books on device...'))
+
+            path = self.path_from_contentid(row[3], row[5], oncard)
+            mime = mime_type_ext(path_to_ext(row[3]))
+
+            if oncard != 'carda' and oncard != 'cardb' and not row[3].startswith("file:///mnt/sd/"):
+                changed = update_booklist(self._main_prefix, path, row[0], row[1], mime, row[2], row[5], row[6])
+                # print "shortbook: " + path
+            elif oncard == 'carda' and row[3].startswith("file:///mnt/sd/"):
+                changed = update_booklist(self._card_a_prefix, path, row[0], row[1], mime, row[2], row[5], row[6])
+
+            if changed:
+                need_sync = True
+
+        cursor.close()
+        connection.close()
+
+        # Remove books that are no longer in the filesystem. Cache contains
+        # indices into the booklist if book not in filesystem, None otherwise
+        # Do the operation in reverse order so indices remain valid
+        for idx in sorted(bl_cache.itervalues(), reverse=True):
+            if idx is not None:
+                need_sync = True
+                del bl[idx]
+
+        #print "count found in cache: %d, count of files in metadata: %d, need_sync: %s" % \
+        #      (len(bl_cache), len(bl), need_sync)
+        if need_sync: #self.count_found_in_bl != len(bl) or need_sync:
+            if oncard == 'cardb':
+                self.sync_booklists((None, None, bl))
+            elif oncard == 'carda':
+                self.sync_booklists((None, bl, None))
+            else:
+                self.sync_booklists((bl, None, None))
+
+        self.report_progress(1.0, _('Getting list of books on device...'))
+        return bl
+
+    def delete_via_sql(self, ContentID, ContentType):
+        # Delete Order:
+        #    1) shortcover_page
+        #    2) volume_shorcover
+        #    2) content
+
+        connection = sqlite.connect(self._main_prefix + '.kobo/KoboReader.sqlite')
+        cursor = connection.cursor()
+        t = (ContentID,)
+        cursor.execute('select ImageID from content where ContentID = ?', t)
+
+        ImageID = None
+        for row in cursor:
+            # First get the ImageID to delete the images
+            ImageID = row[0]
+        cursor.close()
+
+        if ImageID != None:
+            cursor = connection.cursor()
+            if ContentType == 6:
+                # Delete the shortcover_pages first
+                cursor.execute('delete from shortcover_page where shortcoverid in (select ContentID from content where BookID = ?)', t)
+
+            #Delete the volume_shortcovers second
+            cursor.execute('delete from volume_shortcovers where volumeid = ?', t)
+
+            # Delete the chapters associated with the book next
+            t = (ContentID,ContentID,)
+            cursor.execute('delete from content where BookID = ? or ContentID = ?', t)
+
+            connection.commit()
+
+            cursor.close()
+        else:
+            print "Error condition ImageID was not found"
+            print "You likely tried to delete a book that the kobo has not yet added to the database"
+
+        connection.close()
+        # If all this succeeds we need to delete the images files via the ImageID
+        return ImageID
+
+    def delete_images(self, ImageID):
+        if ImageID != None:
+            path_prefix = '.kobo/images/'
+            path = self._main_prefix + path_prefix + ImageID
+
+            file_endings = (' - iPhoneThumbnail.parsed', ' - bbMediumGridList.parsed', ' - NickelBookCover.parsed',)
+
+            for ending in file_endings:
+                fpath = path + ending
+                fpath = self.normalize_path(fpath)
+
+                if os.path.exists(fpath):
+                    # print 'Image File Exists: ' + fpath
+                    os.unlink(fpath)
+
+    def delete_books(self, paths, end_session=True):
+        for i, path in enumerate(paths):
+            self.report_progress((i+1) / float(len(paths)), _('Removing books from device...'))
+            path = self.normalize_path(path)
+            # print "Delete file normalized path: " + path
+            extension = os.path.splitext(path)[1]
+
+            if extension == '.kobo':
+                # Kobo books do not have book files. They do have some images though
+                #print "kobo book"
+                ContentType = 6
+                ContentID = self.contentid_from_path(path, ContentType)
+            if extension == '.pdf' or extension == '.epub':
+                # print "ePub or pdf"
+                ContentType = 16
+                #print "Path: " + path
+                ContentID = self.contentid_from_path(path, ContentType)
+                # print "ContentID: " + ContentID
+            ImageID = self.delete_via_sql(ContentID, ContentType)
+            #print " We would now delete the Images for" + ImageID
+            self.delete_images(ImageID)
+
+            if os.path.exists(path):
+                # Delete the ebook
+                # print "Delete the ebook: " + path
+                os.unlink(path)
+
+                filepath = os.path.splitext(path)[0]
+                for ext in self.DELETE_EXTS:
+                    if os.path.exists(filepath + ext):
+                        # print "Filename: " + filename
+                        os.unlink(filepath + ext)
+                    if os.path.exists(path + ext):
+                        # print "Filename: " + filename
+                        os.unlink(path + ext)
+
+                if self.SUPPORTS_SUB_DIRS:
+                    try:
+                        # print "removed"
+                        os.removedirs(os.path.dirname(path))
+                    except:
+                        pass
+        self.report_progress(1.0, _('Removing books from device...'))
+
+    def remove_books_from_metadata(self, paths, booklists):
+        for i, path in enumerate(paths):
+            self.report_progress((i+1) / float(len(paths)), _('Removing books from device metadata listing...'))
+            for bl in booklists:
+                for book in bl:
+                    #print "Book Path: " + book.path
+                    if path.endswith(book.path):
+                        #print "     Remove: " + book.path
+                        bl.remove_book(book)
+        self.report_progress(1.0, _('Removing books from device metadata listing...'))
+
+    def add_books_to_metadata(self, locations, metadata, booklists):
+        metadata = iter(metadata)
+        for i, location in enumerate(locations):
+            self.report_progress((i+1) / float(len(locations)), _('Adding books to device metadata listing...'))
+            info = metadata.next()
+            blist = 2 if location[1] == 'cardb' else 1 if location[1] == 'carda' else 0
+
+            # Extract the correct prefix from the pathname. To do this correctly,
+            # we must ensure that both the prefix and the path are normalized
+            # so that the comparison will work. Book's __init__ will fix up
+            # lpath, so we don't need to worry about that here.
+            path = self.normalize_path(location[0])
+            if self._main_prefix:
+                prefix = self._main_prefix if \
+                           path.startswith(self.normalize_path(self._main_prefix)) else None
+            if not prefix and self._card_a_prefix:
+                prefix = self._card_a_prefix if \
+                           path.startswith(self.normalize_path(self._card_a_prefix)) else None
+            if not prefix and self._card_b_prefix:
+                prefix = self._card_b_prefix if \
+                           path.startswith(self.normalize_path(self._card_b_prefix)) else None
+            if prefix is None:
+                prints('in add_books_to_metadata. Prefix is None!', path,
+                        self._main_prefix)
+                continue
+            #print "Add book to metatdata: "
+            #print "prefix: " + prefix
+            lpath = path.partition(prefix)[2]
+            if lpath.startswith('/') or lpath.startswith('\\'):
+                lpath = lpath[1:]
+            #print "path: " + lpath
+            #book = self.book_class(prefix, lpath, other=info)
+            lpath = self.normalize_path(prefix + lpath)
+            book = Book(prefix, lpath, '', '', '', '', '', '', other=info)
+            if book.size is None:
+                book.size = os.stat(self.normalize_path(path)).st_size
+            booklists[blist].add_book(book, replace_metadata=True)
+        self.report_progress(1.0, _('Adding books to device metadata listing...'))
+
+    def contentid_from_path(self, path, ContentType):
+        if ContentType == 6:
+            ContentID = os.path.splitext(path)[0]
+            # Remove the prefix on the file.  it could be either
+            ContentID = ContentID.replace(self._main_prefix, '')
+            if self._card_a_prefix is not None:
+                ContentID = ContentID.replace(self._card_a_prefix, '')
+        else: # ContentType = 16
+            ContentID = path
+            ContentID = ContentID.replace(self._main_prefix, "file:///mnt/onboard/")
+            if self._card_a_prefix is not None:
+                ContentID = ContentID.replace(self._card_a_prefix, "file:///mnt/sd/")
+        ContentID = ContentID.replace("\\", '/')
+        return ContentID
+
+    def path_from_contentid(self, ContentID, ContentType, oncard):
+        path = ContentID
+
+        if oncard == 'cardb':
+            print 'path from_contentid cardb'
+        elif oncard == 'carda':
+            path = path.replace("file:///mnt/sd/", self._card_a_prefix)
+            # print "SD Card: " + filename
+        else:
+            if ContentType == "6":
+                # This is a hack as the kobo files do not exist
+                # but the path is required to make a unique id
+                # for calibre's reference
+                path = self._main_prefix + path + '.kobo'
+                # print "Path: " + path
+            else:
+                # if path.startswith("file:///mnt/onboard/"):
+                path = path.replace("file:///mnt/onboard/", self._main_prefix)
+                # print "Internal: " + filename
+
+        return path

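The two path-mapping helpers at the end of the driver encode its central convention: sideloaded books are addressed by file:///mnt/onboard/ (or file:///mnt/sd/) ContentIDs, while Kobo store books have no file on disk and get a fake .kobo path purely so calibre has a unique id. A stripped-down sketch of that mapping, with a hypothetical mount point standing in for self._main_prefix (note the original compares ContentType as the string '6' in one direction and the integer 6 in the other):

import os

MAIN_PREFIX = '/media/KOBOeReader/'   # hypothetical mount point

def path_from_contentid(ContentID, ContentType):
    if ContentType == '6':
        # Kobo store books: fabricate a .kobo path so calibre gets a unique id
        return MAIN_PREFIX + ContentID + '.kobo'
    # sideloaded books: the ContentID is a file:// URL rooted at the device
    return ContentID.replace('file:///mnt/onboard/', MAIN_PREFIX)

def contentid_from_path(path, ContentType):
    if ContentType == 6:
        return os.path.splitext(path)[0].replace(MAIN_PREFIX, '')
    return path.replace(MAIN_PREFIX, 'file:///mnt/onboard/').replace('\\', '/')

print path_from_contentid('file:///mnt/onboard/books/example.epub', '16')
# -> /media/KOBOeReader/books/example.epub
print contentid_from_path('/media/KOBOeReader/books/example.epub', 16)
# -> file:///mnt/onboard/books/example.epub
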
@ -380,10 +380,9 @@ class EPUBOutput(OutputFormatPlugin):
|
|||||||
sel = '.'+lb.get('class')
|
sel = '.'+lb.get('class')
|
||||||
for rule in stylesheet.data.cssRules.rulesOfType(CSSRule.STYLE_RULE):
|
for rule in stylesheet.data.cssRules.rulesOfType(CSSRule.STYLE_RULE):
|
||||||
if sel == rule.selectorList.selectorText:
|
if sel == rule.selectorList.selectorText:
|
||||||
val = rule.style.removeProperty('margin-left')
|
rule.style.removeProperty('margin-left')
|
||||||
pval = rule.style.getProperty('padding-left')
|
# padding-left breaks rendering in webkit and gecko
|
||||||
if val and not pval:
|
rule.style.removeProperty('padding-left')
|
||||||
rule.style.setProperty('padding-left', val)
|
|
||||||
|
|
||||||
# }}}
|
# }}}
|
||||||
|
|
||||||
|
@@ -367,7 +367,7 @@ class LRFInput(InputFormatPlugin):
         xml = d.to_xml(write_files=True)
         if options.verbose > 2:
             open('lrs.xml', 'wb').write(xml.encode('utf-8'))
-        parser = etree.XMLParser(recover=True, no_network=True)
+        parser = etree.XMLParser(recover=True, no_network=True, huge_tree=True)
         doc = etree.fromstring(xml, parser=parser)
         char_button_map = {}
         for x in doc.xpath('//CharButton[@refobj]'):

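huge_tree lifts libxml2's hard limits on node size and tree depth, which very large LRS documents can otherwise hit, while recover keeps parsing past markup errors and no_network blocks external fetches. A small lxml-only sketch of the parser the converter now uses:

from lxml import etree

parser = etree.XMLParser(recover=True, no_network=True, huge_tree=True)

# recover=True returns a best-effort tree for slightly broken markup instead of raising
doc = etree.fromstring('<Book><Text>unclosed element<br></Text></Book>', parser=parser)
print etree.tostring(doc)
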
@@ -210,31 +210,19 @@ class LibraryThing(MetadataSource): # {{{

     name = 'LibraryThing'
     metadata_type = 'social'
-    description = _('Downloads series information from librarything.com')
+    description = _('Downloads series/tags/rating information from librarything.com')

     def fetch(self):
         if not self.isbn:
             return
-        from calibre.ebooks.metadata import MetaInformation
-        import json
-        br = browser()
+        from calibre.ebooks.metadata.library_thing import get_social_metadata
         try:
-            raw = br.open(
-                    'http://status.calibre-ebook.com/library_thing/metadata/'+self.isbn
-                    ).read()
-            data = json.loads(raw)
-            if not data:
-                return
-            if 'error' in data:
-                raise Exception(data['error'])
-            if 'series' in data and 'series_index' in data:
-                mi = MetaInformation(self.title, [])
-                mi.series = data['series']
-                mi.series_index = data['series_index']
-                self.results = mi
+            self.results = get_social_metadata(self.title, self.book_author,
+                    self.publisher, self.isbn)
         except Exception, e:
             self.exception = e
             self.tb = traceback.format_exc()

 # }}}

@@ -369,6 +357,16 @@ def search(title=None, author=None, publisher=None, isbn=None, isbndb_key=None,
             if title.lower() == r.title[:len(title)].lower() and r.comments and len(r.comments):
                 results[0].comments = r.comments
                 break
+    # Find a pubdate
+    pubdate = None
+    for r in results:
+        if r.pubdate is not None:
+            pubdate = r.pubdate
+            break
+    if pubdate is not None:
+        for r in results:
+            if r.pubdate is None:
+                r.pubdate = pubdate

     # for r in results:
     #     print "{0:14.14} {1:30.30} {2:20.20} {3:6} {4}".format(r.isbn, r.title, r.publisher, len(r.comments if r.comments else ''), r.has_cover)

@@ -6,10 +6,11 @@ Fetch cover from LibraryThing.com based on ISBN number.

 import sys, socket, os, re
-from calibre import browser as _browser
+
+from lxml import html
+
+from calibre import browser, prints
 from calibre.utils.config import OptionParser
 from calibre.ebooks.BeautifulSoup import BeautifulSoup
-browser = None

 OPENLIBRARY = 'http://covers.openlibrary.org/b/isbn/%s-L.jpg?default=false'

@@ -22,31 +23,28 @@ class ISBNNotFound(LibraryThingError):
 class ServerBusy(LibraryThingError):
     pass

-def login(username, password, force=True):
-    global browser
-    if browser is not None and not force:
-        return
-    browser = _browser()
-    browser.open('http://www.librarything.com')
-    browser.select_form('signup')
-    browser['formusername'] = username
-    browser['formpassword'] = password
-    browser.submit()
+def login(br, username, password, force=True):
+    br.open('http://www.librarything.com')
+    br.select_form('signup')
+    br['formusername'] = username
+    br['formpassword'] = password
+    br.submit()


 def cover_from_isbn(isbn, timeout=5., username=None, password=None):
-    global browser
-    if browser is None:
-        browser = _browser()
     src = None
+    br = browser()
     try:
-        return browser.open(OPENLIBRARY%isbn, timeout=timeout).read(), 'jpg'
+        return br.open(OPENLIBRARY%isbn, timeout=timeout).read(), 'jpg'
     except:
         pass # Cover not found
     if username and password:
-        login(username, password, force=False)
+        try:
+            login(br, username, password, force=False)
+        except:
+            pass
     try:
-        src = browser.open('http://www.librarything.com/isbn/'+isbn,
+        src = br.open_novisit('http://www.librarything.com/isbn/'+isbn,
                 timeout=timeout).read().decode('utf-8', 'replace')
     except Exception, err:
         if isinstance(getattr(err, 'args', [None])[0], socket.timeout):
@@ -63,7 +61,7 @@ def cover_from_isbn(isbn, timeout=5., username=None, password=None):
     if url is None:
         raise LibraryThingError(_('LibraryThing.com server error. Try again later.'))
     url = re.sub(r'_S[XY]\d+', '', url['src'])
-    cover_data = browser.open(url).read()
+    cover_data = br.open_novisit(url).read()
     return cover_data, url.rpartition('.')[-1]

 def option_parser():
@@ -71,7 +69,7 @@ def option_parser():
     _('''
%prog [options] ISBN

-Fetch a cover image for the book identified by ISBN from LibraryThing.com
+Fetch a cover image/social metadata for the book identified by ISBN from LibraryThing.com
'''))
     parser.add_option('-u', '--username', default=None,
                       help='Username for LibraryThing.com')
@@ -79,6 +77,61 @@ Fetch a cover image for the book identified by ISBN from LibraryThing.com
                       help='Password for LibraryThing.com')
     return parser

+def get_social_metadata(title, authors, publisher, isbn, username=None,
+        password=None):
+    from calibre.ebooks.metadata import MetaInformation
+    mi = MetaInformation(title, authors)
+    if isbn:
+        br = browser()
+        if username and password:
+            try:
+                login(br, username, password, force=False)
+            except:
+                pass
+
+        raw = br.open_novisit('http://www.librarything.com/isbn/'
+                    +isbn).read()
+        if not raw:
+            return mi
+        root = html.fromstring(raw)
+        h1 = root.xpath('//div[@class="headsummary"]/h1')
+        if h1 and not mi.title:
+            mi.title = html.tostring(h1[0], method='text', encoding=unicode)
+        h2 = root.xpath('//div[@class="headsummary"]/h2/a')
+        if h2 and not mi.authors:
+            mi.authors = [html.tostring(x, method='text', encoding=unicode) for
+                    x in h2]
+        h3 = root.xpath('//div[@class="headsummary"]/h3/a')
+        if h3:
+            match = None
+            for h in h3:
+                series = html.tostring(h, method='text', encoding=unicode)
+                match = re.search(r'(.+) \((.+)\)', series)
+                if match is not None:
+                    break
+            if match is not None:
+                mi.series = match.group(1).strip()
+                match = re.search(r'[0-9.]+', match.group(2))
+                si = 1.0
+                if match is not None:
+                    si = float(match.group())
+                mi.series_index = si
+        tags = root.xpath('//div[@class="tags"]/span[@class="tag"]/a')
+        if tags:
+            mi.tags = [html.tostring(x, method='text', encoding=unicode) for x
+                    in tags]
+        span = root.xpath(
+                '//table[@class="wsltable"]/tr[@class="wslcontent"]/td[4]//span')
+        if span:
+            raw = html.tostring(span[0], method='text', encoding=unicode)
+            match = re.search(r'([0-9.]+)', raw)
+            if match is not None:
+                rating = float(match.group())
+                if rating > 0 and rating <= 5:
+                    mi.rating = rating
+    return mi
+
+
 def main(args=sys.argv):
     parser = option_parser()
     opts, args = parser.parse_args(args)
@@ -86,6 +139,8 @@ def main(args=sys.argv):
         parser.print_help()
         return 1
     isbn = args[1]
+    mi = get_social_metadata('', [], '', isbn)
+    prints(mi)
     cover_data, ext = cover_from_isbn(isbn, username=opts.username,
             password=opts.password)
     if not ext:

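For a quick check of the new helper, something like the following works from inside a calibre environment (the ISBN is a placeholder; a real lookup needs network access and an ISBN that LibraryThing knows about):

from calibre.ebooks.metadata.library_thing import get_social_metadata

# title/authors are left blank so they are filled in from the LibraryThing page
mi = get_social_metadata('', [], '', '9999999999999')   # placeholder ISBN
print mi.series, mi.series_index
print mi.tags
print mi.rating
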
@@ -7,7 +7,7 @@ __license__ = 'GPL v3'
 __copyright__ = '2008, Marshall T. Vandegrift <llasram@gmail.com>'
 __docformat__ = 'restructuredtext en'

-import os, re, uuid, logging
+import os, re, uuid, logging, functools
 from mimetypes import types_map
 from collections import defaultdict
 from itertools import count
@@ -26,6 +26,8 @@ from calibre.ebooks.chardet import xml_to_unicode
 from calibre.ebooks.oeb.entitydefs import ENTITYDEFS
 from calibre.ebooks.conversion.preprocess import CSSPreProcessor

+RECOVER_PARSER = etree.XMLParser(recover=True, no_network=True, huge_tree=True)
+
 XML_NS = 'http://www.w3.org/XML/1998/namespace'
 XHTML_NS = 'http://www.w3.org/1999/xhtml'
 OEB_DOC_NS = 'http://openebook.org/namespaces/oeb-document/1.0/'
@@ -233,8 +235,6 @@ PREFIXNAME_RE = re.compile(r'^[^:]+[:][^:]+')
 XMLDECL_RE = re.compile(r'^\s*<[?]xml.*?[?]>')
 CSSURL_RE = re.compile(r'''url[(](?P<q>["']?)(?P<url>[^)]+)(?P=q)[)]''')

-RECOVER_PARSER = etree.XMLParser(recover=True)
-

 def element(parent, *args, **kwargs):
     if parent is not None:
@@ -780,8 +780,7 @@ class Manifest(object):
                 assume_utf8=True, resolve_entities=True)[0]
         if not data:
             return None
-        parser = etree.XMLParser(recover=True)
-        return etree.fromstring(data, parser=parser)
+        return etree.fromstring(data, parser=RECOVER_PARSER)

     def _parse_xhtml(self, data):
         self.oeb.log.debug('Parsing', self.href, '...')
@@ -809,16 +808,17 @@ class Manifest(object):
             pat = re.compile(r'&(%s);'%('|'.join(user_entities.keys())))
             data = pat.sub(lambda m:user_entities[m.group(1)], data)

+        fromstring = functools.partial(etree.fromstring, parser=RECOVER_PARSER)
         # Try with more & more drastic measures to parse
         def first_pass(data):
             try:
-                data = etree.fromstring(data)
+                data = fromstring(data)
             except etree.XMLSyntaxError, err:
                 self.oeb.log.exception('Initial parse failed:')
                 repl = lambda m: ENTITYDEFS.get(m.group(1), m.group(0))
                 data = ENTITY_RE.sub(repl, data)
                 try:
-                    data = etree.fromstring(data)
+                    data = fromstring(data)
                 except etree.XMLSyntaxError, err:
                     self.oeb.logger.warn('Parsing file %r as HTML' % self.href)
                     if err.args and err.args[0].startswith('Excessive depth'):
@@ -832,9 +832,9 @@ class Manifest(object):
                         elem.text = elem.text.strip('-')
                     data = etree.tostring(data, encoding=unicode)
                     try:
-                        data = etree.fromstring(data)
+                        data = fromstring(data)
                     except etree.XMLSyntaxError:
-                        data = etree.fromstring(data, parser=RECOVER_PARSER)
+                        data = fromstring(data)
             return data
         data = first_pass(data)

@@ -866,12 +866,12 @@ class Manifest(object):
             data = etree.tostring(data, encoding=unicode)

             try:
-                data = etree.fromstring(data)
+                data = fromstring(data)
             except:
                 data = data.replace(':=', '=').replace(':>', '>')
                 data = data.replace('<http:/>', '')
                 try:
-                    data = etree.fromstring(data)
+                    data = fromstring(data)
                 except etree.XMLSyntaxError:
                     self.oeb.logger.warn('Stripping comments and meta tags from %s'%
                             self.href)
@@ -882,7 +882,7 @@ class Manifest(object):
                             "<?xml version='1.0' encoding='utf-8'?><o:p></o:p>",
                             '')
                     data = data.replace("<?xml version='1.0' encoding='utf-8'??>", '')
-                    data = etree.fromstring(data)
+                    data = fromstring(data)
                 elif namespace(data.tag) != XHTML_NS:
                     # OEB_DOC_NS, but possibly others
                     ns = namespace(data.tag)

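All of the scattered etree.fromstring calls in _parse_xhtml are rebound to the shared RECOVER_PARSER through functools.partial, so every fallback stage parses with the same recovering, huge_tree-enabled parser. A tiny standalone illustration of the binding pattern:

import functools
from lxml import etree

RECOVER_PARSER = etree.XMLParser(recover=True, no_network=True, huge_tree=True)

# bind the parser once; every later call picks it up automatically
fromstring = functools.partial(etree.fromstring, parser=RECOVER_PARSER)

root = fromstring('<html><p>missing close tag</html>')   # recovers instead of raising
print root.tag
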
@@ -103,6 +103,8 @@ def _config():
               help=_('The layout of the user interface'), default='wide')
     c.add_opt('show_avg_rating', default=True,
               help=_('Show the average rating per item indication in the tag browser'))
+    c.add_opt('disable_animations', default=False,
+              help=_('Disable UI animations'))
     return ConfigProxy(c)

 config = _config()

@@ -16,6 +16,7 @@ from calibre.gui2.widgets import IMAGE_EXTENSIONS
 from calibre.ebooks import BOOK_EXTENSIONS
 from calibre.constants import preferred_encoding
 from calibre.library.comments import comments_to_html
+from calibre.gui2 import config

 # render_rows(data) {{{
 WEIGHTS = collections.defaultdict(lambda : 100)
@@ -133,7 +134,7 @@ class CoverView(QWidget): # {{{
             self.pixmap = self.default_pixmap
         self.do_layout()
         self.update()
-        if not same_item:
+        if not same_item and not config['disable_animations']:
             self.animation.start()

     def paintEvent(self, event):

@@ -5,7 +5,7 @@ __license__ = 'GPL v3'
 __copyright__ = '2010, Kovid Goyal <kovid@kovidgoyal.net>'
 __docformat__ = 'restructuredtext en'

-import re, sys
+import sys
 from functools import partial

 from PyQt4.Qt import QComboBox, QLabel, QSpinBox, QDoubleSpinBox, QDateEdit, \

@@ -76,10 +76,16 @@ class DeviceJob(BaseJob): # {{{
         self.job_done()

     def abort(self, err):
+        call_job_done = False
+        if self.run_state == self.WAITING:
+            self.start_work()
+            call_job_done = True
         self._aborted = True
         self.failed = True
         self._details = unicode(err)
         self.exception = err
+        if call_job_done:
+            self.job_done()

     @property
     def log_file(self):
@@ -105,12 +111,11 @@ class DeviceManager(Thread): # {{{
         self.current_job = None
         self.scanner = DeviceScanner()
         self.connected_device = None
+        self.connected_device_kind = None
         self.ejected_devices = set([])
         self.mount_connection_requests = Queue.Queue(0)
         self.open_feedback_slot = open_feedback_slot

-    ITUNES_STRING = '#itunes#'
-
     def report_progress(self, *args):
         pass
@@ -183,11 +188,23 @@ class DeviceManager(Thread): # {{{
                           device_kind='usb'):
                 prints('Device connect failed again, giving up')

+    # Mount devices that don't use USB, such as the folder device and iTunes
+    # This will be called on the GUI thread. Because of this, we must store
+    # information that the scanner thread will use to do the real work.
+    def mount_device(self, kls, kind, path):
+        self.mount_connection_requests.put((kls, kind, path))
+
+    # disconnect a device
     def umount_device(self, *args):
         if self.is_device_connected and not self.job_manager.has_device_jobs():
-            self.connected_device.eject()
-            self.ejected_devices.add(self.connected_device)
-            self.connected_slot(False, self.connected_device_kind)
+            if self.connected_device_kind == 'device':
+                self.connected_device.eject()
+                self.ejected_devices.add(self.connected_device)
+                self.connected_slot(False, self.connected_device_kind)
+            elif hasattr(self.connected_device, 'unmount_device'):
+                # As we are on the wrong thread, this call must *not* do
+                # anything besides set a flag that the right thread will see.
+                self.connected_device.unmount_device()

     def next(self):
         if not self.jobs.empty():
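The comment spells out the contract for non-USB devices: umount_device runs on the GUI thread, so the driver's unmount_device may only flip a flag that the scanner thread later notices (FOLDER_DEVICE and ITUNES_ASYNC above implement exactly this). A minimal sketch of that pattern, with illustrative names rather than the real classes:

class FlagOnlyDevice(object):
    # stand-in for FOLDER_DEVICE / ITUNES_ASYNC; names are illustrative
    def __init__(self):
        self.is_connected = True

    def unmount_device(self):
        # called from the GUI thread: only set the flag, do no real work here
        self.is_connected = False

    def poll_connected(self):
        # stand-in for whatever the scanner thread checks on its next pass;
        # once the flag is down, the normal disconnect path runs
        return self.is_connected
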
@@ -250,22 +267,6 @@ class DeviceManager(Thread): # {{{
         return self.create_job(self._get_device_information, done,
                     description=_('Get device information'))

-    # This will be called on the GUI thread. Because of this, we must store
-    # information that the scanner thread will use to do the real work.
-    # Note: this is used for iTunes
-    def mount_device(self, kls, kind, path):
-        self.mount_connection_requests.put((kls, kind, path))
-
-    # This is called on the GUI thread. No problem here, because it calls the
-    # device driver, telling it to tell the scanner when it passes by that the
-    # folder has disconnected. Note: this is also used for iTunes
-    def unmount_device(self):
-        if self.connected_device is not None:
-            if hasattr(self.connected_device, 'disconnect_from_folder'):
-                # As we are on the wrong thread, this call must *not* do
-                # anything besides set a flag that the right thread will see.
-                self.connected_device.disconnect_from_folder()

     def _books(self):
         '''Get metadata from device'''
         mainlist = self.device.books(oncard=None, end_session=False)
@@ -493,20 +494,16 @@ class DeviceMenu(QMenu): # {{{
         mitem.triggered.connect(lambda x : self.connect_to_folder.emit())
         self.connect_to_folder_action = mitem

-        mitem = self.addAction(QIcon(I('eject.svg')), _('Disconnect from folder'))
-        mitem.setEnabled(False)
-        mitem.triggered.connect(lambda x : self.disconnect_mounted_device.emit())
-        self.disconnect_mounted_device_action = mitem
-
-        mitem = self.addAction(QIcon(I('document_open.svg')), _('Connect to iTunes (BETA TEST)'))
+        mitem = self.addAction(QIcon(I('devices/itunes.png')),
+                _('Connect to iTunes (EXPERIMENTAL)'))
         mitem.setEnabled(True)
         mitem.triggered.connect(lambda x : self.connect_to_itunes.emit())
         self.connect_to_itunes_action = mitem

-        mitem = self.addAction(QIcon(I('eject.svg')), _('Disconnect from iTunes (BETA TEST)'))
+        mitem = self.addAction(QIcon(I('eject.svg')), _('Eject device'))
         mitem.setEnabled(False)
         mitem.triggered.connect(lambda x : self.disconnect_mounted_device.emit())
-        self.disconnect_from_itunes_action = mitem
+        self.disconnect_mounted_device_action = mitem

         self.addSeparator()
         self.addMenu(self.set_default_menu)
@@ -652,7 +649,7 @@ class DeviceMixin(object): # {{{

     # disconnect from both folder and itunes devices
     def disconnect_mounted_device(self):
-        self.device_manager.unmount_device()
+        self.device_manager.umount_device()

     def _sync_action_triggered(self, *args):
         m = getattr(self, '_sync_menu', None)
@@ -672,19 +669,11 @@ class DeviceMixin(object): # {{{
         if self.device_connected:
             self._sync_menu.connect_to_folder_action.setEnabled(False)
             self._sync_menu.connect_to_itunes_action.setEnabled(False)
-            if self.device_connected == 'folder':
-                self._sync_menu.disconnect_mounted_device_action.setEnabled(True)
-            if self.device_connected == 'itunes':
-                self._sync_menu.disconnect_from_itunes_action.setEnabled(True)
-            else:
-                self._sync_menu.disconnect_mounted_device_action.setEnabled(False)
+            self._sync_menu.disconnect_mounted_device_action.setEnabled(True)
         else:
             self._sync_menu.connect_to_folder_action.setEnabled(True)
-            self._sync_menu.disconnect_mounted_device_action.setEnabled(False)
             self._sync_menu.connect_to_itunes_action.setEnabled(True)
-            self._sync_menu.disconnect_from_itunes_action.setEnabled(False)
-
-
+            self._sync_menu.disconnect_mounted_device_action.setEnabled(False)

     def device_job_exception(self, job):
         '''
@@ -718,14 +707,11 @@ class DeviceMixin(object): # {{{

     # Device connected {{{

-    def set_device_menu_items_state(self, connected, device_kind):
+    def set_device_menu_items_state(self, connected):
         if connected:
             self._sync_menu.connect_to_folder_action.setEnabled(False)
             self._sync_menu.connect_to_itunes_action.setEnabled(False)
-            if device_kind == 'folder':
-                self._sync_menu.disconnect_mounted_device_action.setEnabled(True)
-            elif device_kind == 'itunes':
-                self._sync_menu.disconnect_from_itunes_action.setEnabled(True)
+            self._sync_menu.disconnect_mounted_device_action.setEnabled(True)
             self._sync_menu.enable_device_actions(True,
                     self.device_manager.device.card_prefix(),
                     self.device_manager.device)
@@ -734,7 +720,6 @@ class DeviceMixin(object): # {{{
             self._sync_menu.connect_to_folder_action.setEnabled(True)
             self._sync_menu.connect_to_itunes_action.setEnabled(True)
             self._sync_menu.disconnect_mounted_device_action.setEnabled(False)
-            self._sync_menu.disconnect_from_itunes_action.setEnabled(False)
             self._sync_menu.enable_device_actions(False)
         self.eject_action.setEnabled(False)

@@ -742,7 +727,7 @@ class DeviceMixin(object): # {{{
         '''
         Called when a device is connected to the computer.
         '''
-        self.set_device_menu_items_state(connected, device_kind)
+        self.set_device_menu_items_state(connected)
         if connected:
             self.device_manager.get_device_information(\
                     Dispatcher(self.info_read))
@@ -493,6 +493,7 @@ class ConfigDialog(ResizableDialog, Ui_Dialog):
             if x == config['gui_layout']:
                 li = i
         self.opt_gui_layout.setCurrentIndex(li)
+        self.opt_disable_animations.setChecked(config['disable_animations'])

     def check_port_value(self, *args):
         port = self.port.value()
@@ -868,6 +869,7 @@ class ConfigDialog(ResizableDialog, Ui_Dialog):
         config['get_social_metadata'] = self.opt_get_social_metadata.isChecked()
         config['overwrite_author_title_metadata'] = self.opt_overwrite_author_title_metadata.isChecked()
         config['enforce_cpu_limit'] = bool(self.opt_enforce_cpu_limit.isChecked())
+        config['disable_animations'] = bool(self.opt_disable_animations.isChecked())
         gprefs['show_splash_screen'] = bool(self.show_splash_screen.isChecked())
         fmts = []
         for i in range(self.viewer.count()):
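Taken together, the two ConfigDialog hunks wire the new disable_animations preference through the usual load/apply round trip: the constructor pushes the stored value into the checkbox, and applying the dialog writes the checkbox state back. The sketch below shows only that round trip; the dict-backed config and FakeCheckBox are stand-ins for calibre's config object and the Qt widget, used just to make the pattern runnable outside the GUI.

# Minimal round trip: load a stored boolean into a widget, write it back on apply.
config = {'disable_animations': False}

class FakeCheckBox(object):
    def __init__(self):
        self._checked = False
    def setChecked(self, v):
        self._checked = bool(v)
    def isChecked(self):
        return self._checked

opt_disable_animations = FakeCheckBox()

def load_preferences():
    # Constructor side: reflect the stored value in the widget.
    opt_disable_animations.setChecked(config['disable_animations'])

def apply_preferences():
    # Accept/apply side: write the widget state back to the store.
    config['disable_animations'] = bool(opt_disable_animations.isChecked())

if __name__ == '__main__':
    load_preferences()
    opt_disable_animations.setChecked(True)   # user ticks the box
    apply_preferences()
    print(config)                             # {'disable_animations': True}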
@@ -89,8 +89,8 @@
      <rect>
       <x>0</x>
       <y>0</y>
-      <width>720</width>
-      <height>679</height>
+      <width>724</width>
+      <height>683</height>
      </rect>
     </property>
     <layout class="QGridLayout" name="gridLayout_7">
@@ -655,6 +655,16 @@
         </property>
        </widget>
       </item>
+      <item row="1" column="1">
+       <widget class="QCheckBox" name="opt_disable_animations">
+        <property name="toolTip">
+         <string>Disable all animations. Useful if you have a slow/old computer.</string>
+        </property>
+        <property name="text">
+         <string>Disable &amp;animations</string>
+        </property>
+       </widget>
+      </item>
      </layout>
     </widget>
     <widget class="QWidget" name="page_6">
@@ -11,11 +11,10 @@ import re
 import time
 import traceback

-import sip
 from PyQt4.Qt import SIGNAL, QObject, QCoreApplication, Qt, QTimer, QThread, QDate, \
-        QPixmap, QListWidgetItem, QDialog, QHBoxLayout, QGridLayout
+        QPixmap, QListWidgetItem, QDialog

-from calibre.gui2 import error_dialog, file_icon_provider, \
+from calibre.gui2 import error_dialog, file_icon_provider, dynamic, \
         choose_files, choose_images, ResizableDialog, \
         warning_dialog
 from calibre.gui2.dialogs.metadata_single_ui import Ui_MetadataSingleDialog
@@ -301,6 +300,7 @@ class MetadataSingleDialog(ResizableDialog, Ui_MetadataSingleDialog):
         self.connect(self.__abort_button, SIGNAL('clicked()'),
                      self.do_cancel_all)
         self.splitter.setStretchFactor(100, 1)
+        self.read_state()
         self.db = db
         self.pi = ProgressIndicator(self)
         self.accepted_callback = accepted_callback
@@ -716,7 +716,7 @@ class MetadataSingleDialog(ResizableDialog, Ui_MetadataSingleDialog):
                 _('Could not open %s. Is it being used by another'
                 ' program?')%fname, show=True)
                 raise
+        self.save_state()
         QDialog.accept(self)
         if callable(self.accepted_callback):
             self.accepted_callback(self.id)
@@ -728,3 +728,16 @@ class MetadataSingleDialog(ResizableDialog, Ui_MetadataSingleDialog):
                 cf.wait()

         QDialog.reject(self, *args)
+
+    def read_state(self):
+        wg = dynamic.get('metasingle_window_geometry', None)
+        ss = dynamic.get('metasingle_splitter_state', None)
+        if wg is not None:
+            self.restoreGeometry(wg)
+        if ss is not None:
+            self.splitter.restoreState(ss)
+
+    def save_state(self):
+        dynamic.set('metasingle_window_geometry', bytes(self.saveGeometry()))
+        dynamic.set('metasingle_splitter_state',
+                bytes(self.splitter.saveState()))
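read_state() and save_state() above persist the dialog geometry and splitter state as opaque byte blobs in calibre's dynamic store, restoring them only when a previously saved value exists. The sketch below approximates that get/set contract with a small pickle-backed store, so the save-on-accept / restore-on-construct idea can be exercised outside calibre; PersistentStore and the example byte string are assumptions, not calibre APIs.

# Pickle-backed stand-in for a persistent key/value store with get/set defaults.
import os
import pickle

class PersistentStore(object):
    def __init__(self, path):
        self.path = path
        self.data = {}
        if os.path.exists(path):
            with open(path, 'rb') as f:
                self.data = pickle.load(f)

    def get(self, key, default=None):
        return self.data.get(key, default)

    def set(self, key, value):
        self.data[key] = value
        with open(self.path, 'wb') as f:
            pickle.dump(self.data, f)

if __name__ == '__main__':
    dynamic = PersistentStore('state.pickle')
    # save_state() analogue: stash an opaque blob (Qt gives back QByteArray -> bytes)
    dynamic.set('metasingle_window_geometry', b'\x01\xd9 example-bytes')
    # read_state() analogue on the next run: restore only if something was saved
    wg = dynamic.get('metasingle_window_geometry', None)
    if wg is not None:
        print('would call restoreGeometry() with %d bytes' % len(wg))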
@@ -410,9 +410,7 @@ class Main(MainWindow, Ui_MainWindow, DeviceMixin, ToolbarMixin, # {{{
             self.tags_view.set_new_model() # in case columns changed
             self.tags_view.recount()
             self.create_device_menu()
-            self.set_device_menu_items_state(bool(self.device_connected),
-                    self.device_connected == 'folder')
-
+            self.set_device_menu_items_state(bool(self.device_connected))
         if not patheq(self.library_path, d.database_location):
             newloc = d.database_location
             move_library(self.library_path, newloc, self,