Mirror of https://github.com/kovidgoyal/calibre.git (synced 2025-07-09 03:04:10 -04:00)

Commit f4bb955156: merge from trunk

recipes/hvg.recipe (new file, 44 lines added)
@@ -0,0 +1,44 @@
+# -*- coding: utf-8 -*-
+import re
+from calibre.web.feeds.recipes import BasicNewsRecipe
+
+class HVG(BasicNewsRecipe):
+    title = 'HVG.HU'
+    __author__ = u'István Papp'
+    description = u'Friss hírek a HVG-től'
+    timefmt = ' [%Y. %b. %d., %a.]'
+    oldest_article = 4
+    language = 'hu'
+
+    max_articles_per_feed = 100
+    no_stylesheets = True
+    use_embedded_content = False
+    encoding = 'utf8'
+    publisher = 'HVG Online'
+    category = u'news, hírek, hvg'
+    extra_css = 'body{ font-family: Verdana,Helvetica,Arial,sans-serif } .introduction{font-weight: bold} .story-feature{display: block; padding: 0; border: 1px solid; width: 40%; font-size: small} .story-feature h2{text-align: center; text-transform: uppercase} '
+    preprocess_regexps = [(re.compile(r'<!--.*?-->', re.DOTALL), lambda m: '')]
+    remove_tags_before = dict(id='pg-content')
+    remove_javascript = True
+    remove_empty_feeds = True
+
+    feeds = [
+         (u'Itthon', u'http://hvg.hu/rss/itthon')
+        ,(u'Világ', u'http://hvg.hu/rss/vilag')
+        ,(u'Gazdaság', u'http://hvg.hu/rss/gazdasag')
+        ,(u'IT | Tudomány', u'http://hvg.hu/rss/tudomany')
+        ,(u'Panoráma', u'http://hvg.hu/rss/Panorama')
+        ,(u'Karrier', u'http://hvg.hu/rss/karrier')
+        ,(u'Gasztronómia', u'http://hvg.hu/rss/gasztronomia')
+        ,(u'Helyi érték', u'http://hvg.hu/rss/helyiertek')
+        ,(u'Kultúra', u'http://hvg.hu/rss/kultura')
+        ,(u'Cégautó', u'http://hvg.hu/rss/cegauto')
+        ,(u'Vállalkozó szellem', u'http://hvg.hu/rss/kkv')
+        ,(u'Egészség', u'http://hvg.hu/rss/egeszseg')
+        ,(u'Vélemény', u'http://hvg.hu/rss/velemeny')
+        ,(u'Sport', u'http://hvg.hu/rss/sport')
+    ]
+
+    def print_version(self, url):
+        return url.replace ('#rss', '/print')
+
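Note: print_version above simply rewrites each RSS item link to HVG's print view. A minimal sketch of that transformation, assuming a feed URL of the usual shape (the sample URL is made up; only the '#rss' handling comes from the recipe):

    url = 'http://hvg.hu/itthon/20110301_pelda_cikk#rss'
    print(url.replace('#rss', '/print'))
    # -> http://hvg.hu/itthon/20110301_pelda_cikk/print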
@@ -23,6 +23,11 @@ class WeeklyLWN(BasicNewsRecipe):
    remove_tags_after = dict(attrs={'class':'ArticleText'})
    remove_tags = [dict(name=['h2', 'form'])]
+
+    preprocess_regexps = [
+        # Remove the <hr> and "Log in to post comments"
+        (re.compile(r'<hr.*?comments[)]', re.DOTALL), lambda m: ''),
+    ]

    conversion_options = { 'linearize_tables' : True }

    oldest_article = 7.0

@@ -40,15 +45,15 @@ class WeeklyLWN(BasicNewsRecipe):

    def parse_index(self):
        if self.username is not None and self.password is not None:
-            index_url = 'http://lwn.net/current/bigpage'
+            index_url = 'http://lwn.net/current/bigpage?format=printable'
        else:
-            index_url = 'http://lwn.net/free/bigpage'
+            index_url = 'http://lwn.net/free/bigpage?format=printable'
        soup = self.index_to_soup(index_url)
        body = soup.body

        articles = {}
        ans = []
-        url_re = re.compile('^http://lwn.net/Articles/')
+        url_re = re.compile('^/Articles/')

        while True:
            tag_title = body.findNext(name='p', attrs={'class':'SummaryHL'})

@@ -91,7 +96,7 @@ class WeeklyLWN(BasicNewsRecipe):

                article = dict(
                    title=tag_title.string,
-                    url=tag_url['href'].split('#')[0],
+                    url= 'http://lwn.net' + tag_url['href'].split('#')[0] + '?format=printable',
                    description='', content='', date='')
                articles[section].append(article)

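Note: the WeeklyLWN changes point both the index page and the per-article URLs at LWN's printable rendering, and match article links by site-relative path instead of absolute URL. A small sketch of the URL construction, with a made-up href (real values come from the parsed index page):

    href = '/Articles/432012/#Comments'   # hypothetical value taken from the index soup
    article_url = 'http://lwn.net' + href.split('#')[0] + '?format=printable'
    # -> http://lwn.net/Articles/432012/?format=printable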
@@ -5,7 +5,7 @@ __appname__ = 'calibre'
__version__ = '0.7.50'
__author__ = "Kovid Goyal <kovid@kovidgoyal.net>"

-import re
+import re, importlib
_ver = __version__.split('.')
_ver = [int(re.search(r'(\d+)', x).group(1)) for x in _ver]
numeric_version = tuple(_ver)

@@ -33,10 +33,10 @@ try:
except:
    preferred_encoding = 'utf-8'

-win32event = __import__('win32event') if iswindows else None
+win32event = importlib.import_module('win32event') if iswindows else None
-winerror = __import__('winerror') if iswindows else None
+winerror = importlib.import_module('winerror') if iswindows else None
-win32api = __import__('win32api') if iswindows else None
+win32api = importlib.import_module('win32api') if iswindows else None
-fcntl = None if iswindows else __import__('fcntl')
+fcntl = None if iswindows else importlib.import_module('fcntl')

filesystem_encoding = sys.getfilesystemencoding()
if filesystem_encoding is None: filesystem_encoding = 'utf-8'

@@ -74,8 +74,8 @@ if plugins is None:
            (['winutil'] if iswindows else []) + \
            (['usbobserver'] if isosx else []):
        try:
-            p, err = __import__(plugin), ''
+            p, err = importlib.import_module(plugin), ''
-        except Exception, err:
+        except Exception as err:
            p = None
            err = str(err)
        plugins[plugin] = (p, err)

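Note: the move from __import__ to importlib.import_module, repeated throughout this commit, avoids the classic __import__ pitfall with dotted names: it returns the top-level package unless fromlist is abused, which is why the old plugin-loading code passed fromlist=['1']. A minimal, standard-library-only illustration (the module names are just examples):

    import importlib

    top = __import__('xml.dom')                # binds the top-level package: __name__ == 'xml'
    sub = importlib.import_module('xml.dom')   # binds the submodule itself: __name__ == 'xml.dom'
    print(top.__name__)
    print(sub.__name__)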
@@ -2,7 +2,7 @@ from __future__ import with_statement
__license__ = 'GPL v3'
__copyright__ = '2008, Kovid Goyal <kovid at kovidgoyal.net>'

-import os, sys, zipfile
+import os, sys, zipfile, importlib

from calibre.constants import numeric_version
from calibre.ptempfile import PersistentTemporaryFile

@@ -517,7 +517,7 @@ class InterfaceActionBase(Plugin): # {{{
        This method must return the actual interface action plugin object.
        '''
        mod, cls = self.actual_plugin.split(':')
-        return getattr(__import__(mod, fromlist=['1'], level=0), cls)(gui,
+        return getattr(importlib.import_module(mod), cls)(gui,
                self.site_customization)

    # }}}

@@ -575,7 +575,7 @@ class PreferencesPlugin(Plugin): # {{{
        base, _, wc = self.config_widget.partition(':')
        if not wc:
            wc = 'ConfigWidget'
-        base = __import__(base, fromlist=[1])
+        base = importlib.import_module(base)
        widget = getattr(base, wc)
        return widget(parent)

@@ -282,7 +282,7 @@ def main():
                outfile = os.path.join(outfile, path[path.rfind("/")+1:])
            try:
                outfile = open(outfile, "wb")
-            except IOError, e:
+            except IOError as e:
                print >> sys.stderr, e
                parser.print_help()
                return 1

@@ -291,13 +291,13 @@ def main():
        elif args[1].startswith("prs500:"):
            try:
                infile = open(args[0], "rb")
-            except IOError, e:
+            except IOError as e:
                print >> sys.stderr, e
                parser.print_help()
                return 1
            try:
                dev.put_file(infile, args[1][7:])
-            except PathError, err:
+            except PathError as err:
                if options.force and 'exists' in str(err):
                    dev.del_file(err.path, False)
                    dev.put_file(infile, args[1][7:])

@@ -355,7 +355,7 @@ def main():
            return 1
    except DeviceLocked:
        print >> sys.stderr, "The device is locked. Use the --unlock option"
-    except (ArgumentError, DeviceError), e:
+    except (ArgumentError, DeviceError) as e:
        print >>sys.stderr, e
        return 1
    return 0
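Note: most of the remaining hunks in this commit are the same mechanical change: except SomeError, name: becomes except SomeError as name:. The as form is accepted from Python 2.6 on, is the only form Python 3 allows, and removes the ambiguity with catching a tuple of exception types. A tiny before/after sketch (illustrative only):

    # Old spelling (Python 2 only, easy to confuse with "except (IOError, OSError):"):
    #     except IOError, e:
    # New spelling, valid on Python 2.6+ and Python 3:
    try:
        f = open('missing.txt')
    except IOError as e:
        print(e)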
@@ -177,7 +177,7 @@ class PRS500(DeviceConfig, DevicePlugin):
                    dev.send_validated_command(BeginEndSession(end=True))
                    dev.in_session = False
                raise
-            except USBError, err:
+            except USBError as err:
                if "No such device" in str(err):
                    raise DeviceError()
                elif "Connection timed out" in str(err):

@@ -272,7 +272,7 @@ class PRS500(DeviceConfig, DevicePlugin):
            self.bulk_read_max_packet_size = red.MaxPacketSize
            self.bulk_write_max_packet_size = wed.MaxPacketSize
            self.handle.claim_interface(self.INTERFACE_ID)
-        except USBError, err:
+        except USBError as err:
            raise DeviceBusy(str(err))
        # Large timeout as device may still be initializing
        res = self.send_validated_command(GetUSBProtocolVersion(), timeout=20000)

@@ -303,7 +303,7 @@ class PRS500(DeviceConfig, DevicePlugin):
        try:
            self.handle.reset()
            self.handle.release_interface(self.INTERFACE_ID)
-        except Exception, err:
+        except Exception as err:
            print >> sys.stderr, err
        self.handle, self.device = None, None
        self.in_session = False

@@ -509,7 +509,7 @@ class PRS500(DeviceConfig, DevicePlugin):
            outfile.write("".join(map(chr, packets[0][16:])))
            for i in range(1, len(packets)):
                outfile.write("".join(map(chr, packets[i])))
-        except IOError, err:
+        except IOError as err:
            self.send_validated_command(FileClose(_id))
            raise ArgumentError("File get operation failed. " + \
                    "Could not write to local location: " + str(err))

@@ -656,7 +656,7 @@ class PRS500(DeviceConfig, DevicePlugin):
        dest = None
        try:
            dest = self.path_properties(path, end_session=False)
-        except PathError, err:
+        except PathError as err:
            if "does not exist" in str(err) or "not mounted" in str(err):
                return (False, None)
            else: raise
@@ -124,11 +124,11 @@ class Device(DeviceConfig, DevicePlugin):
        if not prefix:
            return 0, 0
        prefix = prefix[:-1]
-        win32file = __import__('win32file', globals(), locals(), [], -1)
+        import win32file
        try:
            sectors_per_cluster, bytes_per_sector, free_clusters, total_clusters = \
                win32file.GetDiskFreeSpace(prefix)
-        except Exception, err:
+        except Exception as err:
            if getattr(err, 'args', [None])[0] == 21: # Disk not ready
                time.sleep(3)
                sectors_per_cluster, bytes_per_sector, free_clusters, total_clusters = \

@@ -771,7 +771,7 @@ class Device(DeviceConfig, DevicePlugin):
        for d in drives:
            try:
                eject(d)
-            except Exception, e:
+            except Exception as e:
                print 'Udisks eject call for:', d, 'failed:'
                print '\t', e
                failures = True
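Note: the free-space figures in the Device hunk above come straight from the four-tuple returned by win32file.GetDiskFreeSpace, unpacked the same way as in the diff. A short sketch of the arithmetic (Windows only, requires pywin32; the drive letter is an example):

    import win32file  # pywin32

    sectors_per_cluster, bytes_per_sector, free_clusters, total_clusters = \
        win32file.GetDiskFreeSpace('C:')
    free_bytes = free_clusters * sectors_per_cluster * bytes_per_sector
    print(free_bytes)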
@@ -57,7 +57,7 @@ class HTMLRenderer(object):
            buf.open(QBuffer.WriteOnly)
            image.save(buf, 'JPEG')
            self.data = str(ba.data())
-        except Exception, e:
+        except Exception as e:
            self.exception = e
            self.traceback = traceback.format_exc()
        finally:

@@ -151,7 +151,7 @@ class Container(object):
        if name in self.mime_map:
            try:
                raw = self._parse(raw, self.mime_map[name])
-            except XMLSyntaxError, err:
+            except XMLSyntaxError as err:
                raise ParseError(name, unicode(err))
        self.cache[name] = raw
        return raw

@@ -54,7 +54,7 @@ def main(args=sys.argv):
    epub = os.path.abspath(args[1])
    try:
        run(epub, opts, default_log)
-    except ParseError, err:
+    except ParseError as err:
        default_log.error(unicode(err))
        raise SystemExit(1)

@@ -110,7 +110,7 @@ class HTMLFile(object):
        try:
            with open(self.path, 'rb') as f:
                src = f.read()
-        except IOError, err:
+        except IOError as err:
            msg = 'Could not read from file: %s with error: %s'%(self.path, as_unicode(err))
            if level == 0:
                raise IOError(msg)

@@ -202,7 +202,7 @@ def traverse(path_to_html_file, max_levels=sys.maxint, verbose=0, encoding=None)
                        raise IgnoreFile('%s is a binary file'%nf.path, -1)
                    nl.append(nf)
                    flat.append(nf)
-                except IgnoreFile, err:
+                except IgnoreFile as err:
                    rejects.append(link)
                    if not err.doesnt_exist or verbose > 1:
                        print repr(err)

@@ -332,7 +332,7 @@ class HTMLConverter(object):
            soup = BeautifulSoup(raw,
                    convertEntities=BeautifulSoup.XHTML_ENTITIES,
                    markupMassage=nmassage)
-        except ConversionError, err:
+        except ConversionError as err:
            if 'Failed to coerce to unicode' in str(err):
                raw = unicode(raw, 'utf8', 'replace')
                soup = BeautifulSoup(raw,

@@ -935,7 +935,7 @@ class HTMLConverter(object):

        try:
            im = PILImage.open(path)
-        except IOError, err:
+        except IOError as err:
            self.log.warning('Unable to process image: %s\n%s'%( original_path, err))
            return
        encoding = detect_encoding(im)

@@ -953,7 +953,7 @@ class HTMLConverter(object):
                pt.close()
                self.scaled_images[path] = pt
                return pt.name
-            except (IOError, SystemError), err: # PIL chokes on interlaced PNG images as well a some GIF images
+            except (IOError, SystemError) as err: # PIL chokes on interlaced PNG images as well a some GIF images
                self.log.warning(_('Unable to process image %s. Error: %s')%(path, err))

        if width == None or height == None:

@@ -1013,7 +1013,7 @@ class HTMLConverter(object):
        if not self.images.has_key(path):
            try:
                self.images[path] = ImageStream(path, encoding=encoding)
-            except LrsError, err:
+            except LrsError as err:
                self.log.warning(_('Could not process image: %s\n%s')%(
                    original_path, err))
                return

@@ -1768,7 +1768,7 @@ class HTMLConverter(object):
            tag_css = self.tag_css(tag)[0] # Table should not inherit CSS
            try:
                self.process_table(tag, tag_css)
-            except Exception, err:
+            except Exception as err:
                self.log.warning(_('An error occurred while processing a table: %s. Ignoring table markup.')%repr(err))
                self.log.exception('')
                self.log.debug(_('Bad table:\n%s')%unicode(tag)[:300])

@@ -1858,7 +1858,7 @@ def process_file(path, options, logger):
                tf.close()
                tim.save(tf.name)
                tpath = tf.name
-            except IOError, err: # PIL sometimes fails, for example on interlaced PNG files
+            except IOError as err: # PIL sometimes fails, for example on interlaced PNG files
                logger.warn(_('Could not read cover image: %s'), err)
                options.cover = None
        else:
@@ -34,7 +34,7 @@ License: GPL 2 (http://www.gnu.org/copyleft/gpl.html) or BSD
import re, sys, codecs

from logging import getLogger, StreamHandler, Formatter, \
-                    DEBUG, INFO, WARN, ERROR, CRITICAL
+                    DEBUG, INFO, WARN, CRITICAL


MESSAGE_THRESHOLD = CRITICAL

@@ -95,7 +95,7 @@ def removeBOM(text, encoding):
# and uses the actual name of the executable called.)

EXECUTABLE_NAME_FOR_USAGE = "python markdown.py"


# --------------- CONSTANTS YOU _SHOULD NOT_ HAVE TO CHANGE ----------

@@ -242,8 +242,6 @@ class Element:

        if bidi:
-            orig_bidi = self.bidi
-
            if not self.bidi or self.isDocumentElement:
                # Once the bidi is set don't change it (except for doc element)
                self.bidi = bidi

@@ -319,7 +317,7 @@ class Element:
            childBuffer += "/>"



        buffer += "<" + self.nodeName

        if self.nodeName in ['p', 'li', 'ul', 'ol',

@@ -330,10 +328,10 @@ class Element:
                bidi = self.bidi
            else:
                bidi = self.doc.bidi

            if bidi=="rtl":
                self.setAttribute("dir", "rtl")

        for attr in self.attributes:
            value = self.attribute_values[attr]
            value = self.doc.normalizeEntities(value,

@@ -358,7 +356,7 @@ class TextNode:
    attrRegExp = re.compile(r'\{@([^\}]*)=([^\}]*)}') # {@id=123}

    def __init__ (self, text):
        self.value = text

    def attributeCallback(self, match):

@@ -372,7 +370,7 @@ class TextNode:
        text = self.value

        self.parent.setBidi(getBidiType(text))

        if not text.startswith(HTML_PLACEHOLDER_PREFIX):
            if self.parent.nodeName == "p":
                text = text.replace("\n", "\n ")

@@ -413,11 +411,11 @@ There are two types of preprocessors: TextPreprocessor and Preprocessor.
class TextPreprocessor:
    '''
    TextPreprocessors are run before the text is broken into lines.

    Each TextPreprocessor implements a "run" method that takes a pointer to a
    text string of the document, modifies it as necessary and returns
    either the same pointer or a pointer to a new string.

    TextPreprocessors must extend markdown.TextPreprocessor.
    '''

@@ -431,18 +429,18 @@ class Preprocessor:

    Each preprocessor implements a "run" method that takes a pointer to a
    list of lines of the document, modifies it as necessary and returns
    either the same pointer or a pointer to a new list.

    Preprocessors must extend markdown.Preprocessor.
    '''

    def run(self, lines):
        pass


class HtmlBlockPreprocessor(TextPreprocessor):
    """Removes html blocks from the source text and stores it."""

    def _get_left_tag(self, block):
        return block[1:].replace(">", " ", 1).split()[0].lower()

@@ -451,7 +449,7 @@ class HtmlBlockPreprocessor(TextPreprocessor):
        return block.rstrip()[-len(left_tag)-2:-1].lower()

    def _equal_tags(self, left_tag, right_tag):

        if left_tag == 'div' or left_tag[0] in ['?', '@', '%']: # handle PHP, etc.
            return True
        if ("/" + left_tag) == right_tag:

@@ -467,17 +465,17 @@ class HtmlBlockPreprocessor(TextPreprocessor):
    def _is_oneliner(self, tag):
        return (tag in ['hr', 'hr/'])


    def run(self, text):

        new_blocks = []
        text = text.split("\n\n")

        items = []
        left_tag = ''
        right_tag = ''
        in_tag = False # flag

        for block in text:
            if block.startswith("\n"):
                block = block[1:]

@@ -485,7 +483,7 @@ class HtmlBlockPreprocessor(TextPreprocessor):
            if not in_tag:

                if block.startswith("<"):

                    left_tag = self._get_left_tag(block)
                    right_tag = self._get_right_tag(left_tag, block)

@@ -497,13 +495,13 @@ class HtmlBlockPreprocessor(TextPreprocessor):
                    if self._is_oneliner(left_tag):
                        new_blocks.append(block.strip())
                        continue

                    if block[1] == "!":
                        # is a comment block
                        left_tag = "--"
                        right_tag = self._get_right_tag(left_tag, block)
                        # keep checking conditions below and maybe just append

                    if block.rstrip().endswith(">") \
                        and self._equal_tags(left_tag, right_tag):
                        new_blocks.append(

@@ -519,9 +517,9 @@ class HtmlBlockPreprocessor(TextPreprocessor):

            else:
                items.append(block.strip())

                right_tag = self._get_right_tag(left_tag, block)

                if self._equal_tags(left_tag, right_tag):
                    # if find closing tag
                    in_tag = False

@@ -532,7 +530,7 @@ class HtmlBlockPreprocessor(TextPreprocessor):
        if items:
            new_blocks.append(self.stash.store('\n\n'.join(items)))
            new_blocks.append('\n')

        return "\n\n".join(new_blocks)

HTML_BLOCK_PREPROCESSOR = HtmlBlockPreprocessor()

@@ -605,7 +603,7 @@ LINE_PREPROCESSOR = LinePreprocessor()


class ReferencePreprocessor(Preprocessor):
    '''
    Removes reference definitions from the text and stores them for later use.
    '''

@@ -760,7 +758,7 @@ class BacktickPattern (Pattern):
        return el


class DoubleTagPattern (SimpleTagPattern):

    def handleMatch(self, m, doc):
        tag1, tag2 = self.tag.split(",")

@@ -775,7 +773,6 @@ class HtmlPattern (Pattern):

    def handleMatch (self, m, doc):
        rawhtml = m.group(2)
-        inline = True
        place_holder = self.stash.store(rawhtml)
        return doc.createTextNode(place_holder)

@@ -926,11 +923,11 @@ There are two types of post-processors: Postprocessor and TextPostprocessor
class Postprocessor:
    '''
    Postprocessors are run before the dom it converted back into text.

    Each Postprocessor implements a "run" method that takes a pointer to a
    NanoDom document, modifies it as necessary and returns a NanoDom
    document.

    Postprocessors must extend markdown.Postprocessor.

    There are currently no standard post-processors, but the footnote

@@ -945,10 +942,10 @@ class Postprocessor:
class TextPostprocessor:
    '''
    TextPostprocessors are run after the dom it converted back into text.

    Each TextPostprocessor implements a "run" method that takes a pointer to a
    text string, modifies it as necessary and returns a text string.

    TextPostprocessors must extend markdown.TextPostprocessor.
    '''

@@ -971,7 +968,7 @@ class RawHtmlTextPostprocessor(TextPostprocessor):
                html = ''
            else:
                html = HTML_REMOVED_TEXT

            text = text.replace("<p>%s\n</p>" % (HTML_PLACEHOLDER % i),
                                html + "\n")
            text = text.replace(HTML_PLACEHOLDER % i, html)

@@ -1031,7 +1028,6 @@ class BlockGuru:
           remainder of the original list"""

        items = []
-        item = -1

        i = 0 # to keep track of where we are

@@ -1187,7 +1183,7 @@ class Markdown:
                                   RAWHTMLTEXTPOSTPROCESSOR]

        self.prePatterns = []


        self.inlinePatterns = [DOUBLE_BACKTICK_PATTERN,
                               BACKTICK_PATTERN,

@@ -1241,7 +1237,7 @@ class Markdown:
                configs_for_ext = configs[ext]
            else:
                configs_for_ext = []
            extension = module.makeExtension(configs_for_ext)
            extension.extendMarkdown(self, globals())


@@ -1310,7 +1306,7 @@ class Markdown:
            else:
                buffer.append(line)
        self._processSection(self.top_element, buffer)

        #self._processSection(self.top_element, self.lines)

        # Not sure why I put this in but let's leave it for now.

@@ -1426,7 +1422,7 @@ class Markdown:

        for item in list:
            el.appendChild(item)


    def _processUList(self, parent_elem, lines, inList):
        self._processList(parent_elem, lines, inList,

@@ -1458,7 +1454,7 @@ class Markdown:

        i = 0 # a counter to keep track of where we are

        for line in lines:

            loose = 0
            if not line.strip():

@@ -1477,7 +1473,7 @@ class Markdown:

                # Check if the next non-blank line is still a part of the list
                if ( RE.regExp['ul'].match(next) or
                     RE.regExp['ol'].match(next) or
                     RE.regExp['tabbed'].match(next) ):
                    # get rid of any white space in the line
                    items[item].append(line.strip())

@@ -1618,7 +1614,7 @@ class Markdown:
        i = 0

        while i < len(parts):

            x = parts[i]

            if isinstance(x, (str, unicode)):

@@ -1641,14 +1637,14 @@ class Markdown:
                parts[i] = self.doc.createTextNode(x)

        return parts


    def _applyPattern(self, line, pattern, patternIndex):

        """ Given a pattern name, this function checks if the line
        fits the pattern, creates the necessary elements, and returns
        back a list consisting of NanoDom elements and/or strings.

        @param line: the text to be processed
        @param pattern: the pattern to be checked

@@ -1676,19 +1672,19 @@ class Markdown:
            if not node.nodeName in ["code", "pre"]:
                for child in node.childNodes:
                    if isinstance(child, TextNode):

                        result = self._handleInline(child.value, patternIndex+1)

                        if result:

                            if result == [child]:
                                continue

                            result.reverse()
                            #to make insertion easier

                            position = node.childNodes.index(child)

                            node.removeChild(child)

                            for item in result:

@@ -1699,7 +1695,7 @@ class Markdown:
                                        self.doc.createTextNode(item))
                                else:
                                    node.insertChild(position, item)




@@ -1798,14 +1794,14 @@ def markdownFromFile(input = None,
def markdown(text,
             extensions = [],
             safe_mode = False):

    message(DEBUG, "in markdown.markdown(), received text:\n%s" % text)

    extension_names = []
    extension_configs = {}

    for ext in extensions:
        pos = ext.find("(")
        if pos == -1:
            extension_names.append(ext)
        else:

@@ -1820,7 +1816,7 @@ def markdown(text,
                  safe_mode = safe_mode)

    return md.convert(text)


class Extension:

@@ -1845,26 +1841,11 @@ Python 2.3 or higher required for advanced command line options.
For lower versions of Python use:

    %s INPUT_FILE > OUTPUT_FILE

""" % EXECUTABLE_NAME_FOR_USAGE

def parse_options():
+    import optparse
-    try:
-        optparse = __import__("optparse")
-    except:
-        if len(sys.argv) == 2:
-            return {'input': sys.argv[1],
-                    'output': None,
-                    'message_threshold': CRITICAL,
-                    'safe': False,
-                    'extensions': [],
-                    'encoding': None }
-
-        else:
-            print OPTPARSE_WARNING
-            return None

    parser = optparse.OptionParser(usage="%prog INPUTFILE [options]")

    parser.add_option("-f", "--file", dest="filename",

@@ -1881,7 +1862,7 @@ def parse_options():
    parser.add_option("-s", "--safe", dest="safe", default=False,
                      metavar="SAFE_MODE",
                      help="same mode ('replace', 'remove' or 'escape' user's HTML tag)")

    parser.add_option("--noisy",
                      action="store_const", const=DEBUG, dest="verbose",
                      help="print debug messages")

@@ -1914,14 +1895,14 @@ def main():

    if not options:
        sys.exit(0)

    markdownFromFile(**options)

if __name__ == '__main__':
    sys.exit(main())
    """ Run Markdown from the command line. """

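Note: the parse_options hunk above drops a fallback that only mattered on interpreters too old to ship optparse; a plain import at the top of the function is enough on anything calibre supports. A simplified, self-contained sketch of the resulting shape (the option shown mirrors one from the file, but the const value and arguments are simplified here):

    import optparse

    parser = optparse.OptionParser(usage="%prog INPUTFILE [options]")
    parser.add_option("--noisy", action="store_const", const=True, dest="verbose",
                      help="print debug messages")
    options, args = parser.parse_args(["input.txt", "--noisy"])
    print(options.verbose)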
@@ -108,7 +108,7 @@ def _get_cover_url(br, asin):
    q = 'http://amzn.com/'+asin
    try:
        raw = br.open_novisit(q).read()
-    except Exception, e:
+    except Exception as e:
        if callable(getattr(e, 'getcode', None)) and \
                e.getcode() == 404:
            return None

@@ -139,7 +139,7 @@ def get_metadata(br, asin, mi):
    q = 'http://amzn.com/'+asin
    try:
        raw = br.open_novisit(q).read()
-    except Exception, e:
+    except Exception as e:
        if callable(getattr(e, 'getcode', None)) and \
                e.getcode() == 404:
            return False

@@ -33,7 +33,7 @@ class AmazonFr(MetadataSource):
        try:
            self.results = search(self.title, self.book_author, self.publisher,
                self.isbn, max_results=10, verbose=self.verbose, lang='fr')
-        except Exception, e:
+        except Exception as e:
            self.exception = e
            self.tb = traceback.format_exc()

@@ -50,7 +50,7 @@ class AmazonEs(MetadataSource):
        try:
            self.results = search(self.title, self.book_author, self.publisher,
                self.isbn, max_results=10, verbose=self.verbose, lang='es')
-        except Exception, e:
+        except Exception as e:
            self.exception = e
            self.tb = traceback.format_exc()

@@ -67,7 +67,7 @@ class AmazonEn(MetadataSource):
        try:
            self.results = search(self.title, self.book_author, self.publisher,
                self.isbn, max_results=10, verbose=self.verbose, lang='en')
-        except Exception, e:
+        except Exception as e:
            self.exception = e
            self.tb = traceback.format_exc()

@@ -84,7 +84,7 @@ class AmazonDe(MetadataSource):
        try:
            self.results = search(self.title, self.book_author, self.publisher,
                self.isbn, max_results=10, verbose=self.verbose, lang='de')
-        except Exception, e:
+        except Exception as e:
            self.exception = e
            self.tb = traceback.format_exc()

@@ -103,7 +103,7 @@ class Amazon(MetadataSource):
        try:
            self.results = search(self.title, self.book_author, self.publisher,
                self.isbn, max_results=10, verbose=self.verbose, lang='all')
-        except Exception, e:
+        except Exception as e:
            self.exception = e
            self.tb = traceback.format_exc()

@@ -193,7 +193,7 @@ class Query(object):

        try:
            raw = browser.open_novisit(self.urldata, timeout=timeout).read()
-        except Exception, e:
+        except Exception as e:
            report(verbose)
            if callable(getattr(e, 'getcode', None)) and \
                    e.getcode() == 404:

@@ -226,7 +226,7 @@ class Query(object):
            try:
                urldata = self.urldata + '&page=' + str(i)
                raw = browser.open_novisit(urldata, timeout=timeout).read()
-            except Exception, e:
+            except Exception as e:
                continue
            if '<title>404 - ' in raw:
                continue

@@ -413,7 +413,7 @@ class ResultList(list):
    def get_individual_metadata(self, browser, linkdata, verbose):
        try:
            raw = browser.open_novisit(linkdata).read()
-        except Exception, e:
+        except Exception as e:
            report(verbose)
            if callable(getattr(e, 'getcode', None)) and \
                    e.getcode() == 404:

@@ -445,7 +445,7 @@ class ResultList(list):
                # self.clean_entry(entry, invalid_id=inv_ids)
                title = self.get_title(entry)
                authors = self.get_authors(entry)
-            except Exception, e:
+            except Exception as e:
                if verbose:
                    print 'Failed to get all details for an entry'
                    print e

@@ -575,7 +575,10 @@ class Metadata(object):
            orig_res = res
            datatype = cmeta['datatype']
            if datatype == 'text' and cmeta['is_multiple']:
-                res = u', '.join(sorted(res, key=sort_key))
+                if cmeta['display'].get('is_names', False):
+                    res = u' & '.join(res)
+                else:
+                    res = u', '.join(sorted(res, key=sort_key))
            elif datatype == 'series' and series_with_index:
                if self.get_extra(key) is not None:
                    res = res + \
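Note: the Metadata.format change just above lets a multi-value custom column that is flagged as containing names (the column's is_names display option) render like an author list: joined with ampersands and kept in entry order, instead of sorted and comma-separated. A tiny illustration of the two join styles (sample values invented; the real code sorts with key=sort_key):

    values = [u'Douglas Adams', u'Eoin Colfer']
    print(u' & '.join(values))            # Douglas Adams & Eoin Colfer  (names)
    print(u', '.join(sorted(values)))     # Douglas Adams, Eoin Colfer   (plain multi-value text)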
@ -91,7 +91,7 @@ class OpenLibraryCovers(CoverDownload): # {{{
|
|||||||
br.open_novisit(HeadRequest(self.OPENLIBRARY%mi.isbn), timeout=timeout)
|
br.open_novisit(HeadRequest(self.OPENLIBRARY%mi.isbn), timeout=timeout)
|
||||||
self.debug('cover for', mi.isbn, 'found')
|
self.debug('cover for', mi.isbn, 'found')
|
||||||
ans.set()
|
ans.set()
|
||||||
except Exception, e:
|
except Exception as e:
|
||||||
if callable(getattr(e, 'getcode', None)) and e.getcode() == 302:
|
if callable(getattr(e, 'getcode', None)) and e.getcode() == 302:
|
||||||
self.debug('cover for', mi.isbn, 'found')
|
self.debug('cover for', mi.isbn, 'found')
|
||||||
ans.set()
|
ans.set()
|
||||||
@ -106,7 +106,7 @@ class OpenLibraryCovers(CoverDownload): # {{{
|
|||||||
try:
|
try:
|
||||||
ans = br.open(self.OPENLIBRARY%mi.isbn, timeout=timeout).read()
|
ans = br.open(self.OPENLIBRARY%mi.isbn, timeout=timeout).read()
|
||||||
result_queue.put((True, ans, 'jpg', self.name))
|
result_queue.put((True, ans, 'jpg', self.name))
|
||||||
except Exception, e:
|
except Exception as e:
|
||||||
if callable(getattr(e, 'getcode', None)) and e.getcode() == 404:
|
if callable(getattr(e, 'getcode', None)) and e.getcode() == 404:
|
||||||
result_queue.put((False, _('ISBN: %s not found')%mi.isbn, '', self.name))
|
result_queue.put((False, _('ISBN: %s not found')%mi.isbn, '', self.name))
|
||||||
else:
|
else:
|
||||||
@ -131,7 +131,7 @@ class AmazonCovers(CoverDownload): # {{{
|
|||||||
get_cover_url(mi.isbn, br)
|
get_cover_url(mi.isbn, br)
|
||||||
self.debug('cover for', mi.isbn, 'found')
|
self.debug('cover for', mi.isbn, 'found')
|
||||||
ans.set()
|
ans.set()
|
||||||
except Exception, e:
|
except Exception as e:
|
||||||
self.debug(e)
|
self.debug(e)
|
||||||
|
|
||||||
def get_covers(self, mi, result_queue, abort, timeout=5.):
|
def get_covers(self, mi, result_queue, abort, timeout=5.):
|
||||||
@ -145,7 +145,7 @@ class AmazonCovers(CoverDownload): # {{{
|
|||||||
raise ValueError('No cover found for ISBN: %s'%mi.isbn)
|
raise ValueError('No cover found for ISBN: %s'%mi.isbn)
|
||||||
cover_data = br.open_novisit(url).read()
|
cover_data = br.open_novisit(url).read()
|
||||||
result_queue.put((True, cover_data, 'jpg', self.name))
|
result_queue.put((True, cover_data, 'jpg', self.name))
|
||||||
except Exception, e:
|
except Exception as e:
|
||||||
result_queue.put((False, self.exception_to_string(e),
|
result_queue.put((False, self.exception_to_string(e),
|
||||||
traceback.format_exc(), self.name))
|
traceback.format_exc(), self.name))
|
||||||
|
|
||||||
@ -242,7 +242,7 @@ class DoubanCovers(CoverDownload): # {{{
|
|||||||
try:
|
try:
|
||||||
url = self.DOUBAN_ISBN_URL + isbn + "?apikey=" + self.CALIBRE_DOUBAN_API_KEY
|
url = self.DOUBAN_ISBN_URL + isbn + "?apikey=" + self.CALIBRE_DOUBAN_API_KEY
|
||||||
src = br.open(url, timeout=timeout).read()
|
src = br.open(url, timeout=timeout).read()
|
||||||
except Exception, err:
|
except Exception as err:
|
||||||
if isinstance(getattr(err, 'args', [None])[0], socket.timeout):
|
if isinstance(getattr(err, 'args', [None])[0], socket.timeout):
|
||||||
err = Exception(_('Douban.com API timed out. Try again later.'))
|
err = Exception(_('Douban.com API timed out. Try again later.'))
|
||||||
raise err
|
raise err
|
||||||
@ -275,7 +275,7 @@ class DoubanCovers(CoverDownload): # {{{
|
|||||||
if self.get_cover_url(mi.isbn, br, timeout=timeout) != None:
|
if self.get_cover_url(mi.isbn, br, timeout=timeout) != None:
|
||||||
self.debug('cover for', mi.isbn, 'found')
|
self.debug('cover for', mi.isbn, 'found')
|
||||||
ans.set()
|
ans.set()
|
||||||
except Exception, e:
|
except Exception as e:
|
||||||
self.debug(e)
|
self.debug(e)
|
||||||
|
|
||||||
def get_covers(self, mi, result_queue, abort, timeout=5.):
|
def get_covers(self, mi, result_queue, abort, timeout=5.):
|
||||||
@ -286,7 +286,7 @@ class DoubanCovers(CoverDownload): # {{{
|
|||||||
url = self.get_cover_url(mi.isbn, br, timeout=timeout)
|
url = self.get_cover_url(mi.isbn, br, timeout=timeout)
|
||||||
cover_data = br.open_novisit(url).read()
|
cover_data = br.open_novisit(url).read()
|
||||||
result_queue.put((True, cover_data, 'jpg', self.name))
|
result_queue.put((True, cover_data, 'jpg', self.name))
|
||||||
except Exception, e:
|
except Exception as e:
|
||||||
result_queue.put((False, self.exception_to_string(e),
|
result_queue.put((False, self.exception_to_string(e),
|
||||||
traceback.format_exc(), self.name))
|
traceback.format_exc(), self.name))
|
||||||
# }}}
|
# }}}
|
||||||
|
@ -49,7 +49,7 @@ class DoubanBooks(MetadataSource):
|
|||||||
self.results = search(self.title, self.book_author, self.publisher,
|
self.results = search(self.title, self.book_author, self.publisher,
|
||||||
self.isbn, max_results=10,
|
self.isbn, max_results=10,
|
||||||
verbose=self.verbose)
|
verbose=self.verbose)
|
||||||
except Exception, e:
|
except Exception as e:
|
||||||
self.exception = e
|
self.exception = e
|
||||||
self.tb = traceback.format_exc()
|
self.tb = traceback.format_exc()
|
||||||
|
|
||||||
@ -192,7 +192,7 @@ class ResultList(list):
|
|||||||
raw = browser.open(id_url).read()
|
raw = browser.open(id_url).read()
|
||||||
feed = etree.fromstring(raw)
|
feed = etree.fromstring(raw)
|
||||||
x = entry(feed)[0]
|
x = entry(feed)[0]
|
||||||
except Exception, e:
|
except Exception as e:
|
||||||
if verbose:
|
if verbose:
|
||||||
print 'Failed to get all details for an entry'
|
print 'Failed to get all details for an entry'
|
||||||
print e
|
print e
|
||||||
@ -212,7 +212,7 @@ def search(title=None, author=None, publisher=None, isbn=None,
|
|||||||
api_key = CALIBRE_DOUBAN_API_KEY
|
api_key = CALIBRE_DOUBAN_API_KEY
|
||||||
|
|
||||||
while start > 0 and len(entries) <= max_results:
|
while start > 0 and len(entries) <= max_results:
|
||||||
new, start = Query(title=title, author=author, publisher=publisher,
|
new, start = Query(title=title, author=author, publisher=publisher,
|
||||||
isbn=isbn, max_results=max_results, start_index=start, api_key=api_key)(br, verbose)
|
isbn=isbn, max_results=max_results, start_index=start, api_key=api_key)(br, verbose)
|
||||||
if not new:
|
if not new:
|
||||||
break
|
break
|
||||||
|
@@ -5,7 +5,7 @@ __copyright__ = '2008, Kovid Goyal <kovid at kovidgoyal.net>'

 '''Read meta information from epub files'''

-import os, re, posixpath, shutil
+import os, re, posixpath
 from cStringIO import StringIO
 from contextlib import closing

@@ -192,6 +192,13 @@ def get_metadata(stream, extract_cover=True):
 def get_quick_metadata(stream):
     return get_metadata(stream, False)

+def _write_new_cover(new_cdata, cpath):
+    from calibre.utils.magick.draw import save_cover_data_to
+    new_cover = PersistentTemporaryFile(suffix=os.path.splitext(cpath)[1])
+    new_cover.close()
+    save_cover_data_to(new_cdata, new_cover.name)
+    return new_cover
+
 def set_metadata(stream, mi, apply_null=False, update_timestamp=False):
     stream.seek(0)
     reader = OCFZipReader(stream, root=os.getcwdu())
@@ -208,6 +215,7 @@ def set_metadata(stream, mi, apply_null=False, update_timestamp=False):
             new_cdata = open(mi.cover, 'rb').read()
         except:
             pass
+    new_cover = cpath = None
     if new_cdata and raster_cover:
         try:
             cpath = posixpath.join(posixpath.dirname(reader.opf_path),
@@ -215,19 +223,7 @@ def set_metadata(stream, mi, apply_null=False, update_timestamp=False):
             cover_replacable = not reader.encryption_meta.is_encrypted(cpath) and \
                     os.path.splitext(cpath)[1].lower() in ('.png', '.jpg', '.jpeg')
             if cover_replacable:
-                from calibre.utils.magick.draw import save_cover_data_to, \
-                                                        identify
-                new_cover = PersistentTemporaryFile(suffix=os.path.splitext(cpath)[1])
-                resize_to = None
-                if False: # Resize new cover to same size as old cover
-                    shutil.copyfileobj(reader.open(cpath), new_cover)
-                    new_cover.close()
-                    width, height, fmt = identify(new_cover.name)
-                    resize_to = (width, height)
-                else:
-                    new_cover.close()
-                save_cover_data_to(new_cdata, new_cover.name,
-                        resize_to=resize_to)
+                new_cover = _write_new_cover(new_cdata, cpath)
                 replacements[cpath] = open(new_cover.name, 'rb')
         except:
             import traceback
@@ -249,4 +245,11 @@ def set_metadata(stream, mi, apply_null=False, update_timestamp=False):
     newopf = StringIO(reader.opf.render())
     safe_replace(stream, reader.container[OPF.MIMETYPE], newopf,
             extra_replacements=replacements)
+    try:
+        if cpath is not None:
+            replacements[cpath].close()
+            os.remove(replacements[cpath].name)
+    except:
+        pass
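For context, a hedged sketch of how the reworked set_metadata() is typically driven; the file names and metadata values are illustrative, only the function signature comes from the diff above:

    # Sketch: update the metadata and (raster) cover of an EPUB in place.
    from calibre.ebooks.metadata.book.base import Metadata
    from calibre.ebooks.metadata.epub import set_metadata

    mi = Metadata(u'Some Title', [u'Some Author'])
    mi.cover = 'cover.jpg'   # written out via the new _write_new_cover() helper
    with open('book.epub', 'r+b') as stream:
        set_metadata(stream, mi, apply_null=False, update_timestamp=True)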
@@ -93,7 +93,7 @@ class MetadataSource(Plugin): # {{{
                     traceback.print_exc()
                     mi.comments = None

-        except Exception, e:
+        except Exception as e:
             self.exception = e
             self.tb = traceback.format_exc()

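This same mechanical change recurs throughout the commit: the old comma form of the except clause is replaced by the as form, which is also accepted by Python 3 and is unambiguous when catching a tuple of exception types. A minimal illustration, not taken from the diff (risky() and handle() are placeholder names):

    # Old spelling, Python 2 only; easy to misread with a tuple of exceptions.
    try:
        risky()
    except (IOError, OSError), err:
        handle(err)

    # New spelling, accepted by Python 2.6+ and Python 3.
    try:
        risky()
    except (IOError, OSError) as err:
        handle(err)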
@@ -186,7 +186,7 @@ class GoogleBooks(MetadataSource): # {{{
             self.results = search(self.title, self.book_author, self.publisher,
                                   self.isbn, max_results=10,
                                   verbose=self.verbose)
-        except Exception, e:
+        except Exception as e:
             self.exception = e
             self.tb = traceback.format_exc()

@@ -217,7 +217,7 @@ class ISBNDB(MetadataSource): # {{{
         try:
             opts, args = option_parser().parse_args(args)
             self.results = create_books(opts, args)
-        except Exception, e:
+        except Exception as e:
             self.exception = e
             self.tb = traceback.format_exc()

@@ -244,7 +244,7 @@ class Amazon(MetadataSource): # {{{
         try:
             self.results = get_social_metadata(self.title, self.book_author,
                     self.publisher, self.isbn)
-        except Exception, e:
+        except Exception as e:
             self.exception = e
             self.tb = traceback.format_exc()

@@ -285,7 +285,7 @@ class KentDistrictLibrary(MetadataSource): # {{{
         from calibre.ebooks.metadata.kdl import get_series
         try:
             self.results = get_series(self.title, self.book_author)
-        except Exception, e:
+        except Exception as e:
             import traceback
             traceback.print_exc()
             self.exception = e
@@ -30,7 +30,7 @@ class Fictionwise(MetadataSource): # {{{
         try:
             self.results = search(self.title, self.book_author, self.publisher,
                                   self.isbn, max_results=10, verbose=self.verbose)
-        except Exception, e:
+        except Exception as e:
             self.exception = e
             self.tb = traceback.format_exc()

@@ -91,7 +91,7 @@ class Query(object):

         try:
             raw = browser.open_novisit(self.BASE_URL, self.urldata, timeout=timeout).read()
-        except Exception, e:
+        except Exception as e:
             report(verbose)
             if callable(getattr(e, 'getcode', None)) and \
                     e.getcode() == 404:
@@ -276,7 +276,7 @@ class ResultList(list):
     def get_individual_metadata(self, browser, linkdata, verbose):
         try:
             raw = browser.open_novisit(self.BASE_URL + linkdata).read()
-        except Exception, e:
+        except Exception as e:
             report(verbose)
             if callable(getattr(e, 'getcode', None)) and \
                     e.getcode() == 404:
@@ -311,7 +311,7 @@ class ResultList(list):
                 #maybe strenghten the search
                 ratings = self.get_rating(entry.xpath("./p/table")[1], verbose)
                 authors = self.get_authors(entry)
-            except Exception, e:
+            except Exception as e:
                 if verbose:
                     print _('Failed to get all details for an entry')
                     print e
@@ -328,7 +328,7 @@ class ResultList(list):
                 #maybe strenghten the search
                 ratings = self.get_rating(entry.xpath("./p/table")[1], verbose)
                 authors = self.get_authors(entry)
-            except Exception, e:
+            except Exception as e:
                 if verbose:
                     print _('Failed to get all details for an entry')
                     print e
@@ -176,7 +176,7 @@ class ResultList(list):
             raw = browser.open(id_url).read()
             feed = etree.fromstring(raw)
             x = entry(feed)[0]
-        except Exception, e:
+        except Exception as e:
             if verbose:
                 print 'Failed to get all details for an entry'
                 print e
@@ -38,7 +38,7 @@ def get_metadata(stream):
             mi.author = author
         if category:
             mi.category = category
-    except Exception, err:
+    except Exception as err:
         msg = u'Couldn\'t read metadata from imp: %s with error %s'%(mi.title, unicode(err))
         print >>sys.stderr, msg.encode('utf8')
     return mi
@@ -25,7 +25,7 @@ def fetch_metadata(url, max=3, timeout=5.):
     while len(books) < total_results and max > 0:
         try:
             raw = br.open(url, timeout=timeout).read()
-        except Exception, err:
+        except Exception as err:
             raise ISBNDBError('Could not fetch ISBNDB metadata. Error: '+str(err))
         soup = BeautifulStoneSoup(raw,
                 convertEntities=BeautifulStoneSoup.XML_ENTITIES)
@@ -43,7 +43,7 @@ def get_series(title, authors, timeout=60):
     br = browser()
     try:
         raw = br.open_novisit(url, timeout=timeout).read()
-    except URLError, e:
+    except URLError as e:
         if isinstance(e.reason, socket.timeout):
             raise Exception('KDL Server busy, try again later')
         raise
@@ -45,7 +45,7 @@ def check_for_cover(isbn, timeout=5.):
    try:
        br.open_novisit(HeadRequest(OPENLIBRARY%isbn), timeout=timeout)
        return True
-   except Exception, e:
+   except Exception as e:
        if callable(getattr(e, 'getcode', None)) and e.getcode() == 302:
            return True
    return False
@@ -32,7 +32,7 @@ class NiceBooks(MetadataSource):
         try:
             self.results = search(self.title, self.book_author, self.publisher,
                                 self.isbn, max_results=10, verbose=self.verbose)
-        except Exception, e:
+        except Exception as e:
             self.exception = e
             self.tb = traceback.format_exc()

@@ -54,7 +54,7 @@ class NiceBooksCovers(CoverDownload):
                 if Covers(mi.isbn)(entry).check_cover():
                     self.debug('cover for', mi.isbn, 'found')
                     ans.set()
-            except Exception, e:
+            except Exception as e:
                 self.debug(e)

     def get_covers(self, mi, result_queue, abort, timeout=5.):
@@ -67,7 +67,7 @@ class NiceBooksCovers(CoverDownload):
                 if not ext:
                     ext = 'jpg'
                 result_queue.put((True, cover_data, ext, self.name))
-            except Exception, e:
+            except Exception as e:
                 result_queue.put((False, self.exception_to_string(e),
                     traceback.format_exc(), self.name))

@@ -109,7 +109,7 @@ class Query(object):

         try:
             raw = browser.open_novisit(self.BASE_URL+self.urldata, timeout=timeout).read()
-        except Exception, e:
+        except Exception as e:
             report(verbose)
             if callable(getattr(e, 'getcode', None)) and \
                     e.getcode() == 404:
@@ -144,7 +144,7 @@ class Query(object):
             try:
                 urldata = self.urldata + '&p=' + str(i)
                 raw = browser.open_novisit(self.BASE_URL+urldata, timeout=timeout).read()
-            except Exception, e:
+            except Exception as e:
                 continue
             if '<title>404 - ' in raw:
                 continue
@@ -233,7 +233,7 @@ class ResultList(list):
     def get_individual_metadata(self, browser, linkdata, verbose):
         try:
             raw = browser.open_novisit(self.BASE_URL + linkdata).read()
-        except Exception, e:
+        except Exception as e:
             report(verbose)
             if callable(getattr(e, 'getcode', None)) and \
                     e.getcode() == 404:
@@ -266,7 +266,7 @@ class ResultList(list):
                 entry = entry.find("div[@id='book-info']")
                 title = self.get_title(entry)
                 authors = self.get_authors(entry)
-            except Exception, e:
+            except Exception as e:
                 if verbose:
                     print 'Failed to get all details for an entry'
                     print e
@@ -280,7 +280,7 @@ class ResultList(list):
                 entry = entry.find("div[@id='book-info']")
                 title = self.get_title(entry)
                 authors = self.get_authors(entry)
-            except Exception, e:
+            except Exception as e:
                 if verbose:
                     print 'Failed to get all details for an entry'
                     print e
@@ -315,7 +315,7 @@ class Covers(object):
            cover, ext = browser.open_novisit(self.urlimg, timeout=timeout).read(), \
                self.urlimg.rpartition('.')[-1]
            return cover, ext if ext else 'jpg'
-       except Exception, err:
+       except Exception as err:
            if isinstance(getattr(err, 'args', [None])[0], socket.timeout):
                raise NiceBooksError(_('Nicebooks timed out. Try again later.'))
            if not len(self.urlimg):
@@ -43,7 +43,7 @@ def get_metadata(stream):
             elif key.strip() == 'AUTHOR':
                 mi.author = value
                 mi.authors = string_to_authors(value)
-    except Exception, err:
+    except Exception as err:
         msg = u'Couldn\'t read metadata from rb: %s with error %s'%(mi.title, unicode(err))
         print >>sys.stderr, msg.encode('utf8')
         raise
@@ -41,12 +41,12 @@ class Worker(Thread): # {{{
         try:
             self.get_details()
         except:
-            self.log.error('get_details failed for url: %r'%self.url)
+            self.log.exception('get_details failed for url: %r'%self.url)

     def get_details(self):
         try:
             raw = self.browser.open_novisit(self.url, timeout=self.timeout).read().strip()
-        except Exception, e:
+        except Exception as e:
             if callable(getattr(e, 'getcode', None)) and \
                     e.getcode() == 404:
                 self.log.error('URL malformed: %r'%self.url)
@@ -168,7 +168,7 @@ class Worker(Thread): # {{{
         if self.isbn:
             self.plugin.cache_isbn_to_identifier(self.isbn, self.amazon_id)
         if self.cover_url:
-            self.cache_identifier_to_cover_url(self.amazon_id,
+            self.plugin.cache_identifier_to_cover_url(self.amazon_id,
                     self.cover_url)

         self.result_queue.put(mi)
@@ -359,7 +359,7 @@ class Amazon(Source):
         br = self.browser
         try:
             raw = br.open_novisit(query, timeout=timeout).read().strip()
-        except Exception, e:
+        except Exception as e:
             if callable(getattr(e, 'getcode', None)) and \
                     e.getcode() == 404:
                 log.error('Query malformed: %r'%query)
@@ -21,6 +21,7 @@ def create_log(ostream=None):
     log.outputs = [FileStream(ostream)]
     return log

+# Comparing Metadata objects for relevance {{{
 words = ("the", "a", "an", "of", "and")
 prefix_pat = re.compile(r'^(%s)\s+'%("|".join(words)))
 trailing_paren_pat = re.compile(r'\(.*\)$')
@@ -35,6 +36,55 @@ def cleanup_title(s):
     s = whitespace_pat.sub(' ', s)
     return s.strip()

+class InternalMetadataCompareKeyGen(object):
+
+    '''
+    Generate a sort key for comparison of the relevance of Metadata objects,
+    given a search query.
+
+    The sort key ensures that an ascending order sort is a sort by order of
+    decreasing relevance.
+
+    The algorithm is:
+
+    1. Prefer results that have the same ISBN as specified in the query
+    2. Prefer results with all available fields filled in
+    3. Prefer results that are an exact title match to the query
+    4. Prefer results with longer comments (greater than 10% longer)
+    5. Prefer results with a cached cover URL
+    6. Use the relevance of the result as reported by the metadata source's search
+       engine
+    '''
+
+    def __init__(self, mi, source_plugin, title, authors, identifiers):
+        isbn = 1 if mi.isbn and mi.isbn == identifiers.get('isbn', None) else 2
+
+        all_fields = 1 if source_plugin.test_fields(mi) is None else 2
+
+        exact_title = 1 if title and \
+                cleanup_title(title) == cleanup_title(mi.title) else 2
+
+        has_cover = 2 if source_plugin.get_cached_cover_url(mi.identifiers)\
+                is None else 1
+
+        self.base = (isbn, all_fields, exact_title)
+        self.comments_len = len(mi.comments.strip() if mi.comments else '')
+        self.extra = (has_cover, getattr(mi, 'source_relevance', 0))
+
+    def __cmp__(self, other):
+        result = cmp(self.base, other.base)
+        if result == 0:
+            # Now prefer results with the longer comments, within 10%
+            cx, cy = self.comments_len, other.comments_len
+            t = (cx + cy) / 20
+            delta = cy - cx
+            if abs(delta) > t:
+                result = delta
+            else:
+                result = cmp(self.extra, other.extra)
+        return result
+
+# }}}
+
 class Source(Plugin):

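The key generator relies on Python's lexicographic tuple comparison: each preference is encoded as 1 (better) or 2 (worse) in self.base, so an ascending sort orders results by decreasing relevance exactly as the docstring lists. A standalone illustration of the same idea with made-up candidates:

    # Sorting ascending on the packed tuples puts the best candidate first.
    candidates = [
        ('no isbn, complete record',   (2, 1, 1)),
        ('isbn match, missing fields', (1, 2, 2)),
        ('isbn match, complete',       (1, 1, 2)),
    ]
    candidates.sort(key=lambda x: x[1])
    print [name for name, key in candidates]
    # -> ['isbn match, complete', 'isbn match, missing fields', 'no isbn, complete record']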
@@ -70,7 +120,7 @@ class Source(Plugin):
     def browser(self):
         if self._browser is None:
             self._browser = browser(user_agent=random_user_agent())
-        return self._browser
+        return self._browser.clone_browser()

     # }}}

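Returning a clone rather than the shared object means every read of the browser property hands back an independent mechanize browser (same user agent and cookies), so concurrent worker threads do not share one connection. A hedged sketch of the calling pattern; fetch() is an illustrative helper, not part of the commit:

    def fetch(plugin, url, timeout=30):
        br = plugin.browser   # now a per-caller clone of the cached browser
        return br.open_novisit(url, timeout=timeout).read()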
@@ -172,69 +222,30 @@ class Source(Plugin):
     def get_cached_cover_url(self, identifiers):
         '''
         Return cached cover URL for the book identified by
-        the identifiers dict or Noneif no such URL exists
+        the identifiers dict or None if no such URL exists.
+
+        Note that this method must only return validated URLs, i.e. not URLS
+        that could result in a generic cover image or a not found error.
         '''
         return None

-    def compare_identify_results(self, x, y, title=None, authors=None,
+    def identify_results_keygen(self, title=None, authors=None,
             identifiers={}):
         '''
-        Method used to sort the results from a call to identify by relevance.
-        Uses the actual query and various heuristics to rank results.
-        Re-implement in your plugin if this generic algorithm is not suitable.
-        Note that this method assumes x and y have a source_relevance
-        attribute.
-
-        one < two iff one is more relevant than two
+        Return a function that is used to generate a key that can sort Metadata
+        objects by their relevance given a search query (title, authors,
+        identifiers).
+
+        These keys are used to sort the results of a call to :meth:`identify`.
+
+        For details on the default algorithm see
+        :class:`InternalMetadataCompareKeyGen`. Re-implement this function in
+        your plugin if the default algorithm is not suitable.
         '''
-        # First, guarantee that if the query specifies an ISBN, the result with
-        # the same isbn is the most relevant
-        def isbn_test(mi):
-            return mi.isbn and mi.isbn == identifiers.get('isbn', None)
-
-        def boolcmp(a, b):
-            return -1 if a and not b else 1 if not a and b else 0
-
-        x_has_isbn, y_has_isbn = isbn_test(x), isbn_test(y)
-        result = boolcmp(x_has_isbn, y_has_isbn)
-        if result != 0:
-            return result
-
-        # Now prefer results that have complete metadata over those that don't
-        x_has_all_fields = self.test_fields(x) is None
-        y_has_all_fields = self.test_fields(y) is None
-
-        result = boolcmp(x_has_all_fields, y_has_all_fields)
-        if result != 0:
-            return result
-
-        # Now prefer results whose title matches the search query
-        if title:
-            x_title = cleanup_title(x.title)
-            y_title = cleanup_title(y.title)
-            t = cleanup_title(title)
-            x_has_title, y_has_title = x_title == t, y_title == t
-            result = boolcmp(x_has_title, y_has_title)
-            if result != 0:
-                return result
-
-        # Now prefer results with the longer comments, within 10%
-        cx = len(x.comments.strip() if x.comments else '')
-        cy = len(y.comments.strip() if y.comments else '')
-        t = (cx + cy) / 20
-        result = cy - cx
-        if result != 0 and abs(cx - cy) > t:
-            return result
-
-        # Now prefer results with cached cover URLs
-        x_has_cover = self.get_cached_cover_url(x.identifiers) is not None
-        y_has_cover = self.get_cached_cover_url(y.identifiers) is not None
-        result = boolcmp(x_has_cover, y_has_cover)
-        if result != 0:
-            return result
-
-        # Now use the relevance reported by the remote search engine
-        return x.source_relevance - y.source_relevance
+        def keygen(mi):
+            return InternalMetadataCompareKeyGen(mi, self, title, authors,
+                identifiers)
+        return keygen

     def identify(self, log, result_queue, abort, title=None, authors=None,
             identifiers={}, timeout=5):
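A plugin that wants a different ranking only has to return its own key function. A minimal hedged sketch (MySource and its cover-first preference are illustrative, not part of the commit):

    class MySource(Source):

        def identify_results_keygen(self, title=None, authors=None,
                identifiers={}):
            # Prefer results that already have a cached cover URL, then fall
            # back to the relevance reported by the source's search engine.
            def keygen(mi):
                has_cover = self.get_cached_cover_url(mi.identifiers) is not None
                return (0 if has_cover else 1, getattr(mi, 'source_relevance', 0))
            return keygen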
@@ -213,7 +213,7 @@ class GoogleBooks(Source):
         br = self.browser
         try:
             raw = br.open_novisit(query, timeout=timeout).read()
-        except Exception, e:
+        except Exception as e:
             log.exception('Failed to make identify query: %r'%query)
             return as_unicode(e)

@@ -222,7 +222,7 @@ class GoogleBooks(Source):
             feed = etree.fromstring(xml_to_unicode(clean_ascii_chars(raw),
                 strip_encoding_pats=True)[0], parser=parser)
             entries = entry(feed)
-        except Exception, e:
+        except Exception as e:
             log.exception('Failed to parse identify results')
             return as_unicode(e)

@@ -11,7 +11,6 @@ import os, tempfile, time
 from Queue import Queue, Empty
 from threading import Event

-
 from calibre.customize.ui import metadata_plugins
 from calibre import prints
 from calibre.ebooks.metadata import check_isbn
@@ -90,11 +89,17 @@ def test_identify_plugin(name, tests):
             except Empty:
                 break

-        prints('Found', len(results), 'matches:')
+        prints('Found', len(results), 'matches:', end=' ')
+        prints('Smaller relevance means better match')
+
+        results.sort(key=plugin.identify_results_keygen(
+            title=kwargs.get('title', None), authors=kwargs.get('authors',
+                None), identifiers=kwargs.get('identifiers', {})))

-        for mi in results:
+        for i, mi in enumerate(results):
+            prints('*'*30, 'Relevance:', i, '*'*30)
             prints(mi)
-            prints('\n\n')
+            prints('*'*75, '\n\n')

         possibles = []
         for mi in results:
@@ -117,6 +122,9 @@ def test_identify_plugin(name, tests):
             prints('Failed to find', plugin.test_fields(possibles[0]))
             raise SystemExit(1)

+        if results[0] is not possibles[0]:
+            prints('Most relevant result failed the tests')
+
         prints('Average time per query', sum(times)/len(times))

@@ -147,7 +147,7 @@ class TOC(list):
         if path and os.access(path, os.R_OK):
             try:
                 self.read_ncx_toc(path)
-            except Exception, err:
+            except Exception as err:
                 print 'WARNING: Invalid NCX file:', err
                 return
             cwd = os.path.abspath(self.base_path)
@@ -769,7 +769,8 @@ class MobiReader(object):

     def extract_text(self):
         self.log.debug('Extracting text...')
-        text_sections = [self.text_section(i) for i in range(1, self.book_header.records + 1)]
+        text_sections = [self.text_section(i) for i in range(1,
+            min(self.book_header.records + 1, len(self.sections)))]
         processed_records = list(range(0, self.book_header.records + 1))

         self.mobi_html = ''
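The fix clamps the number of text records to what the file actually contains, so a MOBI whose header advertises more records than there are sections no longer indexes past the end. The clamp in isolation, with illustrative numbers:

    records_claimed = 12        # what book_header.records claims
    sections_present = 10       # len(self.sections) actually in the file
    last = min(records_claimed + 1, sections_present)
    print range(1, last)        # -> [1, 2, ..., 9]; never touches a missing section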
@@ -884,13 +884,13 @@ class Manifest(object):
         def first_pass(data):
             try:
                 data = etree.fromstring(data, parser=parser)
-            except etree.XMLSyntaxError, err:
+            except etree.XMLSyntaxError as err:
                 self.oeb.log.exception('Initial parse failed:')
                 repl = lambda m: ENTITYDEFS.get(m.group(1), m.group(0))
                 data = ENTITY_RE.sub(repl, data)
                 try:
                     data = etree.fromstring(data, parser=parser)
-                except etree.XMLSyntaxError, err:
+                except etree.XMLSyntaxError as err:
                     self.oeb.logger.warn('Parsing file %r as HTML' % self.href)
                     if err.args and err.args[0].startswith('Excessive depth'):
                         from lxml.html import soupparser
@@ -103,7 +103,7 @@ def main(args=sys.argv, name=''):

     try:
         decrypt(args[0], opts.output, args[1])
-    except DecryptionError, e:
+    except DecryptionError as e:
         print e.value
         return 1

@@ -50,7 +50,7 @@ def pdftohtml(output_dir, pdf_path, no_images):
        try:
            p = popen(cmd, stderr=logf._fd, stdout=logf._fd,
                    stdin=subprocess.PIPE)
-       except OSError, err:
+       except OSError as err:
            if err.errno == 2:
                raise ConversionError(_('Could not find pdftohtml, check it is in your PATH'))
            else:
@@ -60,7 +60,7 @@ def pdftohtml(output_dir, pdf_path, no_images):
            try:
                ret = p.wait()
                break
-           except OSError, e:
+           except OSError as e:
                if e.errno == errno.EINTR:
                    continue
                else:
@@ -268,7 +268,7 @@ class RTFInput(InputFormatPlugin):
         self.log('Converting RTF to XML...')
         try:
             xml = self.generate_xml(stream.name)
-        except RtfInvalidCodeException, e:
+        except RtfInvalidCodeException as e:
             raise ValueError(_('This RTF file has a feature calibre does not '
             'support. Convert it to HTML first and then try it.\n%s')%e)

@@ -85,7 +85,7 @@ class SNBFile:
                     uncompressedData += bzdc.decompress(data)
                 else:
                     uncompressedData += data
-            except Exception, e:
+            except Exception as e:
                 print e
             if len(uncompressedData) != self.plainStreamSizeUncompressed:
                 raise Exception()
@@ -1,4 +1,6 @@
 #!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
 """
 PyTextile

@@ -206,6 +208,12 @@ class Textile(object):
         (re.compile(r'{clubs?}'), r'♣'),            # club
         (re.compile(r'{hearts?}'), r'♥'),           # heart
         (re.compile(r'{diam(onds?|s)}'), r'♦'),     # diamond
+        (re.compile(r'{"}'), r'"'),                 # double-quote
+        (re.compile(r"{'}"), r'''),                 # single-quote
+        (re.compile(r"{(’|'/|/')}"), r'’'),         # closing-single-quote - apostrophe
+        (re.compile(r"{(‘|\\'|'\\)}"), r'‘'),       # opening-single-quote
+        (re.compile(r'{(”|"/|/")}'), r'”'),         # closing-double-quote
+        (re.compile(r'{(“|\\"|"\\)}'), r'“'),       # opening-double-quote
     ]
     glyph_defaults = [
         (re.compile(r'(\d+\'?\"?)( ?)x( ?)(?=\d+)'), r'\1\2×\3'), # dimension sign
@@ -165,7 +165,7 @@ class TXTInput(InputFormatPlugin):
         elif options.formatting_type == 'textile':
             log.debug('Running text through textile conversion...')
             html = convert_textile(txt)
-            setattr(options, 'smarten_punctuation', True)
+            #setattr(options, 'smarten_punctuation', True)
         else:
             log.debug('Running text through basic conversion...')
             flow_size = getattr(options, 'flow_size', 0)
@@ -32,7 +32,7 @@ class Worker(Thread):
     def run(self):
         try:
             self.doit()
-        except Exception, err:
+        except Exception as err:
             import traceback
             try:
                 err = unicode(err)
@@ -78,7 +78,7 @@ class RecursiveFind(QThread): # {{{
             if isinstance(root, unicode):
                 root = root.encode(filesystem_encoding)
             self.walk(root)
-        except Exception, err:
+        except Exception as err:
             import traceback
             traceback.print_exc()
             try:
@@ -6,7 +6,7 @@ __license__ = 'GPL v3'
 __copyright__ = '2009, Kovid Goyal <kovid@kovidgoyal.net>'
 __docformat__ = 'restructuredtext en'

-import textwrap, codecs
+import textwrap, codecs, importlib
 from functools import partial

 from PyQt4.Qt import QWidget, QSpinBox, QDoubleSpinBox, QLineEdit, QTextEdit, \
@@ -22,8 +22,8 @@ from calibre.customize.ui import plugin_for_input_format
 def config_widget_for_input_plugin(plugin):
     name = plugin.name.lower().replace(' ', '_')
     try:
-        return __import__('calibre.gui2.convert.'+name,
-                fromlist=[1]).PluginWidget
+        return importlib.import_module(
+                'calibre.gui2.convert.'+name).PluginWidget
     except ImportError:
         pass

@@ -4,7 +4,7 @@ __license__ = 'GPL 3'
 __copyright__ = '2009, John Schember <john@nachtimwald.com>'
 __docformat__ = 'restructuredtext en'

-import shutil
+import shutil, importlib

 from PyQt4.Qt import QString, SIGNAL

@@ -82,8 +82,8 @@ class BulkConfig(Config):
         output_widget = None
         name = self.plumber.output_plugin.name.lower().replace(' ', '_')
         try:
-            output_widget = __import__('calibre.gui2.convert.'+name,
-                    fromlist=[1])
+            output_widget = importlib.import_module(
+                    'calibre.gui2.convert.'+name)
             pw = output_widget.PluginWidget
             pw.ICON = I('back.png')
             pw.HELP = _('Options specific to the output format.')
@@ -192,7 +192,7 @@ class MetadataWidget(Widget, Ui_Form):
         try:
             cf = open(_file, "rb")
             cover = cf.read()
-        except IOError, e:
+        except IOError as e:
             d = error_dialog(self.parent(), _('Error reading file'),
                     _("<p>There was an error reading from file: <br /><b>") + _file + "</b></p><br />"+str(e))
             d.exec_()
@@ -69,7 +69,7 @@ class SearchAndReplaceWidget(Widget, Ui_Form):
         try:
             pat = unicode(x.regex)
             re.compile(pat)
-        except Exception, err:
+        except Exception as err:
             error_dialog(self, _('Invalid regular expression'),
                     _('Invalid regular expression: %s')%err, show=True)
             return False
@@ -6,7 +6,7 @@ __license__ = 'GPL v3'
 __copyright__ = '2009, Kovid Goyal <kovid@kovidgoyal.net>'
 __docformat__ = 'restructuredtext en'

-import sys, cPickle, shutil
+import sys, cPickle, shutil, importlib

 from PyQt4.Qt import QString, SIGNAL, QAbstractListModel, Qt, QVariant, QFont

@@ -182,8 +182,8 @@ class Config(ResizableDialog, Ui_Dialog):
         output_widget = None
         name = self.plumber.output_plugin.name.lower().replace(' ', '_')
         try:
-            output_widget = __import__('calibre.gui2.convert.'+name,
-                    fromlist=[1])
+            output_widget = importlib.import_module(
+                    'calibre.gui2.convert.'+name)
             pw = output_widget.PluginWidget
             pw.ICON = I('back.png')
             pw.HELP = _('Options specific to the output format.')
@@ -193,8 +193,8 @@ class Config(ResizableDialog, Ui_Dialog):
         input_widget = None
         name = self.plumber.input_plugin.name.lower().replace(' ', '_')
         try:
-            input_widget = __import__('calibre.gui2.convert.'+name,
-                    fromlist=[1])
+            input_widget = importlib.import_module(
+                    'calibre.gui2.convert.'+name)
             pw = input_widget.PluginWidget
             pw.ICON = I('forward.png')
             pw.HELP = _('Options specific to the input format.')
|
@ -226,10 +226,18 @@ class Comments(Base):
|
|||||||
class Text(Base):
|
class Text(Base):
|
||||||
|
|
||||||
def setup_ui(self, parent):
|
def setup_ui(self, parent):
|
||||||
|
if self.col_metadata['display'].get('is_names', False):
|
||||||
|
self.sep = u' & '
|
||||||
|
else:
|
||||||
|
self.sep = u', '
|
||||||
values = self.all_values = list(self.db.all_custom(num=self.col_id))
|
values = self.all_values = list(self.db.all_custom(num=self.col_id))
|
||||||
values.sort(key=sort_key)
|
values.sort(key=sort_key)
|
||||||
if self.col_metadata['is_multiple']:
|
if self.col_metadata['is_multiple']:
|
||||||
w = MultiCompleteLineEdit(parent)
|
w = MultiCompleteLineEdit(parent)
|
||||||
|
w.set_separator(self.sep.strip())
|
||||||
|
if self.sep == u' & ':
|
||||||
|
w.set_space_before_sep(True)
|
||||||
|
w.set_add_separator(tweaks['authors_completer_append_separator'])
|
||||||
w.update_items_cache(values)
|
w.update_items_cache(values)
|
||||||
w.setSizePolicy(QSizePolicy.Minimum, QSizePolicy.Preferred)
|
w.setSizePolicy(QSizePolicy.Minimum, QSizePolicy.Preferred)
|
||||||
else:
|
else:
|
||||||
@ -261,12 +269,12 @@ class Text(Base):
|
|||||||
if self.col_metadata['is_multiple']:
|
if self.col_metadata['is_multiple']:
|
||||||
if not val:
|
if not val:
|
||||||
val = []
|
val = []
|
||||||
self.widgets[1].setText(u', '.join(val))
|
self.widgets[1].setText(self.sep.join(val))
|
||||||
|
|
||||||
def getter(self):
|
def getter(self):
|
||||||
if self.col_metadata['is_multiple']:
|
if self.col_metadata['is_multiple']:
|
||||||
val = unicode(self.widgets[1].text()).strip()
|
val = unicode(self.widgets[1].text()).strip()
|
||||||
ans = [x.strip() for x in val.split(',') if x.strip()]
|
ans = [x.strip() for x in val.split(self.sep.strip()) if x.strip()]
|
||||||
if not ans:
|
if not ans:
|
||||||
ans = None
|
ans = None
|
||||||
return ans
|
return ans
|
||||||
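People-type ('names') columns are shown and parsed with ' & ' between entries, while ordinary multi-value columns keep ', ', matching how calibre renders authors. The round trip in isolation, with illustrative values:

    is_names = True
    sep = u' & ' if is_names else u', '

    names = [u'Jane Doe', u'John Smith']
    text = sep.join(names)                                   # shown in the line edit
    parsed = [x.strip() for x in text.split(sep.strip()) if x.strip()]
    print text      # -> Jane Doe & John Smith
    print parsed    # -> [u'Jane Doe', u'John Smith']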
@@ -847,13 +855,20 @@ class BulkText(BulkBase):
             self.main_widget.setSizePolicy(QSizePolicy.Minimum, QSizePolicy.Preferred)
             self.adding_widget = self.main_widget

-            w = RemoveTags(parent, values)
-            self.widgets.append(QLabel('&'+self.col_metadata['name']+': ' +
-                                       _('tags to remove'), parent))
-            self.widgets.append(w)
-            self.removing_widget = w
-            w.tags_box.textChanged.connect(self.a_c_checkbox_changed)
-            w.checkbox.stateChanged.connect(self.a_c_checkbox_changed)
+            if not self.col_metadata['display'].get('is_names', False):
+                w = RemoveTags(parent, values)
+                self.widgets.append(QLabel('&'+self.col_metadata['name']+': ' +
+                                           _('tags to remove'), parent))
+                self.widgets.append(w)
+                self.removing_widget = w
+                self.main_widget.set_separator(',')
+                w.tags_box.textChanged.connect(self.a_c_checkbox_changed)
+                w.checkbox.stateChanged.connect(self.a_c_checkbox_changed)
+            else:
+                self.main_widget.set_separator('&')
+                self.main_widget.set_space_before_sep(True)
+                self.main_widget.set_add_separator(
+                    tweaks['authors_completer_append_separator'])
         else:
             self.make_widgets(parent, MultiCompleteComboBox)
             self.main_widget.set_separator(None)
@@ -882,21 +897,26 @@ class BulkText(BulkBase):
         if not self.a_c_checkbox.isChecked():
             return
         if self.col_metadata['is_multiple']:
-            remove_all, adding, rtext = self.gui_val
-            remove = set()
-            if remove_all:
-                remove = set(self.db.all_custom(num=self.col_id))
-            else:
-                txt = rtext
-                if txt:
-                    remove = set([v.strip() for v in txt.split(',')])
-            txt = adding
-            if txt:
-                add = set([v.strip() for v in txt.split(',')])
-            else:
-                add = set()
-            self.db.set_custom_bulk_multiple(book_ids, add=add, remove=remove,
-                                             num=self.col_id)
+            if self.col_metadata['display'].get('is_names', False):
+                val = self.gui_val
+                add = [v.strip() for v in val.split('&') if v.strip()]
+                self.db.set_custom_bulk(book_ids, add, num=self.col_id)
+            else:
+                remove_all, adding, rtext = self.gui_val
+                remove = set()
+                if remove_all:
+                    remove = set(self.db.all_custom(num=self.col_id))
+                else:
+                    txt = rtext
+                    if txt:
+                        remove = set([v.strip() for v in txt.split(',')])
+                txt = adding
+                if txt:
+                    add = set([v.strip() for v in txt.split(',')])
+                else:
+                    add = set()
+                self.db.set_custom_bulk_multiple(book_ids, add=add,
+                                                 remove=remove, num=self.col_id)
         else:
             val = self.gui_val
             val = self.normalize_ui_val(val)
@@ -905,10 +925,11 @@ class BulkText(BulkBase):

     def getter(self):
         if self.col_metadata['is_multiple']:
-            return self.removing_widget.checkbox.isChecked(), \
-                    unicode(self.adding_widget.text()), \
-                    unicode(self.removing_widget.tags_box.text())
+            if not self.col_metadata['display'].get('is_names', False):
+                return self.removing_widget.checkbox.isChecked(), \
+                        unicode(self.adding_widget.text()), \
+                        unicode(self.removing_widget.tags_box.text())
+            return unicode(self.adding_widget.text())
         val = unicode(self.main_widget.currentText()).strip()
         if not val:
             val = None
@@ -64,7 +64,7 @@ class DeviceJob(BaseJob): # {{{
             self.result = self.func(*self.args, **self.kwargs)
             if self._aborted:
                 return
-        except (Exception, SystemExit), err:
+        except (Exception, SystemExit) as err:
             if self._aborted:
                 return
             self.failed = True
@@ -162,7 +162,7 @@ class DeviceManager(Thread): # {{{
                     dev.reset(detected_device=detected_device,
                         report_progress=self.report_progress)
                     dev.open(self.current_library_uuid)
-                except OpenFeedback, e:
+                except OpenFeedback as e:
                     if dev not in self.ejected_devices:
                         self.open_feedback_msg(dev.get_gui_name(), e.feedback_msg)
                         self.ejected_devices.add(dev)
@@ -133,7 +133,7 @@ class ConfigWidget(QWidget, Ui_ConfigWidget):
         try:
             validation_formatter.validate(tmpl)
             return True
-        except Exception, err:
+        except Exception as err:
             error_dialog(self, _('Invalid template'),
                     '<p>'+_('The template %s is invalid:')%tmpl + \
                     '<br>'+unicode(err), show=True)
@@ -6,7 +6,7 @@ __license__ = 'GPL v3'
 __copyright__ = '2010, Kovid Goyal <kovid@kovidgoyal.net>'
 __docformat__ = 'restructuredtext en'

-import os, sys
+import os, sys, importlib

 from calibre.customize.ui import config
 from calibre.gui2.dialogs.catalog_ui import Ui_Dialog
@@ -43,8 +43,7 @@ class Catalog(ResizableDialog, Ui_Dialog):
             name = plugin.name.lower().replace(' ', '_')
             if type(plugin) in builtin_plugins:
                 try:
-                    catalog_widget = __import__('calibre.gui2.catalog.'+name,
-                            fromlist=[1])
+                    catalog_widget = importlib.import_module('calibre.gui2.catalog.'+name)
                     pw = catalog_widget.PluginWidget()
                     pw.initialize(name, db)
                     pw.ICON = I('forward.png')
@@ -75,7 +74,7 @@ class Catalog(ResizableDialog, Ui_Dialog):
                 # Import the dynamic PluginWidget() from .py file provided in plugin.zip
                 try:
                     sys.path.insert(0, plugin.resources_path)
-                    catalog_widget = __import__(name, fromlist=[1])
+                    catalog_widget = importlib.import_module(name)
                     pw = catalog_widget.PluginWidget()
                     pw.initialize(name)
                     pw.ICON = I('forward.png')
@@ -68,7 +68,7 @@ class DBCheck(QDialog): # {{{
                 self.start_load()
                 return
             QTimer.singleShot(0, self.do_one_dump)
-        except Exception, e:
+        except Exception as e:
            import traceback
            self.error = (as_unicode(e), traceback.format_exc())
            self.reject()
@@ -90,7 +90,7 @@ class DBCheck(QDialog): # {{{
                 self.conn.commit()

             QTimer.singleShot(0, self.do_one_load)
-        except Exception, e:
+        except Exception as e:
            import traceback
            self.error = (as_unicode(e), traceback.format_exc())
            self.reject()
@@ -111,7 +111,7 @@ class DBCheck(QDialog): # {{{
             self.pb.setValue(self.pb.value() + 1)
             self.count -= 1
             QTimer.singleShot(0, self.do_one_load)
-        except Exception, e:
+        except Exception as e:
            import traceback
            self.error = (as_unicode(e), traceback.format_exc())
            self.reject()
@@ -120,7 +120,7 @@ class MyBlockingBusy(QDialog): # {{{
             self.msg.setText(self.msg_text.format(self.phases[self.current_phase],
                 percent))
             self.do_one(id)
-        except Exception, err:
+        except Exception as err:
             import traceback
             try:
                 err = unicode(err)
@@ -653,7 +653,10 @@ class MetadataBulkDialog(ResizableDialog, Ui_MetadataBulkDialog):

         if self.destination_field_fm['is_multiple']:
             if self.comma_separated.isChecked():
-                if dest == 'authors':
+                if dest == 'authors' or \
+                   (self.destination_field_fm['is_custom'] and
+                    self.destination_field_fm['datatype'] == 'text' and
+                    self.destination_field_fm['display'].get('is_names', False)):
                     splitter = ' & '
                 else:
                     splitter = ','
@@ -76,7 +76,7 @@ class CoverFetcher(Thread): # {{{

             self.cover_data, self.errors = download_cover(mi,
                     timeout=self.timeout)
-        except Exception, e:
+        except Exception as e:
             self.exception = e
             self.traceback = traceback.format_exc()
             print self.traceback
@@ -183,7 +183,7 @@ class MetadataSingleDialog(ResizableDialog, Ui_MetadataSingleDialog):
         try:
             cf = open(_file, "rb")
             cover = cf.read()
-        except IOError, e:
+        except IOError as e:
             d = error_dialog(self, _('Error reading file'),
                     _("<p>There was an error reading from file: <br /><b>") + _file + "</b></p><br />"+str(e))
             d.exec_()
@@ -122,6 +122,8 @@ class TagEditor(QDialog, Ui_TagEditor):
         tags = unicode(self.add_tag_input.text()).split(',')
         for tag in tags:
             tag = tag.strip()
+            if not tag:
+                continue
             for item in self.available_tags.findItems(tag, Qt.MatchFixedString):
                 self.available_tags.takeItem(self.available_tags.row(item))
             if tag not in self.tags:
@@ -237,7 +237,7 @@ class %(classname)s(%(base_class)s):

         try:
             compile_recipe(src)
-        except Exception, err:
+        except Exception as err:
             error_dialog(self, _('Invalid input'),
                     _('<p>Could not create recipe. Error:<br>%s')%str(err)).exec_()
             return
@@ -246,7 +246,7 @@ class %(classname)s(%(base_class)s):
         src = unicode(self.source_code.toPlainText())
         try:
             title = compile_recipe(src).title
-        except Exception, err:
+        except Exception as err:
             error_dialog(self, _('Invalid input'),
                     _('<p>Could not create recipe. Error:<br>%s')%str(err)).exec_()
             return
@@ -333,7 +333,7 @@ class %(classname)s(%(base_class)s):
         try:
             profile = open(file, 'rb').read().decode('utf-8')
             title = compile_recipe(profile).title
-        except Exception, err:
+        except Exception as err:
             error_dialog(self, _('Invalid input'),
                     _('<p>Could not create recipe. Error:<br>%s')%str(err)).exec_()
             return
@@ -35,7 +35,7 @@ class Worker(Thread): # {{{
         try:
             br = browser()
             br.retrieve(self.url, self.fpath, self.callback)
-        except Exception, e:
+        except Exception as e:
             self.err = as_unicode(e)
             import traceback
             self.tb = traceback.format_exc()
@@ -116,7 +116,7 @@ class Emailer(Thread): # {{{
             try:
                 self.sendmail(job)
                 break
-            except Exception, e:
+            except Exception as e:
                 if not self._run:
                     return
                 import traceback
@@ -398,7 +398,7 @@ class CcTemplateDelegate(QStyledItemDelegate): # {{{
         val = unicode(editor.textbox.toPlainText())
         try:
             validation_formatter.validate(val)
-        except Exception, err:
+        except Exception as err:
             error_dialog(self.parent(), _('Invalid template'),
                     '<p>'+_('The template %s is invalid:')%val + \
                     '<br>'+str(err), show=True)
@@ -640,18 +640,18 @@ class BooksModel(QAbstractTableModel): # {{{
                 return self.bool_yes_icon
             return self.bool_blank_icon

-        def text_type(r, mult=False, idx=-1):
+        def text_type(r, mult=None, idx=-1):
             text = self.db.data[r][idx]
-            if text and mult:
-                return QVariant(', '.join(sorted(text.split('|'),key=sort_key)))
+            if text and mult is not None:
+                if mult:
+                    return QVariant(u' & '.join(text.split('|')))
+                return QVariant(u', '.join(sorted(text.split('|'),key=sort_key)))
             return QVariant(text)

-        def decorated_text_type(r, mult=False, idx=-1):
+        def decorated_text_type(r, idx=-1):
             text = self.db.data[r][idx]
             if force_to_bool(text) is not None:
                 return None
-            if text and mult:
-                return QVariant(', '.join(sorted(text.split('|'),key=sort_key)))
             return QVariant(text)

         def number_type(r, idx=-1):
@@ -659,7 +659,7 @@ class BooksModel(QAbstractTableModel): # {{{

         self.dc = {
                    'title'    : functools.partial(text_type,
-                                idx=self.db.field_metadata['title']['rec_index'], mult=False),
+                                idx=self.db.field_metadata['title']['rec_index'], mult=None),
                    'authors'  : functools.partial(authors,
                                 idx=self.db.field_metadata['authors']['rec_index']),
                    'size'     : functools.partial(size,
@@ -671,14 +671,14 @@ class BooksModel(QAbstractTableModel): # {{{
                    'rating'   : functools.partial(rating_type,
                                 idx=self.db.field_metadata['rating']['rec_index']),
                    'publisher': functools.partial(text_type,
-                                idx=self.db.field_metadata['publisher']['rec_index'], mult=False),
+                                idx=self.db.field_metadata['publisher']['rec_index'], mult=None),
                    'tags'     : functools.partial(tags,
                                 idx=self.db.field_metadata['tags']['rec_index']),
                    'series'   : functools.partial(series_type,
                                 idx=self.db.field_metadata['series']['rec_index'],
                                 siix=self.db.field_metadata['series_index']['rec_index']),
                    'ondevice' : functools.partial(text_type,
-                                idx=self.db.field_metadata['ondevice']['rec_index'], mult=False),
+                                idx=self.db.field_metadata['ondevice']['rec_index'], mult=None),
                    }

         self.dc_decorator = {
@@ -692,11 +692,12 @@ class BooksModel(QAbstractTableModel): # {{{
             datatype = self.custom_columns[col]['datatype']
             if datatype in ('text', 'comments', 'composite', 'enumeration'):
                 mult=self.custom_columns[col]['is_multiple']
+                if mult is not None:
+                    mult = self.custom_columns[col]['display'].get('is_names', False)
                 self.dc[col] = functools.partial(text_type, idx=idx, mult=mult)
                 if datatype in ['text', 'composite', 'enumeration'] and not mult:
                     if self.custom_columns[col]['display'].get('use_decorations', False):
-                        self.dc[col] = functools.partial(decorated_text_type,
-                                idx=idx, mult=mult)
+                        self.dc[col] = functools.partial(decorated_text_type, idx=idx)
                         self.dc_decorator[col] = functools.partial(
                             bool_type_decorator, idx=idx,
                             bool_cols_are_tristate=
@@ -78,6 +78,7 @@ class BooksView(QTableView): # {{{
         self.pubdate_delegate = PubDateDelegate(self)
         self.tags_delegate = CompleteDelegate(self, ',', 'all_tags')
         self.authors_delegate = CompleteDelegate(self, '&', 'all_author_names', True)
+        self.cc_names_delegate = CompleteDelegate(self, '&', 'all_custom', True)
         self.series_delegate = TextDelegate(self)
         self.publisher_delegate = TextDelegate(self)
         self.text_delegate = TextDelegate(self)
@@ -410,6 +411,7 @@ class BooksView(QTableView): # {{{
         self.save_state()
         self._model.set_database(db)
         self.tags_delegate.set_database(db)
+        self.cc_names_delegate.set_database(db)
         self.authors_delegate.set_database(db)
         self.series_delegate.set_auto_complete_function(db.all_series)
         self.publisher_delegate.set_auto_complete_function(db.all_publishers)
@@ -431,12 +433,17 @@ class BooksView(QTableView): # {{{
                     self.setItemDelegateForColumn(cm.index(colhead), delegate)
                 elif cc['datatype'] == 'comments':
                     self.setItemDelegateForColumn(cm.index(colhead), self.cc_comments_delegate)
-                elif cc['datatype'] in ('text', 'series'):
+                elif cc['datatype'] == 'text':
                     if cc['is_multiple']:
-                        self.setItemDelegateForColumn(cm.index(colhead), self.tags_delegate)
+                        if cc['display'].get('is_names', False):
+                            self.setItemDelegateForColumn(cm.index(colhead),
+                                                          self.cc_names_delegate)
+                        else:
+                            self.setItemDelegateForColumn(cm.index(colhead),
+                                                          self.tags_delegate)
                     else:
                         self.setItemDelegateForColumn(cm.index(colhead), self.cc_text_delegate)
-                elif cc['datatype'] in ('int', 'float'):
+                elif cc['datatype'] in ('series', 'int', 'float'):
                     self.setItemDelegateForColumn(cm.index(colhead), self.cc_text_delegate)
                 elif cc['datatype'] == 'bool':
                     self.setItemDelegateForColumn(cm.index(colhead), self.cc_bool_delegate)
@@ -35,7 +35,7 @@ class RenderWorker(QThread):
             self.stream = None
             if self.aborted:
                 self.lrf = None
-        except Exception, err:
+        except Exception as err:
             self.lrf, self.stream = None, None
             self.exception = err
             self.formatted_traceback = traceback.format_exc()
@@ -399,7 +399,7 @@ def main(args=sys.argv):
 if __name__ == '__main__':
     try:
         sys.exit(main())
-    except Exception, err:
+    except Exception as err:
         if not iswindows: raise
         tb = traceback.format_exc()
         from PyQt4.QtGui import QErrorMessage
@@ -656,7 +656,7 @@ class Cover(ImageView): # {{{
             try:
                 cf = open(_file, "rb")
                 cover = cf.read()
-            except IOError, e:
+            except IOError as e:
                 d = error_dialog(self, _('Error reading file'),
                         _("<p>There was an error reading from file: <br /><b>")
                         + _file + "</b></p><br />"+str(e))
@@ -88,7 +88,7 @@ class DownloadMetadata(Thread):
     def run(self):
         try:
             self._run()
-        except Exception, e:
+        except Exception as e:
             self.exception = e
             self.tb = traceback.format_exc()

@@ -303,7 +303,7 @@ class MetadataSingleDialogBase(ResizableDialog):
                 return False
             self.books_to_refresh |= getattr(widget, 'books_to_refresh',
                     set([]))
-        except IOError, err:
+        except IOError as err:
             if err.errno == 13: # Permission denied
                 import traceback
                 fname = err.filename if err.filename else 'file'
@@ -34,7 +34,7 @@ class DBUSNotifier(Notifier):
             import dbus
             self.dbus = dbus
             self._notify = dbus.Interface(dbus.SessionBus().get_object(server, path), interface)
-        except Exception, err:
+        except Exception as err:
             self.ok = False
             self.err = str(err)

@@ -5,6 +5,8 @@ __license__ = 'GPL v3'
 __copyright__ = '2010, Kovid Goyal <kovid@kovidgoyal.net>'
 __docformat__ = 'restructuredtext en'

+import importlib
+
 from PyQt4.Qt import QIcon, Qt, QStringListModel, QVariant

 from calibre.gui2.preferences import ConfigWidgetBase, test_widget, AbortCommit
@@ -104,8 +106,8 @@ class OutputOptions(Base):
         for plugin in output_format_plugins():
             name = plugin.name.lower().replace(' ', '_')
             try:
-                output_widget = __import__('calibre.gui2.convert.'+name,
-                        fromlist=[1])
+                output_widget = importlib.import_module(
+                        'calibre.gui2.convert.'+name)
                 pw = output_widget.PluginWidget
                 self.conversion_widgets.append(pw)
             except ImportError:
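Several hunks in this commit swap the `__import__(name, fromlist=...)` idiom (where the throwaway fromlist only served to make the leaf module be returned) for `importlib.import_module(name)`, which returns the leaf module directly. A small standard-library sketch of the equivalence (illustration, not calibre code):

    import importlib

    # Both bind the submodule json.decoder; import_module is the clearer spelling.
    mod_old = __import__('json.decoder', fromlist=['decoder'])
    mod_new = importlib.import_module('json.decoder')
    assert mod_old is mod_new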
@@ -63,7 +63,7 @@ class CreateCustomColumn(QDialog, Ui_QCreateCustomColumn):
         for col, name in [('isbn', _('ISBN')), ('formats', _('Formats')),
                 ('last_modified', _('Modified Date')), ('yesno', _('Yes/No')),
                 ('tags', _('Tags')), ('series', _('Series')), ('rating',
-                    _('Rating'))]:
+                    _('Rating')), ('people', _("People's names"))]:
             text += ' <a href="col:%s">%s</a>,'%(col, name)
         text = text[:-1]
         self.shortcuts.setText(text)
@@ -125,6 +125,8 @@ class CreateCustomColumn(QDialog, Ui_QCreateCustomColumn):
             self.datatype_changed()
             if ct in ['text', 'composite', 'enumeration']:
                 self.use_decorations.setChecked(c['display'].get('use_decorations', False))
+            elif ct == '*text':
+                self.is_names.setChecked(c['display'].get('is_names', False))
         self.exec_()

     def shortcut_activated(self, url):
@@ -134,6 +136,7 @@ class CreateCustomColumn(QDialog, Ui_QCreateCustomColumn):
                 'tags' : 1,
                 'series': 3,
                 'rating': 8,
+                'people': 1,
             }.get(which, 10))
         self.column_name_box.setText(which)
         self.column_heading_box.setText({
@@ -143,7 +146,9 @@ class CreateCustomColumn(QDialog, Ui_QCreateCustomColumn):
                 'tags': _('My Tags'),
                 'series': _('My Series'),
                 'rating': _('My Rating'),
-                'last_modified':_('Modified Date')}[which])
+                'last_modified':_('Modified Date'),
+                'people': _('People')}[which])
+        self.is_names.setChecked(which == 'people')
         if self.composite_box.isVisible():
             self.composite_box.setText(
                 {
@@ -153,7 +158,6 @@ class CreateCustomColumn(QDialog, Ui_QCreateCustomColumn):
                 }[which])
             self.composite_sort_by.setCurrentIndex(2 if which == 'last_modified' else 0)

-
     def datatype_changed(self, *args):
         try:
             col_type = self.column_types[self.column_type_box.currentIndex()]['datatype']
@@ -167,6 +171,7 @@ class CreateCustomColumn(QDialog, Ui_QCreateCustomColumn):
         for x in ('box', 'default_label', 'label'):
             getattr(self, 'enum_'+x).setVisible(col_type == 'enumeration')
         self.use_decorations.setVisible(col_type in ['text', 'composite', 'enumeration'])
+        self.is_names.setVisible(col_type == '*text')

     def accept(self):
         col = unicode(self.column_name_box.text()).strip()
@@ -241,6 +246,8 @@ class CreateCustomColumn(QDialog, Ui_QCreateCustomColumn):
                     return self.simple_error('', _('The value "{0}" is in the '
                         'list more than once').format(l[i]))
             display_dict = {'enum_values': l}
+        elif col_type == 'text' and is_multiple:
+            display_dict = {'is_names': self.is_names.isChecked()}

         if col_type in ['text', 'composite', 'enumeration']:
             display_dict['use_decorations'] = self.use_decorations.checkState()
@@ -9,7 +9,7 @@
    <rect>
     <x>0</x>
     <y>0</y>
-    <width>603</width>
+    <width>831</width>
     <height>344</height>
    </rect>
   </property>
@@ -110,27 +110,37 @@
       </item>
       <item>
        <widget class="QCheckBox" name="use_decorations">
-        <property name="text">
-         <string>Show checkmarks</string>
-        </property>
         <property name="toolTip">
          <string>Show check marks in the GUI. Values of 'yes', 'checked', and 'true'
 will show a green check. Values of 'no', 'unchecked', and 'false' will show a red X.
 Everything else will show nothing.</string>
         </property>
+        <property name="text">
+         <string>Show checkmarks</string>
+        </property>
+       </widget>
+      </item>
+      <item>
+       <widget class="QCheckBox" name="is_names">
+        <property name="toolTip">
+         <string>Check this box if this column contains names, like the authors column.</string>
+        </property>
+        <property name="text">
+         <string>Contains names</string>
+        </property>
        </widget>
       </item>
       <item>
       <spacer name="horizontalSpacer_27">
-       <property name="orientation">
-        <enum>Qt::Horizontal</enum>
-       </property>
        <property name="sizePolicy">
        <sizepolicy hsizetype="Expanding" vsizetype="Fixed">
         <horstretch>10</horstretch>
         <verstretch>0</verstretch>
        </sizepolicy>
       </property>
+       <property name="orientation">
+        <enum>Qt::Horizontal</enum>
+       </property>
       <property name="sizeHint" stdset="0">
        <size>
         <width>20</width>
@@ -241,25 +251,25 @@ Everything else will show nothing.</string>
      </item>
      <item>
       <widget class="QCheckBox" name="composite_make_category">
-       <property name="text">
-        <string>Show in tags browser</string>
-       </property>
       <property name="toolTip">
        <string>If checked, this column will appear in the tags browser as a category</string>
       </property>
+       <property name="text">
+        <string>Show in tags browser</string>
+       </property>
      </widget>
     </item>
     <item>
      <spacer name="horizontalSpacer_24">
-      <property name="orientation">
-       <enum>Qt::Horizontal</enum>
-      </property>
      <property name="sizePolicy">
       <sizepolicy hsizetype="Expanding" vsizetype="Fixed">
        <horstretch>10</horstretch>
        <verstretch>0</verstretch>
      </sizepolicy>
     </property>
+      <property name="orientation">
+       <enum>Qt::Horizontal</enum>
+      </property>
     <property name="sizeHint" stdset="0">
      <size>
       <width>20</width>
@@ -64,8 +64,9 @@ class ConfigWidget(ConfigWidgetBase, Ui_Form):
         r('tags_browser_collapse_at', gprefs)

         choices = set([k for k in db.field_metadata.all_field_keys()
                 if db.field_metadata[k]['is_category'] and
-                   db.field_metadata[k]['datatype'] in ['text', 'series', 'enumeration']])
+                   (db.field_metadata[k]['datatype'] in ['text', 'series', 'enumeration']) and
+                   not db.field_metadata[k]['display'].get('is_names', False)])
         choices -= set(['authors', 'publisher', 'formats', 'news', 'identifiers'])
         choices |= set(['search'])
         self.opt_categories_using_hierarchy.update_items_cache(choices)
@@ -251,7 +251,7 @@ class ConfigWidget(ConfigWidgetBase, Ui_Form):
             if d != 0:
                 try:
                     validation_formatter.validate(s)
-                except Exception, err:
+                except Exception as err:
                     error_dialog(self, _('Invalid template'),
                             '<p>'+_('The template %s is invalid:')%s + \
                             '<br>'+str(err), show=True)
@@ -57,7 +57,7 @@ class SaveTemplate(QWidget, Ui_Form):
                 return question_dialog(self, _('Constant template'),
                     _('The template contains no {fields}, so all '
                       'books will have the same name. Is this OK?'))
-        except Exception, err:
+        except Exception as err:
             error_dialog(self, _('Invalid template'),
                     '<p>'+_('The template %s is invalid:')%tmpl + \
                     '<br>'+str(err), show=True)
@@ -658,8 +658,7 @@ class TagTreeItem(object): # {{{

     def tag_data(self, role):
         tag = self.tag
-        if tag.category == 'authors' and \
-                tweaks['categories_use_field_for_author_name'] == 'author_sort':
+        if tag.use_sort_as_name:
             name = tag.sort
             tt_author = True
         else:
@@ -1275,6 +1274,7 @@ class TagsModel(QAbstractItemModel): # {{{
             if len(components) == 0 or '.'.join(components) != tag.original_name:
                 components = [tag.original_name]
             if (not tag.is_hierarchical) and (in_uc or
+                    (fm['is_custom'] and fm['display'].get('is_names', False)) or
                     key in ['authors', 'publisher', 'news', 'formats', 'rating'] or
                     key not in self.db.prefs.get('categories_using_hierarchy', []) or
                     len(components) == 1):
@@ -36,7 +36,7 @@ class Lookup(QThread):
     def run(self):
         try:
             self.define()
-        except Exception, e:
+        except Exception as e:
             import traceback
             self.exception = e
             self.traceback = traceback.format_exc()
@@ -97,7 +97,7 @@ class FilenamePattern(QWidget, Ui_Form):
     def do_test(self):
         try:
             pat = self.pattern()
-        except Exception, err:
+        except Exception as err:
             error_dialog(self, _('Invalid regular expression'),
                     _('Invalid regular expression: %s')%err).exec_()
             return
@@ -565,7 +565,7 @@ def move_library(oldloc, newloc, parent, callback_on_complete):
     # Try to load existing library at new location
     try:
         LibraryDatabase2(newloc)
-    except Exception, err:
+    except Exception as err:
         det = traceback.format_exc()
         error_dialog(parent, _('Invalid database'),
                 _('<p>An invalid library already exists at '
@@ -577,7 +577,7 @@ def move_library(oldloc, newloc, parent, callback_on_complete):
         else:
             callback(newloc)
             return
-    except Exception, err:
+    except Exception as err:
         det = traceback.format_exc()
         error_dialog(parent, _('Could not move library'),
                 unicode(err), det, show=True)
@@ -15,7 +15,7 @@ from calibre.utils.config import tweaks, prefs
 from calibre.utils.date import parse_date, now, UNDEFINED_DATE
 from calibre.utils.search_query_parser import SearchQueryParser
 from calibre.utils.pyparsing import ParseException
-from calibre.ebooks.metadata import title_sort
+from calibre.ebooks.metadata import title_sort, author_to_author_sort
 from calibre.ebooks.metadata.opf2 import metadata_to_opf
 from calibre import prints

@@ -1023,7 +1023,11 @@ class SortKeyGenerator(object):
                 if val:
                     sep = fm['is_multiple']
                     if sep:
-                        val = sep.join(sorted(val.split(sep),
+                        if fm['display'].get('is_names', False):
+                            val = sep.join(
+                                [author_to_author_sort(v) for v in val.split(sep)])
+                        else:
+                            val = sep.join(sorted(val.split(sep),
                                               key=self.string_sort_key))
                     val = self.string_sort_key(val)

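So when a names column is sorted, each value is first passed through calibre's author_to_author_sort() (imported above), making 'John Smith' sort as 'Smith, John'. A hedged sketch of the idea, using a simplified stand-in for that helper (the real function lives in calibre.ebooks.metadata and honours tweaks):

    def simple_author_sort(name):
        # simplified stand-in for calibre.ebooks.metadata.author_to_author_sort
        parts = name.split()
        return name if len(parts) < 2 else '%s, %s' % (parts[-1], ' '.join(parts[:-1]))

    val = 'Jane Doe|John Smith'        # stored, pipe-separated names
    sort_text = '|'.join(simple_author_sort(v) for v in val.split('|'))
    print(sort_text)                   # Doe, Jane|Smith, John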
@@ -117,7 +117,7 @@ class CustomColumns(object):
             if x is None:
                 return []
             if isinstance(x, (str, unicode, bytes)):
-                x = x.split(',')
+                x = x.split('&' if d['display'].get('is_names', False) else',')
             x = [y.strip() for y in x if y.strip()]
             x = [y.decode(preferred_encoding, 'replace') if not isinstance(y,
                 unicode) else y for y in x]
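Because people's names routinely contain commas ('Doe, Jane'), values entered into a names column are now split on '&' rather than ','. A minimal sketch of the parsing rule (illustration only, not the actual CustomColumns code):

    def split_multiple(raw, is_names):
        sep = '&' if is_names else ','
        return [part.strip() for part in raw.split(sep) if part.strip()]

    print(split_multiple('Doe, Jane & Smith, John', True))   # ['Doe, Jane', 'Smith, John']
    print(split_multiple('fiction, classics', False))        # ['fiction', 'classics']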
@@ -482,8 +482,11 @@ class CustomColumns(object):
         set_val = val if data['is_multiple'] else [val]
         existing = getter()
         if not existing:
-            existing = []
-        for x in set(set_val) - set(existing):
+            existing = set([])
+        else:
+            existing = set(existing)
+        # preserve the order in set_val
+        for x in [v for v in set_val if v not in existing]:
             # normalized types are text and ratings, so we can do this check
             # to see if we need to re-add the value
             if not x:
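The old loop iterated over set(set_val) - set(existing), which throws away the order the user typed the values in; the new loop keeps set_val's order while still skipping values that are already present. A small sketch of the same idea:

    def new_values_in_order(set_val, existing):
        existing = set(existing or [])
        # keep set_val's order, drop anything already present
        return [v for v in set_val if v not in existing]

    print(new_values_in_order(['gamma', 'alpha', 'beta'], ['alpha']))  # ['gamma', 'beta']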
@@ -48,7 +48,7 @@ class Tag(object):

     def __init__(self, name, id=None, count=0, state=0, avg=0, sort=None,
                  tooltip=None, icon=None, category=None, id_set=None,
-                 is_editable = True, is_searchable=True):
+                 is_editable = True, is_searchable=True, use_sort_as_name=False):
         self.name = self.original_name = name
         self.id = id
         self.count = count
@@ -59,6 +59,7 @@ class Tag(object):
         self.id_set = id_set if id_set is not None else set([])
         self.avg_rating = avg/2.0 if avg is not None else 0
         self.sort = sort
+        self.use_sort_as_name = use_sort_as_name
         if self.avg_rating > 0:
             if tooltip:
                 tooltip = tooltip + ': '
@@ -1323,6 +1324,11 @@ class LibraryDatabase2(LibraryDatabase, SchemaUpgrade, CustomColumns):
                 for l in list:
                     (id, val) = (l[0], l[1])
                     tids[category][val] = (id, '{0:05.2f}'.format(val))
+            elif cat['datatype'] == 'text' and cat['is_multiple'] and \
+                    cat['display'].get('is_names', False):
+                for l in list:
+                    (id, val) = (l[0], l[1])
+                    tids[category][val] = (id, author_to_author_sort(val))
             else:
                 for l in list:
                     (id, val) = (l[0], l[1])
@@ -1480,11 +1486,20 @@ class LibraryDatabase2(LibraryDatabase, SchemaUpgrade, CustomColumns):
                 reverse=True
             items.sort(key=kf, reverse=reverse)

+            if tweaks['categories_use_field_for_author_name'] == 'author_sort' and\
+                    (category == 'authors' or
+                     (cat['display'].get('is_names', False) and
+                      cat['is_custom'] and cat['is_multiple'] and
+                      cat['datatype'] == 'text')):
+                use_sort_as_name = True
+            else:
+                use_sort_as_name = False
             is_editable = category not in ['news', 'rating']
             categories[category] = [tag_class(formatter(r.n), count=r.c, id=r.id,
                                         avg=avgr(r), sort=r.s, icon=icon,
                                         tooltip=tooltip, category=category,
-                                        id_set=r.id_set, is_editable=is_editable)
+                                        id_set=r.id_set, is_editable=is_editable,
+                                        use_sort_as_name=use_sort_as_name)
                                     for r in items]

         #print 'end phase "tags list":', time.clock() - last, 'seconds'
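get_categories() now decides once per category whether its tags should display their sort value instead of their name: authors always qualify, and so does any custom, multi-valued text column flagged as containing names, provided the categories_use_field_for_author_name tweak is 'author_sort'. A condensed sketch of that predicate (the field-metadata keys mirror the hunk above; the default tweak value here is an assumption of the example):

    def use_sort_as_name(category, cat, tweak_value='author_sort'):
        # cat is the field-metadata entry for the category, as in the hunk above
        if tweak_value != 'author_sort':
            return False
        if category == 'authors':
            return True
        return bool(cat.get('is_custom') and cat.get('is_multiple') and
                    cat.get('datatype') == 'text' and
                    cat.get('display', {}).get('is_names', False))

    print(use_sort_as_name('authors', {}))                                  # True
    print(use_sort_as_name('#people', {'is_custom': True, 'is_multiple': True,
                                       'datatype': 'text',
                                       'display': {'is_names': True}}))     # True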
@@ -222,7 +222,7 @@ class LibraryServer(ContentServer, MobileServer, XMLServer, OPDSServer, Cache,
             # cherrypy.engine.signal_handler.subscribe()

             cherrypy.engine.block()
-        except Exception, e:
+        except Exception as e:
             self.exception = e
         finally:
             self.is_running = False
@@ -15,7 +15,7 @@ from calibre import isbytestring, force_unicode, fit_image, \
         prepare_string_for_xml
 from calibre.utils.ordered_dict import OrderedDict
 from calibre.utils.filenames import ascii_filename
-from calibre.utils.config import prefs, tweaks
+from calibre.utils.config import prefs
 from calibre.utils.icu import sort_key
 from calibre.utils.magick import Image
 from calibre.library.comments import comments_to_html
@@ -155,8 +155,7 @@ def get_category_items(category, items, restriction, datatype, prefix): # {{{
                 '<div>{1}</div>'
                 '<div>{2}</div></div>')
         rating, rstring = render_rating(i.avg_rating, prefix)
-        if i.category == 'authors' and \
-                tweaks['categories_use_field_for_author_name'] == 'author_sort':
+        if i.use_sort_as_name:
             name = xml(i.sort)
         else:
             name = xml(i.name)
@@ -696,7 +695,10 @@ class BrowseServer(object):
                             xml(href, True),
                             xml(val if len(dbtags) == 1 else tag.name),
                             xml(key, True)))
-                join = ' & ' if key == 'authors' else ', '
+                join = ' & ' if key == 'authors' or \
+                                (fm['is_custom'] and
+                                 fm['display'].get('is_names', False)) \
+                             else ', '
                 args[key] = join.join(vals)
                 added_key = True
             if not added_key:
@@ -169,7 +169,7 @@ class ContentServer(object):
                 return cover
             return save_cover_data_to(img, 'img.jpg', return_data=True,
                     resize_to=(width, height))
-        except Exception, err:
+        except Exception as err:
             import traceback
             cherrypy.log.error('Failed to generate cover:')
             cherrypy.log.error(traceback.print_exc())
@@ -69,7 +69,7 @@ def daemonize(stdin='/dev/null', stdout='/dev/null', stderr='/dev/null'):
         if pid > 0:
             # exit first parent
             sys.exit(0)
-    except OSError, e:
+    except OSError as e:
         print >>sys.stderr, "fork #1 failed: %d (%s)" % (e.errno, e.strerror)
         sys.exit(1)

@@ -84,7 +84,7 @@ def daemonize(stdin='/dev/null', stdout='/dev/null', stderr='/dev/null'):
         if pid > 0:
             # exit from second parent
             sys.exit(0)
-    except OSError, e:
+    except OSError as e:
         print >>sys.stderr, "fork #2 failed: %d (%s)" % (e.errno, e.strerror)
         sys.exit(1)

@@ -22,7 +22,6 @@ from calibre.library.server.utils import format_tag_string, Offsets
 from calibre import guess_type, prepare_string_for_xml as xml
 from calibre.utils.icu import sort_key
 from calibre.utils.ordered_dict import OrderedDict
-from calibre.utils.config import tweaks

 BASE_HREFS = {
         0 : '/stanza',
@@ -126,8 +125,7 @@ def CATALOG_ENTRY(item, item_kind, base_href, version, updated,
     count = (_('%d books') if item.count > 1 else _('%d book'))%item.count
     if ignore_count:
         count = ''
-    if item.category == 'authors' and \
-            tweaks['categories_use_field_for_author_name'] == 'author_sort':
+    if item.use_sort_as_name:
         name = item.sort
     else:
         name = item.name
@@ -193,7 +193,7 @@ def load_c_extensions(conn, debug=DEBUG):
         conn.load_extension(ext_path)
         conn.enable_load_extension(False)
         return True
-    except Exception, e:
+    except Exception as e:
         if debug:
             print 'Failed to load high performance sqlite C extension'
             print e
@@ -247,14 +247,14 @@ class DBThread(Thread):
                 if func == 'dump':
                     try:
                         ok, res = True, tuple(self.conn.iterdump())
-                    except Exception, err:
+                    except Exception as err:
                         ok, res = False, (err, traceback.format_exc())
                 elif func == 'create_dynamic_filter':
                     try:
                         f = DynamicFilter(args[0])
                         self.conn.create_function(args[0], 1, f)
                         ok, res = True, f
-                    except Exception, err:
+                    except Exception as err:
                         ok, res = False, (err, traceback.format_exc())
                 else:
                     bfunc = getattr(self.conn, func)
@@ -263,7 +263,7 @@ class DBThread(Thread):
                         try:
                             ok, res = True, bfunc(*args, **kwargs)
                             break
-                        except OperationalError, err:
+                        except OperationalError as err:
                             # Retry if unable to open db file
                             e = str(err)
                             if 'unable to open' not in e or i == 2:
@@ -273,10 +273,10 @@ class DBThread(Thread):
                                     reprlib.repr(kwargs))
                                 raise
                             time.sleep(0.5)
-                        except Exception, err:
+                        except Exception as err:
                             ok, res = False, (err, traceback.format_exc())
                 self.results.put((ok, res))
-        except Exception, err:
+        except Exception as err:
             self.unhandled_error = (err, traceback.format_exc())

 class DatabaseException(Exception):
@@ -3,7 +3,7 @@ __copyright__ = '2008, Kovid Goyal <kovid at kovidgoyal.net>'

 ''' Post installation script for linux '''

-import sys, os, cPickle, textwrap, stat
+import sys, os, cPickle, textwrap, stat, importlib
 from subprocess import check_call

 from calibre import __appname__, prints, guess_type
@@ -59,7 +59,7 @@ for x in {manifest!r}:
             shutil.rmtree(x)
         else:
             os.unlink(x)
-    except Exception, e:
+    except Exception as e:
         print 'Failed to delete', x
         print '\t', e

@@ -285,7 +285,7 @@ class PostInstall:

                 complete -o nospace -C calibre-complete ebook-convert
                 '''))
-        except TypeError, err:
+        except TypeError as err:
             if 'resolve_entities' in str(err):
                 print 'You need python-lxml >= 2.0.5 for calibre'
                 sys.exit(1)
@@ -309,7 +309,7 @@ class PostInstall:
             for src in entry_points['console_scripts']:
                 prog, right = src.split('=')
                 prog = prog.strip()
-                module = __import__(right.split(':')[0].strip(), fromlist=['a'])
+                module = importlib.import_module(right.split(':')[0].strip())
                 parser = getattr(module, 'option_parser', None)
                 if parser is None:
                     continue
@@ -493,7 +493,16 @@ Most purchased EPUB books have `DRM <http://wiki.mobileread.com/wiki/DRM>`_. Thi
 I am getting a "Permission Denied" error?
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

-A permission denied error can occur because of many possible reasons, none of them having anything to do with |app|. You can get permission denied errors if you are using an SD card with write protect enabled. Or if you, or some program you used changed the file permissions of the files in question to read only. Or if there is a filesystem error on the device which caused your operating system to mount the filesystem in read only mode or mark a particular file as read only pending recovery. Or if the files have their owner set to a user other than you. Or if your file is open in another program. You will need to fix the underlying cause of the permissions error before resuming to use |app|. Read the error message carefully, see what file it points to and fix the permissions on that file.
+A permission denied error can occur because of many possible reasons, none of them having anything to do with |app|.
+
+  * You can get permission denied errors if you are using an SD card with write protect enabled.
+  * If you, or some program you used changed the file permissions of the files in question to read only.
+  * If there is a filesystem error on the device which caused your operating system to mount the filesystem in read only mode or mark a particular file as read only pending recovery.
+  * If the files have their owner set to a user other than you.
+  * If your file is open in another program.
+  * If the file resides on a device, you may have reached the limit of a maximum of 256 files in the root of the device. In this case you need to reformat the device/sd card referered to in the error message with a FAT32 filesystem, or delete some files from the SD card/device memory.
+
+You will need to fix the underlying cause of the permissions error before resuming to use |app|. Read the error message carefully, see what file it points to and fix the permissions on that file.

 Can I have the comment metadata show up on my reader?
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -510,7 +519,7 @@ You have two choices:

 How is |app| licensed?
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-|app| is licensed under the GNU General Public License v3 (an open source license). This means that you are free to redistribute |app| as long as you make the source code available. So if you want to put |app| on a CD with your product, you must also put the |app| source code on the CD. The source code is available for download `from googlecode <http://code.google.com/p/calibre-ebook/downloads/list>`_. You are free to use the results of conversions from |app| however you want. You cannot use code, libraries from |app| in your software without maing your software open source. For details, see `The GNU GPL v3 http://www.gnu.org/licenses/gpl.html`_.
+|app| is licensed under the GNU General Public License v3 (an open source license). This means that you are free to redistribute |app| as long as you make the source code available. So if you want to put |app| on a CD with your product, you must also put the |app| source code on the CD. The source code is available for download `from googlecode <http://code.google.com/p/calibre-ebook/downloads/list>`_. You are free to use the results of conversions from |app| however you want. You cannot use code, libraries from |app| in your software without maing your software open source. For details, see `The GNU GPL v3 <http://www.gnu.org/licenses/gpl.html>`_.

 How do I run calibre from my USB stick?
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -137,7 +137,7 @@ to the recipe. Finally, lets replace some of the :term:`CSS` that we disabled ea

 With these additions, our recipe has become "production quality", indeed it is very close to the actual recipe used by |app| for the *BBC*, shown below:

-.. literalinclude:: ../../../resources/recipes/bbc.recipe
+.. literalinclude:: ../../../recipes/bbc.recipe

 This :term:`recipe` explores only the tip of the iceberg when it comes to the power of |app|. To explore more of the abilities of |app| we'll examine a more complex real life example in the next section.

@@ -105,8 +105,8 @@ After creating the saved search, you can use it as a restriction.
 .. image:: images/sg_restrict2.jpg
     :align: center

 Useful Template Functions
 -------------------------

 You might want to use the genre information in a template, such as with save to disk or send to device. The question might then be "How do I get the outermost genre name or names?" An |app| template function, subitems, is provided to make doing this easier.

@@ -114,4 +114,4 @@ After creating the saved search, you can use it as a restriction.

     {#genre:subitems(0,1)||/}{title} - {authors}

 See :ref:`The |app| template language <templatelangcalibre>` for more information templates and the subitem function.
@@ -863,7 +863,7 @@ class Engine(threading.Thread):
                 for socket in rr:
                     try:
                         self.readers[socket].handle_read()
-                    except NonLocalNameException, err:
+                    except NonLocalNameException as err:
                         print err
                     except UnicodeDecodeError:
                         if DEBUG:
@@ -316,7 +316,7 @@ class TemplateFormatter(string.Formatter):
         self.locals = {}
         try:
             ans = self.vformat(fmt, [], kwargs).strip()
-        except Exception, e:
+        except Exception as e:
             if DEBUG:
                 traceback.print_exc()
             ans = error_value + ' ' + e.message
@@ -6,7 +6,7 @@ __license__ = 'GPL v3'
 __copyright__ = '2009, Kovid Goyal <kovid@kovidgoyal.net>'
 __docformat__ = 'restructuredtext en'

-import os, cPickle, sys
+import os, cPickle, sys, importlib
 from multiprocessing.connection import Client
 from threading import Thread
 from Queue import Queue
@@ -75,7 +75,7 @@ class Progress(Thread):

 def get_func(name):
     module, func, notification = PARALLEL_FUNCS[name]
-    module = __import__(module, fromlist=[1])
+    module = importlib.import_module(module)
     func = getattr(module, func)
     return func, notification

@@ -32,7 +32,7 @@ class WindowsExclFile(object):
                     None, #No template file
                 )
                 break
-            except pywintypes.error, err:
+            except pywintypes.error as err:
                 if getattr(err, 'args', [-1])[0] in (0x20, 0x21):
                     time.sleep(1)
                     continue
@@ -56,7 +56,7 @@ def set_metadata(stream, mi):
         try:
             p.wait()
             break
-        except OSError, e:
+        except OSError as e:
             if e.errno == errno.EINTR:
                 continue
             else:
@@ -76,7 +76,7 @@ def sendmail_direct(from_, to, msg, timeout, localhost, verbose,
             s.connect(host, 25)
             s.sendmail(from_, [to], msg)
             return s.quit()
-        except Exception, e:
+        except Exception as e:
             last_error, last_traceback = e, traceback.format_exc()
     if last_error is not None:
         print last_traceback

[The diff of one file was suppressed because it is too large.]

@@ -14,7 +14,7 @@ from contextlib import nested, closing


 from calibre import browser, __appname__, iswindows, \
-                    strftime, preferred_encoding
+                    strftime, preferred_encoding, as_unicode
 from calibre.ebooks.BeautifulSoup import BeautifulSoup, NavigableString, CData, Tag
 from calibre.ebooks.metadata.opf2 import OPFCreator
 from calibre import entity_to_unicode
@@ -986,8 +986,8 @@ class BasicNewsRecipe(Recipe):
         self.cover_path = None
         try:
             cu = self.get_cover_url()
-        except Exception, err:
-            self.log.error(_('Could not download cover: %s')%str(err))
+        except Exception as err:
+            self.log.error(_('Could not download cover: %s')%as_unicode(err))
             self.log.debug(traceback.format_exc())
         else:
             if not cu:
@@ -1318,11 +1318,11 @@ class BasicNewsRecipe(Recipe):
                     oldest_article=self.oldest_article,
                     max_articles_per_feed=self.max_articles_per_feed,
                     get_article_url=self.get_article_url))
-            except Exception, err:
+            except Exception as err:
                 feed = Feed()
                 msg = 'Failed feed: %s'%(title if title else url)
                 feed.populate_from_preparsed_feed(msg, [])
-                feed.description = repr(err)
+                feed.description = as_unicode(err)
                 parsed_feeds.append(feed)
                 self.log.exception(msg)

@@ -1468,7 +1468,7 @@ class CalibrePeriodical(BasicNewsRecipe):
                 'http://news.calibre-ebook.com/subscribed_files/%s/0/temp.downloaded_recipe'
                 % self.calibre_periodicals_slug
                 ).read()
-        except Exception, e:
+        except Exception as e:
             if hasattr(e, 'getcode') and e.getcode() == 403:
                 raise DownloadDenied(
                         _('You do not have permission to download this issue.'
@@ -210,7 +210,7 @@ class RecursiveFetcher(object):
             with closing(open_func(url, timeout=self.timeout)) as f:
                 data = response(f.read()+f.read())
                 data.newurl = f.geturl()
-        except urllib2.URLError, err:
+        except urllib2.URLError as err:
             if hasattr(err, 'code') and responses.has_key(err.code):
                 raise FetchError, responses[err.code]
             if getattr(err, 'reason', [0])[0] == 104 or \