commit 6a3d5f3af4
merge from trunk
@@ -1,6 +1,6 @@

__license__ = 'GPL v3'
__copyright__ = '2008-2010, Darko Miletic <darko.miletic at gmail.com>'
__copyright__ = '2008-2011, Darko Miletic <darko.miletic at gmail.com>'
'''
novosti.rs
'''
@@ -21,10 +21,12 @@ class Novosti(BasicNewsRecipe):
    encoding = 'utf-8'
    language = 'sr'
    publication_type = 'newspaper'
    masthead_url = 'http://www.novosti.rs/images/basic/logo-print.png'
    extra_css = """ @font-face {font-family: "sans1";src:url(res:///opt/sony/ebook/FONT/tt0003m_.ttf)}
                    .article_description,body{font-family: Arial,Helvetica,sans1,sans-serif}
                    .author{font-size: small}
                    .articleLead{font-size: large; font-weight: bold}
                    img{display: block; margin-bottom: 1em; margin-top: 1em}
                """

    conversion_options = {
@@ -32,23 +34,58 @@ class Novosti(BasicNewsRecipe):
        , 'tags' : category
        , 'publisher' : publisher
        , 'language' : language
        , 'pretty_print' : True
        }

    preprocess_regexps = [(re.compile(u'\u0110'), lambda match: u'\u00D0')]

    keep_only_tags = [dict(attrs={'class':['articleTitle','author','articleLead','articleBody']})]
    remove_tags = [dict(name=['embed','object','iframe','base','link','meta'])]
    feeds = [(u'Vesti', u'http://www.novosti.rs/rss/rss-vesti')]
    keep_only_tags = [dict(attrs={'class':['articleTitle','articleInfo','articleLead','singlePhoto fl','articleBody']})]
    remove_tags = [
        dict(name=['embed','object','iframe','base','link','meta'])
        ,dict(name='a', attrs={'class':'loadComments topCommentsLink'})
    ]
    remove_attributes = ['lang','xmlns:fb']

    feeds = [
        (u'Politika' , u'http://www.novosti.rs/rss/2-Sve%20vesti')
        ,(u'Drustvo' , u'http://www.novosti.rs/rss/1-Sve%20vesti')
        ,(u'Ekonomija' , u'http://www.novosti.rs/rss/3-Sve%20vesti')
        ,(u'Hronika' , u'http://www.novosti.rs/rss/4-Sve%20vesti')
        ,(u'Dosije' , u'http://www.novosti.rs/rss/5-Sve%20vesti')
        ,(u'Reportaze' , u'http://www.novosti.rs/rss/6-Sve%20vesti')
        ,(u'Tehnologije' , u'http://www.novosti.rs/rss/35-Sve%20vesti')
        ,(u'Zanimljivosti', u'http://www.novosti.rs/rss/26-Sve%20vesti')
        ,(u'Auto' , u'http://www.novosti.rs/rss/50-Sve%20vesti')
        ,(u'Sport' , u'http://www.novosti.rs/rss/11|47|12|14|13-Sve%20vesti')
        ,(u'Svet' , u'http://www.novosti.rs/rss/7-Sve%20vesti')
        ,(u'Region' , u'http://www.novosti.rs/rss/8-Sve%20vesti')
        ,(u'Dijaspora' , u'http://www.novosti.rs/rss/9-Sve%20vesti')
        ,(u'Spektakl' , u'http://www.novosti.rs/rss/10-Sve%20vesti')
        ,(u'Kultura' , u'http://www.novosti.rs/rss/31-Sve%20vesti')
        ,(u'Srbija' , u'http://www.novosti.rs/rss/15-Sve%20vesti')
        ,(u'Beograd' , u'http://www.novosti.rs/rss/16-Sve%20vesti')
        ,(u'Zivot+' , u'http://www.novosti.rs/rss/24|33|34|25|20|18|32|19-Sve%20vesti')
        ,(u'Turizam' , u'http://www.novosti.rs/rss/36-Sve%20vesti')
    ]

    def preprocess_html(self, soup):
        for item in soup.findAll(style=True):
            del item['style']
        for item in soup.findAll('span', attrs={'class':'author'}):
            item.name='p'
        for item in soup.findAll('a'):
            limg = item.find('img')
            if item.string is not None:
                str = item.string
                item.replaceWith(str)
            else:
                if limg:
                    item.name = 'div'
                    item.attrs = []
                else:
                    str = self.tag_to_string(item)
                    item.replaceWith(str)
        for item in soup.findAll('img'):
            if not item.has_key('alt'):
                item['alt'] = 'image'
        return soup
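Note: the preprocess_regexps rule above maps U+0110 (Đ) to U+00D0 (Ð), presumably so the embedded Sony font can render the character. A minimal standalone check of that substitution (plain Python, independent of calibre; the sample string is made up):

    # Every occurrence of U+0110 (D with stroke) is replaced by U+00D0 (Eth).
    import re

    pattern = re.compile(u'\u0110')
    sample = u'\u0110or\u0111e'              # hypothetical input text, 'Đorđe'
    print(pattern.sub(u'\u00D0', sample))    # only the uppercase character changes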
@@ -31,23 +31,22 @@ class TabuRo(BasicNewsRecipe):
        }

    keep_only_tags = [
        dict(name='div', attrs={'id':'Article'}),
        dict(name='h2', attrs={'class':'articol_titlu'}),
        dict(name='div', attrs={'class':'poza_articol_featured'}),
        dict(name='div', attrs={'class':'articol_text'})
    ]

    remove_tags = [
        dict(name='div', attrs={'id':['advertisementArticle']}),
        dict(name='div', attrs={'class':'voting_number'}),
        dict(name='div', attrs={'id':'number_votes'}),
        dict(name='div', attrs={'id':'rating_one'}),
        dict(name='div', attrs={'class':'float: right;'})
        dict(name='div', attrs={'class':'asemanatoare'})
    ]

    remove_tags_after = [
        dict(name='div', attrs={'id':'comments'}),
        dict(name='div', attrs={'class':'asemanatoare'})
    ]

    feeds = [
        (u'Feeds', u'http://www.tabu.ro/rss_all.xml')
        (u'Feeds', u'http://www.tabu.ro/feed/')
    ]

    def preprocess_html(self, soup):
@@ -625,8 +625,9 @@ if test_eight_code:
    from calibre.ebooks.metadata.sources.google import GoogleBooks
    from calibre.ebooks.metadata.sources.amazon import Amazon
    from calibre.ebooks.metadata.sources.openlibrary import OpenLibrary
    from calibre.ebooks.metadata.sources.isbndb import ISBNDB

    plugins += [GoogleBooks, Amazon, OpenLibrary]
    plugins += [GoogleBooks, Amazon, OpenLibrary, ISBNDB]

    # }}}
else:
@@ -244,7 +244,7 @@ class EEEREADER(USBMS):
    FORMATS = ['epub', 'fb2', 'txt', 'pdf']

    VENDOR_ID = [0x0b05]
    PRODUCT_ID = [0x178f]
    PRODUCT_ID = [0x178f, 0x17a1]
    BCD = [0x0319]

    EBOOK_DIR_MAIN = EBOOK_DIR_CARD_A = 'Book'
@@ -26,7 +26,7 @@ class ParserError(ValueError):
    pass

BOOK_EXTENSIONS = ['lrf', 'rar', 'zip', 'rtf', 'lit', 'txt', 'txtz', 'text', 'htm', 'xhtm',
                   'html', 'xhtml', 'pdf', 'pdb', 'pdr', 'prc', 'mobi', 'azw', 'doc',
                   'html', 'htmlz', 'xhtml', 'pdf', 'pdb', 'pdr', 'prc', 'mobi', 'azw', 'doc',
                   'epub', 'fb2', 'djvu', 'lrx', 'cbr', 'cbz', 'cbc', 'oebzip',
                   'rb', 'imp', 'odt', 'chm', 'tpz', 'azw1', 'pml', 'pmlz', 'mbp', 'tan', 'snb']
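For context, BOOK_EXTENSIONS is the whitelist calibre consults when deciding whether a file name looks like an e-book (this commit adds 'htmlz' to it). A rough sketch of how such a whitelist is typically applied; the helper name and the shortened list are made up for illustration:

    import os

    # Hypothetical helper: True if the filename's extension is in the whitelist.
    def looks_like_book(path, extensions=('epub', 'mobi', 'htmlz', 'txtz', 'pdf')):
        ext = os.path.splitext(path)[1].lower().lstrip('.')
        return ext in extensions

    print(looks_like_book('story.htmlz'))  # True, 'htmlz' is on the list
    print(looks_like_book('notes.ini'))    # False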
@@ -12,7 +12,7 @@ from lxml import etree

from calibre.customize.conversion import OutputFormatPlugin, \
    OptionRecommendation
from calibre.ebooks.oeb.base import OEB_IMAGES
from calibre.ebooks.oeb.base import OEB_IMAGES, SVG_MIME
from calibre.ptempfile import TemporaryDirectory
from calibre.utils.zipfile import ZipFile

@@ -71,9 +71,13 @@ class HTMLZOutput(OutputFormatPlugin):
            os.makedirs(os.path.join(tdir, 'images'))
            for item in oeb_book.manifest:
                if item.media_type in OEB_IMAGES and item.href in images:
                    if item.media_type == SVG_MIME:
                        data = unicode(etree.tostring(item.data, encoding=unicode))
                    else:
                        data = item.data
                    fname = os.path.join(tdir, 'images', images[item.href])
                    with open(fname, 'wb') as img:
                        img.write(item.data)
                        img.write(data)

            # Metadata
            with open(os.path.join(tdir, 'metadata.opf'), 'wb') as mdataf:
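The hunk above serializes SVG manifest items (stored as lxml element trees) before writing, while raster images are already raw bytes. A small standalone sketch of the same distinction, assuming only that lxml is installed; SVG_MIME is given its usual 'image/svg+xml' value here:

    from lxml import etree

    SVG_MIME = 'image/svg+xml'

    def image_bytes(media_type, data):
        # SVG items are parsed XML trees, so serialize them first;
        # raster images are already byte strings and can be written verbatim.
        if media_type == SVG_MIME:
            return etree.tostring(data, encoding='utf-8')
        return data

    svg = etree.fromstring('<svg xmlns="http://www.w3.org/2000/svg"/>')
    with open('out.svg', 'wb') as f:
        f.write(image_bytes(SVG_MIME, svg))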
@@ -8,12 +8,13 @@ Read meta information from extZ (TXTZ, HTMLZ...) files.
'''

import os
import posixpath

from cStringIO import StringIO

from calibre.ebooks.metadata import MetaInformation
from calibre.ebooks.metadata.opf2 import OPF, metadata_to_opf
from calibre.ptempfile import TemporaryDirectory
from calibre.ebooks.metadata.opf2 import OPF
from calibre.ptempfile import PersistentTemporaryFile
from calibre.utils.zipfile import ZipFile, safe_replace

def get_metadata(stream, extract_cover=True):
@@ -23,16 +24,75 @@ def get_metadata(stream, extract_cover=True):
    mi = MetaInformation(_('Unknown'), [_('Unknown')])
    stream.seek(0)

    with TemporaryDirectory('_untxtz_mdata') as tdir:
    try:
        zf = ZipFile(stream)
        zf.extract('metadata.opf', tdir)
        with open(os.path.join(tdir, 'metadata.opf'), 'rb') as opff:
            mi = OPF(opff).to_book_metadata()
        with ZipFile(stream) as zf:
            opf_name = get_first_opf_name(zf)
            opf_stream = StringIO(zf.read(opf_name))
            opf = OPF(opf_stream)
            mi = opf.to_book_metadata()
            if extract_cover:
                cover_name = opf.raster_cover
                if cover_name:
                    mi.cover_data = ('jpg', zf.read(cover_name))
    except:
        return mi
    return mi

def set_metadata(stream, mi):
    opf = StringIO(metadata_to_opf(mi))
    safe_replace(stream, 'metadata.opf', opf)
    replacements = {}

    # Get the OPF in the archive.
    with ZipFile(stream) as zf:
        opf_path = get_first_opf_name(zf)
        opf_stream = StringIO(zf.read(opf_path))
        opf = OPF(opf_stream)

    # Cover.
    new_cdata = None
    try:
        new_cdata = mi.cover_data[1]
        if not new_cdata:
            raise Exception('no cover')
    except:
        try:
            new_cdata = open(mi.cover, 'rb').read()
        except:
            pass
    if new_cdata:
        raster_cover = opf.raster_cover
        if not raster_cover:
            raster_cover = 'cover.jpg'
        cpath = posixpath.join(posixpath.dirname(opf_path), raster_cover)
        new_cover = _write_new_cover(new_cdata, cpath)
        replacements[cpath] = open(new_cover.name, 'rb')

    # Update the metadata.
    opf.smart_update(mi, replace_metadata=True)
    newopf = StringIO(opf.render())
    safe_replace(stream, opf_path, newopf, extra_replacements=replacements)

    # Cleanup temporary files.
    try:
        if cpath is not None:
            replacements[cpath].close()
            os.remove(replacements[cpath].name)
    except:
        pass

def get_first_opf_name(zf):
    names = zf.namelist()
    opfs = []
    for n in names:
        if n.endswith('.opf') and '/' not in n:
            opfs.append(n)
    if not opfs:
        raise Exception('No OPF found')
    opfs.sort()
    return opfs[0]

def _write_new_cover(new_cdata, cpath):
    from calibre.utils.magick.draw import save_cover_data_to
    new_cover = PersistentTemporaryFile(suffix=os.path.splitext(cpath)[1])
    new_cover.close()
    save_cover_data_to(new_cdata, new_cover.name)
    return new_cover
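The get_first_opf_name() helper above defines the lookup convention for ZIP-based books: only .opf entries at the archive root are considered, and the alphabetically first one is used. A rough equivalent using only the standard-library zipfile module, for illustration rather than as calibre's implementation:

    import zipfile

    def first_root_opf(path):
        # Mirror of the convention above: only root-level .opf entries count,
        # and the alphabetically first one wins.
        with zipfile.ZipFile(path) as zf:
            opfs = sorted(n for n in zf.namelist()
                          if n.endswith('.opf') and '/' not in n)
        if not opfs:
            raise ValueError('No OPF found in %s' % path)
        return opfs[0]

    # Usage: first_root_opf('book.htmlz') would typically return 'metadata.opf'.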
@@ -181,6 +181,10 @@ class Source(Plugin):
    #: construct the configuration widget for this plugin
    options = ()

    #: A string that is displayed at the top of the config widget for this
    #: plugin
    config_help_message = None

    def __init__(self, *args, **kwargs):
        Plugin.__init__(self, *args, **kwargs)
@@ -76,6 +76,11 @@ def run_download(log, results, abort,
        (plugin, width, height, fmt, bytes)

    '''
    if title == _('Unknown'):
        title = None
    if authors == [_('Unknown')]:
        authors = None

    plugins = [p for p in metadata_plugins(['cover']) if p.is_configured()]

    rq = Queue()
@@ -145,7 +150,7 @@ def download_cover(log,
    Synchronous cover download. Returns the "best" cover as per user
    prefs/cover resolution.

    Return cover is a tuple: (plugin, width, height, fmt, data)
    Returned cover is a tuple: (plugin, width, height, fmt, data)

    Returns None if no cover is found.
    '''
@@ -253,6 +253,10 @@ def merge_identify_results(result_map, log):

def identify(log, abort, # {{{
        title=None, authors=None, identifiers={}, timeout=30):
    if title == _('Unknown'):
        title = None
    if authors == [_('Unknown')]:
        authors = None
    start_time = time.time()
    plugins = [p for p in metadata_plugins(['identify']) if p.is_configured()]
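Per the download_cover() docstring above, a successful result is a (plugin, width, height, fmt, data) tuple and None means no cover was found. An illustrative snippet showing how a caller might unpack such a result; the literal tuple here is a stand-in, not real output:

    # 'result' stands in for a download_cover() return value.
    result = ('SomePlugin', 600, 800, 'jpg', b'\xff\xd8...')  # hypothetical
    if result is not None:
        plugin, width, height, fmt, data = result
        # Write the raw image bytes using the reported format as the extension.
        with open('cover.' + fmt, 'wb') as f:
            f.write(data)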
@@ -7,7 +7,19 @@ __license__ = 'GPL v3'
__copyright__ = '2011, Kovid Goyal <kovid@kovidgoyal.net>'
__docformat__ = 'restructuredtext en'

from calibre.ebooks.metadata.sources.base import Source
from urllib import quote

from lxml import etree

from calibre.ebooks.metadata import check_isbn
from calibre.ebooks.metadata.sources.base import Source, Option
from calibre.ebooks.chardet import xml_to_unicode
from calibre.utils.cleantext import clean_ascii_chars
from calibre.utils.icu import lower
from calibre.ebooks.metadata.book.base import Metadata

BASE_URL = 'http://isbndb.com/api/books.xml?access_key=%s&page_number=1&results=subjects,authors,texts&'


class ISBNDB(Source):

@@ -18,6 +30,20 @@ class ISBNDB(Source):
    touched_fields = frozenset(['title', 'authors',
        'identifier:isbn', 'comments', 'publisher'])
    supports_gzip_transfer_encoding = True
    # Shortcut, since we have no cached cover URLS
    cached_cover_url_is_reliable = False

    options = (
            Option('isbndb_key', 'string', None, _('IsbnDB key:'),
                _('To use isbndb.com you have to sign up for a free account'
                    'at isbndb.com and get an access key.')),
            )

    config_help_message = '<p>'+_('To use metadata from isbndb.com you must sign'
            ' up for a free account and get an isbndb key and enter it below.'
            ' Instructions to get the key are '
            '<a href="http://isbndb.com/docs/api/30-keys.html">here</a>.')


    def __init__(self, *args, **kwargs):
        Source.__init__(self, *args, **kwargs)
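Queries are built from BASE_URL above by appending index/value pairs, which is what create_query() further down in this file does. A standalone sketch of the ISBN form of the query URL, using a made-up access key:

    BASE_URL = ('http://isbndb.com/api/books.xml?access_key=%s'
                '&page_number=1&results=subjects,authors,texts&')

    def isbn_query_url(access_key, isbn):
        # Same shape as the ISBN branch of create_query() below:
        # the base URL (key filled in) plus 'index1=isbn&value1=<isbn>'.
        return BASE_URL % access_key + 'index1=isbn&value1=' + isbn

    print(isbn_query_url('MY_FAKE_KEY', '9780061122415'))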
@ -35,9 +61,186 @@ class ISBNDB(Source):
|
||||
except:
|
||||
pass
|
||||
|
||||
self.isbndb_key = prefs['isbndb_key']
|
||||
@property
|
||||
def isbndb_key(self):
|
||||
return self.prefs['isbndb_key']
|
||||
|
||||
def is_configured(self):
|
||||
return self.isbndb_key is not None
|
||||
|
||||
def create_query(self, title=None, authors=None, identifiers={}): # {{{
|
||||
base_url = BASE_URL%self.isbndb_key
|
||||
isbn = check_isbn(identifiers.get('isbn', None))
|
||||
q = ''
|
||||
if isbn is not None:
|
||||
q = 'index1=isbn&value1='+isbn
|
||||
elif title or authors:
|
||||
tokens = []
|
||||
title_tokens = list(self.get_title_tokens(title))
|
||||
tokens += title_tokens
|
||||
author_tokens = self.get_author_tokens(authors,
|
||||
only_first_author=True)
|
||||
tokens += author_tokens
|
||||
tokens = [quote(t) for t in tokens]
|
||||
q = '+'.join(tokens)
|
||||
q = 'index1=combined&value1='+q
|
||||
|
||||
if not q:
|
||||
return None
|
||||
if isinstance(q, unicode):
|
||||
q = q.encode('utf-8')
|
||||
return base_url + q
|
||||
# }}}
|
||||
|
||||
def identify(self, log, result_queue, abort, title=None, authors=None, # {{{
|
||||
identifiers={}, timeout=30):
|
||||
if not self.is_configured():
|
||||
return
|
||||
query = self.create_query(title=title, authors=authors,
|
||||
identifiers=identifiers)
|
||||
if not query:
|
||||
err = 'Insufficient metadata to construct query'
|
||||
log.error(err)
|
||||
return err
|
||||
|
||||
results = []
|
||||
try:
|
||||
results = self.make_query(query, abort, title=title, authors=authors,
|
||||
identifiers=identifiers, timeout=timeout)
|
||||
except:
|
||||
err = 'Failed to make query to ISBNDb, aborting.'
|
||||
log.exception(err)
|
||||
return err
|
||||
|
||||
if not results and identifiers.get('isbn', False) and title and authors and \
|
||||
not abort.is_set():
|
||||
return self.identify(log, result_queue, abort, title=title,
|
||||
authors=authors, timeout=timeout)
|
||||
|
||||
for result in results:
|
||||
self.clean_downloaded_metadata(result)
|
||||
result_queue.put(result)
|
||||
|
||||
def parse_feed(self, feed, seen, orig_title, orig_authors, identifiers):
|
||||
|
||||
def tostring(x):
|
||||
if x is None:
|
||||
return ''
|
||||
return etree.tostring(x, method='text', encoding=unicode).strip()
|
||||
|
||||
orig_isbn = identifiers.get('isbn', None)
|
||||
title_tokens = list(self.get_title_tokens(orig_title))
|
||||
author_tokens = list(self.get_author_tokens(orig_authors))
|
||||
results = []
|
||||
|
||||
def ismatch(title, authors):
|
||||
authors = lower(' '.join(authors))
|
||||
title = lower(title)
|
||||
match = not title_tokens
|
||||
for t in title_tokens:
|
||||
if lower(t) in title:
|
||||
match = True
|
||||
break
|
||||
amatch = not author_tokens
|
||||
for a in author_tokens:
|
||||
if lower(a) in authors:
|
||||
amatch = True
|
||||
break
|
||||
if not author_tokens: amatch = True
|
||||
return match and amatch
|
||||
|
||||
bl = feed.find('BookList')
|
||||
if bl is None:
|
||||
err = tostring(etree.find('errormessage'))
|
||||
raise ValueError('ISBNDb query failed:' + err)
|
||||
total_results = int(bl.get('total_results'))
|
||||
shown_results = int(bl.get('shown_results'))
|
||||
for bd in bl.xpath('.//BookData'):
|
||||
isbn = check_isbn(bd.get('isbn13', bd.get('isbn', None)))
|
||||
if not isbn:
|
||||
continue
|
||||
if orig_isbn and isbn != orig_isbn:
|
||||
continue
|
||||
title = tostring(bd.find('Title'))
|
||||
if not title:
|
||||
continue
|
||||
authors = []
|
||||
for au in bd.xpath('.//Authors/Person'):
|
||||
au = tostring(au)
|
||||
if au:
|
||||
if ',' in au:
|
||||
ln, _, fn = au.partition(',')
|
||||
au = fn.strip() + ' ' + ln.strip()
|
||||
authors.append(au)
|
||||
if not authors:
|
||||
continue
|
||||
comments = tostring(bd.find('Summary'))
|
||||
if not comments:
|
||||
# Require comments, since without them the result is useless
|
||||
# anyway
|
||||
continue
|
||||
id_ = (title, tuple(authors))
|
||||
if id_ in seen:
|
||||
continue
|
||||
seen.add(id_)
|
||||
if not ismatch(title, authors):
|
||||
continue
|
||||
publisher = tostring(bd.find('PublisherText'))
|
||||
if not publisher: publisher = None
|
||||
if publisher and 'audio' in publisher.lower():
|
||||
continue
|
||||
mi = Metadata(title, authors)
|
||||
mi.isbn = isbn
|
||||
mi.publisher = publisher
|
||||
mi.comments = comments
|
||||
results.append(mi)
|
||||
return total_results, shown_results, results
|
||||
|
||||
def make_query(self, q, abort, title=None, authors=None, identifiers={},
|
||||
max_pages=10, timeout=30):
|
||||
page_num = 1
|
||||
parser = etree.XMLParser(recover=True, no_network=True)
|
||||
br = self.browser
|
||||
|
||||
seen = set()
|
||||
|
||||
candidates = []
|
||||
total_found = 0
|
||||
while page_num <= max_pages and not abort.is_set():
|
||||
url = q.replace('&page_number=1&', '&page_number=%d&'%page_num)
|
||||
page_num += 1
|
||||
raw = br.open_novisit(url, timeout=timeout).read()
|
||||
feed = etree.fromstring(xml_to_unicode(clean_ascii_chars(raw),
|
||||
strip_encoding_pats=True)[0], parser=parser)
|
||||
total, found, results = self.parse_feed(
|
||||
feed, seen, title, authors, identifiers)
|
||||
total_found += found
|
||||
candidates += results
|
||||
if total_found >= total or len(candidates) > 9:
|
||||
break
|
||||
|
||||
return candidates
|
||||
# }}}
|
||||
|
||||
if __name__ == '__main__':
|
||||
# To run these test use:
|
||||
# calibre-debug -e src/calibre/ebooks/metadata/sources/isbndb.py
|
||||
from calibre.ebooks.metadata.sources.test import (test_identify_plugin,
|
||||
title_test, authors_test)
|
||||
test_identify_plugin(ISBNDB.name,
|
||||
[
|
||||
|
||||
|
||||
(
|
||||
{'title':'Great Gatsby',
|
||||
'authors':['Fitzgerald']},
|
||||
[title_test('The great gatsby', exact=True),
|
||||
authors_test(['F. Scott Fitzgerald'])]
|
||||
),
|
||||
|
||||
(
|
||||
{'title': 'Flatland', 'authors':['Abbott']},
|
||||
[title_test('Flatland', exact=False)]
|
||||
),
|
||||
])
|
||||
|
||||
|
@@ -496,6 +496,10 @@ class MobiMLizer(object):
                vtag.append(child)
            return

        if tag == 'blockquote':
            old_mim = self.opts.mobi_ignore_margins
            self.opts.mobi_ignore_margins = False

        if text or tag in CONTENT_TAGS or tag in NESTABLE_TAGS:
            self.mobimlize_content(tag, text, bstate, istates)
        for child in elem:
@@ -511,6 +515,8 @@ class MobiMLizer(object):
            if tail:
                self.mobimlize_content(tag, tail, bstate, istates)

        if tag == 'blockquote':
            self.opts.mobi_ignore_margins = old_mim

        if bstate.content and style['page-break-after'] in PAGE_BREAKS:
            bstate.pbreak = True
@@ -310,6 +310,7 @@ class Serializer(object):
            if href not in id_offsets:
                self.logger.warn('Hyperlink target %r not found' % href)
                href, _ = urldefrag(href)
            if href in self.id_offsets:
                ioff = self.id_offsets[href]
                for hoff in hoffs:
                    buffer.seek(hoff)
@@ -357,6 +357,7 @@ class FileIconProvider(QFileIconProvider):
             'bmp' : 'bmp',
             'svg' : 'svg',
             'html' : 'html',
             'htmlz' : 'html',
             'htm' : 'html',
             'xhtml' : 'html',
             'xhtm' : 'html',
@ -22,6 +22,8 @@ from calibre.constants import preferred_encoding, filesystem_encoding
|
||||
from calibre.gui2.actions import InterfaceAction
|
||||
from calibre.gui2 import config, question_dialog
|
||||
from calibre.ebooks.metadata import MetaInformation
|
||||
from calibre.utils.config import test_eight_code
|
||||
from calibre.ebooks.metadata.sources.base import msprefs
|
||||
|
||||
def get_filters():
|
||||
return [
|
||||
@ -178,6 +180,19 @@ class AddAction(InterfaceAction):
|
||||
except IndexError:
|
||||
self.gui.library_view.model().books_added(self.isbn_add_dialog.value)
|
||||
self.isbn_add_dialog.accept()
|
||||
if test_eight_code:
|
||||
orig = msprefs['ignore_fields']
|
||||
new = list(orig)
|
||||
for x in ('title', 'authors'):
|
||||
if x in new:
|
||||
new.remove(x)
|
||||
msprefs['ignore_fields'] = new
|
||||
try:
|
||||
self.gui.iactions['Edit Metadata'].download_metadata(
|
||||
ids=self.add_by_isbn_ids)
|
||||
finally:
|
||||
msprefs['ignore_fields'] = orig
|
||||
else:
|
||||
orig = config['overwrite_author_title_metadata']
|
||||
config['overwrite_author_title_metadata'] = True
|
||||
try:
|
||||
|
@ -10,7 +10,7 @@ from functools import partial
|
||||
|
||||
from PyQt4.Qt import Qt, QMenu, QModelIndex
|
||||
|
||||
from calibre.gui2 import error_dialog, config
|
||||
from calibre.gui2 import error_dialog, config, Dispatcher
|
||||
from calibre.gui2.dialogs.metadata_single import MetadataSingleDialog
|
||||
from calibre.gui2.dialogs.metadata_bulk import MetadataBulkDialog
|
||||
from calibre.gui2.dialogs.confirm_delete import confirm
|
||||
@ -35,16 +35,23 @@ class EditMetadataAction(InterfaceAction):
|
||||
md.addAction(_('Edit metadata in bulk'),
|
||||
partial(self.edit_metadata, False, bulk=True))
|
||||
md.addSeparator()
|
||||
md.addAction(_('Download metadata and covers'),
|
||||
partial(self.download_metadata, False, covers=True),
|
||||
if test_eight_code:
|
||||
dall = self.download_metadata
|
||||
dident = partial(self.download_metadata, covers=False)
|
||||
dcovers = partial(self.download_metadata, identify=False)
|
||||
else:
|
||||
dall = partial(self.download_metadata_old, False, covers=True)
|
||||
dident = partial(self.download_metadata_old, False, covers=False)
|
||||
dcovers = partial(self.download_metadata_old, False, covers=True,
|
||||
set_metadata=False, set_social_metadata=False)
|
||||
|
||||
md.addAction(_('Download metadata and covers'), dall,
|
||||
Qt.ControlModifier+Qt.Key_D)
|
||||
md.addAction(_('Download only metadata'),
|
||||
partial(self.download_metadata, False, covers=False))
|
||||
md.addAction(_('Download only covers'),
|
||||
partial(self.download_metadata, False, covers=True,
|
||||
set_metadata=False, set_social_metadata=False))
|
||||
md.addAction(_('Download only metadata'), dident)
|
||||
md.addAction(_('Download only covers'), dcovers)
|
||||
if not test_eight_code:
|
||||
md.addAction(_('Download only social metadata'),
|
||||
partial(self.download_metadata, False, covers=False,
|
||||
partial(self.download_metadata_old, False, covers=False,
|
||||
set_metadata=False, set_social_metadata=True))
|
||||
self.metadata_menu = md
|
||||
|
||||
@ -73,7 +80,26 @@ class EditMetadataAction(InterfaceAction):
|
||||
self.qaction.setEnabled(enabled)
|
||||
self.action_merge.setEnabled(enabled)
|
||||
|
||||
def download_metadata(self, checked, covers=True, set_metadata=True,
|
||||
def download_metadata(self, identify=True, covers=True, ids=None):
|
||||
if ids is None:
|
||||
rows = self.gui.library_view.selectionModel().selectedRows()
|
||||
if not rows or len(rows) == 0:
|
||||
return error_dialog(self.gui, _('Cannot download metadata'),
|
||||
_('No books selected'), show=True)
|
||||
db = self.gui.library_view.model().db
|
||||
ids = [db.id(row.row()) for row in rows]
|
||||
from calibre.gui2.metadata.bulk_download2 import start_download
|
||||
start_download(self.gui, ids,
|
||||
Dispatcher(self.bulk_metadata_downloaded), identify, covers)
|
||||
|
||||
def bulk_metadata_downloaded(self, job):
|
||||
if job.failed:
|
||||
self.gui.job_exception(job, dialog_title=_('Failed to download metadata'))
|
||||
return
|
||||
from calibre.gui2.metadata.bulk_download2 import proceed
|
||||
proceed(self.gui, job)
|
||||
|
||||
def download_metadata_old(self, checked, covers=True, set_metadata=True,
|
||||
set_social_metadata=None):
|
||||
rows = self.gui.library_view.selectionModel().selectedRows()
|
||||
if not rows or len(rows) == 0:
|
||||
|
@ -1,3 +1,4 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<ui version="4.0">
|
||||
<class>Dialog</class>
|
||||
<widget class="QDialog" name="Dialog">
|
||||
@ -30,13 +31,16 @@
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
<item row="1" column="0" >
|
||||
<item row="2" column="0">
|
||||
<widget class="QDialogButtonBox" name="buttonBox">
|
||||
<property name="standardButtons">
|
||||
<set>QDialogButtonBox::Ok</set>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
<item row="1" column="0">
|
||||
<widget class="QTextBrowser" name="tb"/>
|
||||
</item>
|
||||
</layout>
|
||||
</widget>
|
||||
<resources>
|
||||
|
@ -6,9 +6,7 @@ __license__ = 'GPL v3'
|
||||
__copyright__ = '2010, Kovid Goyal <kovid@kovidgoyal.net>'
|
||||
__docformat__ = 'restructuredtext en'
|
||||
|
||||
import os, socket, time, cStringIO
|
||||
from threading import Thread
|
||||
from Queue import Queue
|
||||
import os, socket, time
|
||||
from binascii import unhexlify
|
||||
from functools import partial
|
||||
from itertools import repeat
|
||||
@ -16,67 +14,20 @@ from itertools import repeat
|
||||
from calibre.utils.smtp import compose_mail, sendmail, extract_email_address, \
|
||||
config as email_config
|
||||
from calibre.utils.filenames import ascii_filename
|
||||
from calibre.utils.ipc.job import BaseJob
|
||||
from calibre.ptempfile import PersistentTemporaryFile
|
||||
from calibre.customize.ui import available_input_formats, available_output_formats
|
||||
from calibre.ebooks.metadata import authors_to_string
|
||||
from calibre.constants import preferred_encoding
|
||||
from calibre.gui2 import config, Dispatcher, warning_dialog
|
||||
from calibre.library.save_to_disk import get_components
|
||||
from calibre.utils.config import tweaks
|
||||
from calibre.gui2.threaded_jobs import ThreadedJob
|
||||
|
||||
class EmailJob(BaseJob): # {{{
|
||||
|
||||
def __init__(self, callback, description, attachment, aname, to, subject, text, job_manager):
|
||||
BaseJob.__init__(self, description)
|
||||
self.exception = None
|
||||
self.job_manager = job_manager
|
||||
self.email_args = (attachment, aname, to, subject, text)
|
||||
self.email_sent_callback = callback
|
||||
self.log_path = None
|
||||
self._log_file = cStringIO.StringIO()
|
||||
self._log_file.write(self.description.encode('utf-8') + '\n')
|
||||
|
||||
@property
|
||||
def log_file(self):
|
||||
if self.log_path is not None:
|
||||
return open(self.log_path, 'rb')
|
||||
return cStringIO.StringIO(self._log_file.getvalue())
|
||||
|
||||
def start_work(self):
|
||||
self.start_time = time.time()
|
||||
self.job_manager.changed_queue.put(self)
|
||||
|
||||
def job_done(self):
|
||||
self.duration = time.time() - self.start_time
|
||||
self.percent = 1
|
||||
# Dump log onto disk
|
||||
lf = PersistentTemporaryFile('email_log')
|
||||
lf.write(self._log_file.getvalue())
|
||||
lf.close()
|
||||
self.log_path = lf.name
|
||||
self._log_file.close()
|
||||
self._log_file = None
|
||||
|
||||
self.job_manager.changed_queue.put(self)
|
||||
|
||||
def log_write(self, what):
|
||||
self._log_file.write(what)
|
||||
|
||||
# }}}
|
||||
|
||||
class Emailer(Thread): # {{{
|
||||
class Sendmail(object):
|
||||
|
||||
MAX_RETRIES = 1
|
||||
|
||||
def __init__(self, job_manager):
|
||||
Thread.__init__(self)
|
||||
self.daemon = True
|
||||
self.jobs = Queue()
|
||||
self.job_manager = job_manager
|
||||
self._run = True
|
||||
def __init__(self):
|
||||
self.calculate_rate_limit()
|
||||
|
||||
self.last_send_time = time.time() - self.rate_limit
|
||||
|
||||
def calculate_rate_limit(self):
|
||||
@ -87,70 +38,28 @@ class Emailer(Thread): # {{{
|
||||
'gmail.com' in rh or 'live.com' in rh):
|
||||
self.rate_limit = tweaks['public_smtp_relay_delay']
|
||||
|
||||
def stop(self):
|
||||
self._run = False
|
||||
self.jobs.put(None)
|
||||
def __call__(self, attachment, aname, to, subject, text, log=None,
|
||||
abort=None, notifications=None):
|
||||
|
||||
def run(self):
|
||||
while self._run:
|
||||
try:
|
||||
job = self.jobs.get()
|
||||
except:
|
||||
break
|
||||
if job is None or not self._run:
|
||||
break
|
||||
try_count = 0
|
||||
failed, exc = False, None
|
||||
job.start_work()
|
||||
if job.kill_on_start:
|
||||
job.log_write('Aborted\n')
|
||||
job.failed = failed
|
||||
job.killed = True
|
||||
job.job_done()
|
||||
continue
|
||||
|
||||
while try_count <= self.MAX_RETRIES:
|
||||
failed = False
|
||||
if try_count > 0:
|
||||
job.log_write('\nRetrying in %d seconds...\n' %
|
||||
log('\nRetrying in %d seconds...\n' %
|
||||
self.rate_limit)
|
||||
try:
|
||||
self.sendmail(job)
|
||||
break
|
||||
except Exception as e:
|
||||
if not self._run:
|
||||
self.sendmail(attachment, aname, to, subject, text, log)
|
||||
try_count = self.MAX_RETRIES
|
||||
log('Email successfully sent')
|
||||
except:
|
||||
if abort.is_set():
|
||||
return
|
||||
import traceback
|
||||
failed = True
|
||||
exc = e
|
||||
job.log_write('\nSending failed...\n')
|
||||
job.log_write(traceback.format_exc())
|
||||
if try_count == self.MAX_RETRIES:
|
||||
raise
|
||||
log.exception('\nSending failed...\n')
|
||||
|
||||
try_count += 1
|
||||
|
||||
if not self._run:
|
||||
break
|
||||
|
||||
job.failed = failed
|
||||
job.exception = exc
|
||||
job.job_done()
|
||||
try:
|
||||
job.email_sent_callback(job)
|
||||
except:
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
|
||||
def send_mails(self, jobnames, callback, attachments, to_s, subjects,
|
||||
texts, attachment_names):
|
||||
for name, attachment, to, subject, text, aname in zip(jobnames,
|
||||
attachments, to_s, subjects, texts, attachment_names):
|
||||
description = _('Email %s to %s') % (name, to)
|
||||
job = EmailJob(callback, description, attachment, aname, to,
|
||||
subject, text, self.job_manager)
|
||||
self.job_manager.add_job(job)
|
||||
self.jobs.put(job)
|
||||
|
||||
def sendmail(self, job):
|
||||
def sendmail(self, attachment, aname, to, subject, text, log):
|
||||
while time.time() - self.last_send_time <= self.rate_limit:
|
||||
time.sleep(1)
|
||||
try:
|
||||
@ -158,7 +67,6 @@ class Emailer(Thread): # {{{
|
||||
from_ = opts.from_
|
||||
if not from_:
|
||||
from_ = 'calibre <calibre@'+socket.getfqdn()+'>'
|
||||
attachment, aname, to, subject, text = job.email_args
|
||||
msg = compose_mail(from_, to, text, subject, open(attachment, 'rb'),
|
||||
aname)
|
||||
efrom, eto = map(extract_email_address, (from_, to))
|
||||
@ -169,11 +77,24 @@ class Emailer(Thread): # {{{
|
||||
username=opts.relay_username,
|
||||
password=unhexlify(opts.relay_password), port=opts.relay_port,
|
||||
encryption=opts.encryption,
|
||||
debug_output=partial(print, file=job._log_file))
|
||||
debug_output=log.debug)
|
||||
finally:
|
||||
self.last_send_time = time.time()
|
||||
|
||||
def email_news(self, mi, remove, get_fmts, done):
|
||||
gui_sendmail = Sendmail()
|
||||
|
||||
|
||||
def send_mails(jobnames, callback, attachments, to_s, subjects,
|
||||
texts, attachment_names, job_manager):
|
||||
for name, attachment, to, subject, text, aname in zip(jobnames,
|
||||
attachments, to_s, subjects, texts, attachment_names):
|
||||
description = _('Email %s to %s') % (name, to)
|
||||
job = ThreadedJob('email', description, gui_sendmail, (attachment, aname, to,
|
||||
subject, text), {}, callback, killable=False)
|
||||
job_manager.run_threaded_job(job)
|
||||
|
||||
|
||||
def email_news(mi, remove, get_fmts, done, job_manager):
|
||||
opts = email_config().parse()
|
||||
accounts = [(account, [x.strip().lower() for x in x[0].split(',')])
|
||||
for account, x in opts.accounts.items() if x[1]]
|
||||
@ -197,20 +118,15 @@ class Emailer(Thread): # {{{
|
||||
do_remove = []
|
||||
if i == len(accounts) - 1:
|
||||
do_remove = remove
|
||||
self.send_mails(jobnames,
|
||||
send_mails(jobnames,
|
||||
Dispatcher(partial(done, remove=do_remove)),
|
||||
attachments, to_s, subjects, texts, attachment_names)
|
||||
attachments, to_s, subjects, texts, attachment_names,
|
||||
job_manager)
|
||||
sent_mails.append(to_s[0])
|
||||
return sent_mails
|
||||
|
||||
|
||||
# }}}
|
||||
|
||||
class EmailMixin(object): # {{{
|
||||
|
||||
def __init__(self):
|
||||
self.emailer = Emailer(self.job_manager)
|
||||
|
||||
def send_by_mail(self, to, fmts, delete_from_library, subject='', send_ids=None,
|
||||
do_auto_convert=True, specific_format=None):
|
||||
ids = [self.library_view.model().id(r) for r in self.library_view.selectionModel().selectedRows()] if send_ids is None else send_ids
|
||||
@ -246,8 +162,7 @@ class EmailMixin(object): # {{{
|
||||
components = get_components(subject, mi, id)
|
||||
if not components:
|
||||
components = [mi.title]
|
||||
subject = os.path.join(*components)
|
||||
subjects.append(subject)
|
||||
subjects.append(os.path.join(*components))
|
||||
a = authors_to_string(mi.authors if mi.authors else \
|
||||
[_('Unknown')])
|
||||
texts.append(_('Attached, you will find the e-book') + \
|
||||
@ -262,11 +177,10 @@ class EmailMixin(object): # {{{
|
||||
|
||||
to_s = list(repeat(to, len(attachments)))
|
||||
if attachments:
|
||||
if not self.emailer.is_alive():
|
||||
self.emailer.start()
|
||||
self.emailer.send_mails(jobnames,
|
||||
send_mails(jobnames,
|
||||
Dispatcher(partial(self.email_sent, remove=remove)),
|
||||
attachments, to_s, subjects, texts, attachment_names)
|
||||
attachments, to_s, subjects, texts, attachment_names,
|
||||
self.job_manager)
|
||||
self.status_bar.show_message(_('Sending email to')+' '+to, 3000)
|
||||
|
||||
auto = []
|
||||
@ -334,10 +248,8 @@ class EmailMixin(object): # {{{
|
||||
files, auto = self.library_view.model().\
|
||||
get_preferred_formats_from_ids([id_], fmts)
|
||||
return files
|
||||
if not self.emailer.is_alive():
|
||||
self.emailer.start()
|
||||
sent_mails = self.emailer.email_news(mi, remove,
|
||||
get_fmts, self.email_sent)
|
||||
sent_mails = email_news(mi, remove,
|
||||
get_fmts, self.email_sent, self.job_manager)
|
||||
if sent_mails:
|
||||
self.status_bar.show_message(_('Sent news to')+' '+\
|
||||
', '.join(sent_mails), 3000)
|
||||
|
@ -247,6 +247,11 @@ class LayoutMixin(object): # {{{
|
||||
for x in ('cb', 'tb', 'bd'):
|
||||
button = getattr(self, x+'_splitter').button
|
||||
button.setIconSize(QSize(24, 24))
|
||||
if isosx:
|
||||
button.setStyleSheet('''
|
||||
QToolButton { background: none; border:none; padding: 0px; }
|
||||
QToolButton:checked { background: rgba(0, 0, 0, 25%); }
|
||||
''')
|
||||
self.status_bar.addPermanentWidget(button)
|
||||
self.status_bar.addPermanentWidget(self.jobs_button)
|
||||
self.setStatusBar(self.status_bar)
|
||||
|
@ -8,14 +8,13 @@ Job management.
|
||||
'''
|
||||
|
||||
import re
|
||||
|
||||
from Queue import Empty, Queue
|
||||
|
||||
from PyQt4.Qt import QAbstractTableModel, QVariant, QModelIndex, Qt, \
|
||||
QTimer, pyqtSignal, QIcon, QDialog, QAbstractItemDelegate, QApplication, \
|
||||
QSize, QStyleOptionProgressBarV2, QString, QStyle, QToolTip, QFrame, \
|
||||
QHBoxLayout, QVBoxLayout, QSizePolicy, QLabel, QCoreApplication, QAction, \
|
||||
QByteArray
|
||||
from PyQt4.Qt import (QAbstractTableModel, QVariant, QModelIndex, Qt,
|
||||
QTimer, pyqtSignal, QIcon, QDialog, QAbstractItemDelegate, QApplication,
|
||||
QSize, QStyleOptionProgressBarV2, QString, QStyle, QToolTip, QFrame,
|
||||
QHBoxLayout, QVBoxLayout, QSizePolicy, QLabel, QCoreApplication, QAction,
|
||||
QByteArray)
|
||||
|
||||
from calibre.utils.ipc.server import Server
|
||||
from calibre.utils.ipc.job import ParallelJob
|
||||
@ -25,8 +24,9 @@ from calibre.gui2.dialogs.jobs_ui import Ui_JobsDialog
|
||||
from calibre import __appname__
|
||||
from calibre.gui2.dialogs.job_view_ui import Ui_Dialog
|
||||
from calibre.gui2.progress_indicator import ProgressIndicator
|
||||
from calibre.gui2.threaded_jobs import ThreadedJobServer, ThreadedJob
|
||||
|
||||
class JobManager(QAbstractTableModel):
|
||||
class JobManager(QAbstractTableModel): # {{{
|
||||
|
||||
job_added = pyqtSignal(int)
|
||||
job_done = pyqtSignal(int)
|
||||
@ -42,6 +42,7 @@ class JobManager(QAbstractTableModel):
|
||||
self.add_job = Dispatcher(self._add_job)
|
||||
self.server = Server(limit=int(config['worker_limit']/2.0),
|
||||
enforce_cpu_limit=config['enforce_cpu_limit'])
|
||||
self.threaded_server = ThreadedJobServer()
|
||||
self.changed_queue = Queue()
|
||||
|
||||
self.timer = QTimer(self)
|
||||
@ -146,12 +147,21 @@ class JobManager(QAbstractTableModel):
|
||||
jobs.add(self.server.changed_jobs_queue.get_nowait())
|
||||
except Empty:
|
||||
break
|
||||
|
||||
# Update device jobs
|
||||
while True:
|
||||
try:
|
||||
jobs.add(self.changed_queue.get_nowait())
|
||||
except Empty:
|
||||
break
|
||||
|
||||
# Update threaded jobs
|
||||
while True:
|
||||
try:
|
||||
jobs.add(self.threaded_server.changed_jobs.get_nowait())
|
||||
except Empty:
|
||||
break
|
||||
|
||||
if jobs:
|
||||
needs_reset = False
|
||||
for job in jobs:
|
||||
@ -207,11 +217,22 @@ class JobManager(QAbstractTableModel):
|
||||
self.server.add_job(job)
|
||||
return job
|
||||
|
||||
def run_threaded_job(self, job):
|
||||
self.add_job(job)
|
||||
self.threaded_server.add_job(job)
|
||||
|
||||
def launch_gui_app(self, name, args=[], kwargs={}, description=''):
|
||||
job = ParallelJob(name, description, lambda x: x,
|
||||
args=args, kwargs=kwargs)
|
||||
self.server.run_job(job, gui=True, redirect_output=False)
|
||||
|
||||
def _kill_job(self, job):
|
||||
if isinstance(job, ParallelJob):
|
||||
self.server.kill_job(job)
|
||||
elif isinstance(job, ThreadedJob):
|
||||
self.threaded_server.kill_job(job)
|
||||
else:
|
||||
job.kill_on_start = True
|
||||
|
||||
def kill_job(self, row, view):
|
||||
job = self.jobs[row]
|
||||
@ -221,29 +242,29 @@ class JobManager(QAbstractTableModel):
|
||||
if job.duration is not None:
|
||||
return error_dialog(view, _('Cannot kill job'),
|
||||
_('Job has already run')).exec_()
|
||||
if isinstance(job, ParallelJob):
|
||||
self.server.kill_job(job)
|
||||
else:
|
||||
job.kill_on_start = True
|
||||
if not getattr(job, 'killable', True):
|
||||
return error_dialog(view, _('Cannot kill job'),
|
||||
_('This job cannot be stopped'), show=True)
|
||||
self._kill_job(job)
|
||||
|
||||
def kill_all_jobs(self):
|
||||
for job in self.jobs:
|
||||
if isinstance(job, DeviceJob) or job.duration is not None:
|
||||
if (isinstance(job, DeviceJob) or job.duration is not None or
|
||||
not getattr(job, 'killable', True)):
|
||||
continue
|
||||
if isinstance(job, ParallelJob):
|
||||
self.server.kill_job(job)
|
||||
else:
|
||||
job.kill_on_start = True
|
||||
self._kill_job(job)
|
||||
|
||||
def terminate_all_jobs(self):
|
||||
self.server.killall()
|
||||
for job in self.jobs:
|
||||
if isinstance(job, DeviceJob) or job.duration is not None:
|
||||
if (isinstance(job, DeviceJob) or job.duration is not None or
|
||||
not getattr(job, 'killable', True)):
|
||||
continue
|
||||
if not isinstance(job, ParallelJob):
|
||||
job.kill_on_start = True
|
||||
|
||||
self._kill_job(job)
|
||||
# }}}
|
||||
|
||||
# Jobs UI {{{
|
||||
class ProgressBarDelegate(QAbstractItemDelegate):
|
||||
|
||||
def sizeHint(self, option, index):
|
||||
@ -269,6 +290,11 @@ class DetailView(QDialog, Ui_Dialog):
|
||||
self.setupUi(self)
|
||||
self.setWindowTitle(job.description)
|
||||
self.job = job
|
||||
self.html_view = hasattr(job, 'html_details')
|
||||
if self.html_view:
|
||||
self.log.setVisible(False)
|
||||
else:
|
||||
self.tb.setVisible(False)
|
||||
self.next_pos = 0
|
||||
self.update()
|
||||
self.timer = QTimer(self)
|
||||
@ -277,6 +303,13 @@ class DetailView(QDialog, Ui_Dialog):
|
||||
|
||||
|
||||
def update(self):
|
||||
if self.html_view:
|
||||
html = self.job.html_details
|
||||
if len(html) > self.next_pos:
|
||||
self.next_pos = len(html)
|
||||
self.tb.setHtml(
|
||||
'<pre style="font-family:monospace">%s</pre>'%html)
|
||||
else:
|
||||
f = self.job.log_file
|
||||
f.seek(self.next_pos)
|
||||
more = f.read()
|
||||
@ -441,3 +474,5 @@ class JobsDialog(QDialog, Ui_JobsDialog):
|
||||
def hide(self, *args):
|
||||
self.save_state()
|
||||
return QDialog.hide(self, *args)
|
||||
# }}}
|
||||
|
||||
|
@ -408,6 +408,7 @@ class ToolBar(BaseToolBar): # {{{
|
||||
self.d_widget.layout().addWidget(self.donate_button)
|
||||
if isosx:
|
||||
self.d_widget.setStyleSheet('QWidget, QToolButton {background-color: none; border: none; }')
|
||||
self.d_widget.layout().addWidget(QLabel(u'\u00a0'))
|
||||
bar.addWidget(self.d_widget)
|
||||
self.showing_donate = True
|
||||
elif what in self.gui.iactions:
|
||||
|
src/calibre/gui2/metadata/bulk_download2.py (new file, 320 lines)
@@ -0,0 +1,320 @@
#!/usr/bin/env python
|
||||
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai
|
||||
from __future__ import (unicode_literals, division, absolute_import,
|
||||
print_function)
|
||||
|
||||
__license__ = 'GPL v3'
|
||||
__copyright__ = '2011, Kovid Goyal <kovid@kovidgoyal.net>'
|
||||
__docformat__ = 'restructuredtext en'
|
||||
|
||||
import os
|
||||
from functools import partial
|
||||
from itertools import izip
|
||||
|
||||
from PyQt4.Qt import (QIcon, QDialog, QVBoxLayout, QTextBrowser, QSize,
|
||||
QDialogButtonBox, QApplication, QTimer, QLabel, QProgressBar)
|
||||
|
||||
from calibre.gui2.dialogs.message_box import MessageBox
|
||||
from calibre.gui2.threaded_jobs import ThreadedJob
|
||||
from calibre.utils.icu import lower
|
||||
from calibre.ebooks.metadata import authors_to_string
|
||||
from calibre.gui2 import question_dialog, error_dialog
|
||||
from calibre.ebooks.metadata.sources.identify import identify, msprefs
|
||||
from calibre.ebooks.metadata.sources.covers import download_cover
|
||||
from calibre.ebooks.metadata.book.base import Metadata
|
||||
from calibre.customize.ui import metadata_plugins
|
||||
from calibre.ptempfile import PersistentTemporaryFile
|
||||
|
||||
def show_config(gui, parent):
|
||||
from calibre.gui2.preferences import show_config_widget
|
||||
show_config_widget('Sharing', 'Metadata download', parent=parent,
|
||||
gui=gui, never_shutdown=True)
|
||||
|
||||
def start_download(gui, ids, callback, identify, covers):
|
||||
q = MessageBox(MessageBox.QUESTION, _('Schedule download?'),
|
||||
'<p>'+_('The download of metadata for the <b>%d selected book(s)</b> will'
|
||||
' run in the background. Proceed?')%len(ids) +
|
||||
'<p>'+_('You can monitor the progress of the download '
|
||||
'by clicking the rotating spinner in the bottom right '
|
||||
'corner.') +
|
||||
'<p>'+_('When the download completes you will be asked for'
|
||||
' confirmation before calibre applies the downloaded metadata.'),
|
||||
show_copy_button=False, parent=gui)
|
||||
b = q.bb.addButton(_('Configure download'), q.bb.ActionRole)
|
||||
b.setIcon(QIcon(I('config.png')))
|
||||
b.clicked.connect(partial(show_config, gui, q))
|
||||
q.det_msg_toggle.setVisible(False)
|
||||
|
||||
ret = q.exec_()
|
||||
b.clicked.disconnect()
|
||||
if ret != q.Accepted:
|
||||
return
|
||||
|
||||
job = ThreadedJob('metadata bulk download',
|
||||
_('Download metadata for %d books')%len(ids),
|
||||
download, (ids, gui.current_db, identify, covers), {}, callback)
|
||||
gui.job_manager.run_threaded_job(job)
|
||||
gui.status_bar.show_message(_('Metadata download started'), 3000)
|
||||
|
||||
|
||||
class ViewLog(QDialog): # {{{
|
||||
|
||||
def __init__(self, html, parent=None):
|
||||
QDialog.__init__(self, parent)
|
||||
self.l = l = QVBoxLayout()
|
||||
self.setLayout(l)
|
||||
|
||||
self.tb = QTextBrowser(self)
|
||||
self.tb.setHtml('<pre style="font-family: monospace">%s</pre>' % html)
|
||||
l.addWidget(self.tb)
|
||||
|
||||
self.bb = QDialogButtonBox(QDialogButtonBox.Ok)
|
||||
self.bb.accepted.connect(self.accept)
|
||||
self.bb.rejected.connect(self.reject)
|
||||
self.copy_button = self.bb.addButton(_('Copy to clipboard'),
|
||||
self.bb.ActionRole)
|
||||
self.copy_button.setIcon(QIcon(I('edit-copy.png')))
|
||||
self.copy_button.clicked.connect(self.copy_to_clipboard)
|
||||
l.addWidget(self.bb)
|
||||
self.setModal(False)
|
||||
self.resize(QSize(700, 500))
|
||||
self.setWindowTitle(_('Download log'))
|
||||
self.setWindowIcon(QIcon(I('debug.png')))
|
||||
self.show()
|
||||
|
||||
def copy_to_clipboard(self):
|
||||
txt = self.tb.toPlainText()
|
||||
QApplication.clipboard().setText(txt)
|
||||
|
||||
_vl = None
|
||||
def view_log(job, parent):
|
||||
global _vl
|
||||
_vl = ViewLog(job.html_details, parent)
|
||||
|
||||
# }}}
|
||||
|
||||
class ApplyDialog(QDialog):
|
||||
|
||||
def __init__(self, id_map, gui):
|
||||
QDialog.__init__(self, gui)
|
||||
|
||||
self.l = l = QVBoxLayout()
|
||||
self.setLayout(l)
|
||||
l.addWidget(QLabel(_('Applying downloaded metadata to your library')))
|
||||
|
||||
self.pb = QProgressBar(self)
|
||||
l.addWidget(self.pb)
|
||||
self.pb.setMinimum(0)
|
||||
self.pb.setMaximum(len(id_map))
|
||||
|
||||
self.bb = QDialogButtonBox(QDialogButtonBox.Cancel)
|
||||
self.bb.rejected.connect(self.reject)
|
||||
self.bb.accepted.connect(self.accept)
|
||||
l.addWidget(self.bb)
|
||||
|
||||
self.gui = gui
|
||||
self.id_map = list(id_map.iteritems())
|
||||
self.current_idx = 0
|
||||
|
||||
self.failures = []
|
||||
self.ids = []
|
||||
self.canceled = False
|
||||
|
||||
QTimer.singleShot(20, self.do_one)
|
||||
|
||||
def do_one(self):
|
||||
if self.canceled:
|
||||
return
|
||||
i, mi = self.id_map[self.current_idx]
|
||||
db = self.gui.current_db
|
||||
try:
|
||||
set_title = not mi.is_null('title')
|
||||
set_authors = not mi.is_null('authors')
|
||||
db.set_metadata(i, mi, commit=False, set_title=set_title,
|
||||
set_authors=set_authors)
|
||||
self.ids.append(i)
|
||||
except:
|
||||
import traceback
|
||||
self.failures.append((i, traceback.format_exc()))
|
||||
|
||||
try:
|
||||
if mi.cover:
|
||||
os.remove(mi.cover)
|
||||
except:
|
||||
pass
|
||||
|
||||
self.pb.setValue(self.pb.value()+1)
|
||||
|
||||
if self.current_idx >= len(self.id_map) - 1:
|
||||
self.finalize()
|
||||
else:
|
||||
self.current_idx += 1
|
||||
QTimer.singleShot(20, self.do_one)
|
||||
|
||||
def reject(self):
|
||||
self.canceled = True
|
||||
QDialog.reject(self)
|
||||
|
||||
def finalize(self):
|
||||
if self.canceled:
|
||||
return
|
||||
if self.failures:
|
||||
msg = []
|
||||
db = self.gui.current_db
|
||||
for i, tb in self.failures:
|
||||
title = db.title(i, index_is_id=True)
|
||||
authors = db.authors(i, index_is_id=True)
|
||||
if authors:
|
||||
authors = [x.replace('|', ',') for x in authors.split(',')]
|
||||
title += ' - ' + authors_to_string(authors)
|
||||
msg.append(title+'\n\n'+tb+'\n'+('*'*80))
|
||||
|
||||
error_dialog(self, _('Some failures'),
|
||||
_('Failed to apply updated metadata for some books'
|
||||
' in your library. Click "Show Details" to see '
|
||||
'details.'), det_msg='\n\n'.join(msg), show=True)
|
||||
self.accept()
|
||||
if self.ids:
|
||||
cr = self.gui.library_view.currentIndex().row()
|
||||
self.gui.library_view.model().refresh_ids(
|
||||
self.ids, cr)
|
||||
if self.gui.cover_flow:
|
||||
self.gui.cover_flow.dataChanged()
|
||||
|
||||
_amd = None
|
||||
def apply_metadata(job, gui, q, result):
|
||||
global _amd
|
||||
q.vlb.clicked.disconnect()
|
||||
q.finished.disconnect()
|
||||
if result != q.Accepted:
|
||||
return
|
||||
id_map, failed_ids, failed_covers, title_map = job.result
|
||||
id_map = dict([(k, v) for k, v in id_map.iteritems() if k not in
|
||||
failed_ids])
|
||||
if not id_map:
|
||||
return
|
||||
|
||||
modified = set()
|
||||
db = gui.current_db
|
||||
|
||||
for i, mi in id_map.iteritems():
|
||||
lm = db.metadata_last_modified(i, index_is_id=True)
|
||||
if lm > mi.last_modified:
|
||||
title = db.title(i, index_is_id=True)
|
||||
authors = db.authors(i, index_is_id=True)
|
||||
if authors:
|
||||
authors = [x.replace('|', ',') for x in authors.split(',')]
|
||||
title += ' - ' + authors_to_string(authors)
|
||||
modified.add(title)
|
||||
|
||||
if modified:
|
||||
modified = sorted(modified, key=lower)
|
||||
if not question_dialog(gui, _('Some books changed'), '<p>'+
|
||||
_('The metadata for some books in your library has'
|
||||
' changed since you started the download. If you'
|
||||
' proceed, some of those changes may be overwritten. '
|
||||
'Click "Show details" to see the list of changed books. '
|
||||
'Do you want to proceed?'), det_msg='\n'.join(modified)):
|
||||
return
|
||||
|
||||
_amd = ApplyDialog(id_map, gui)
|
||||
_amd.exec_()
|
||||
|
||||
def proceed(gui, job):
|
||||
gui.status_bar.show_message(_('Metadata download completed'), 3000)
|
||||
id_map, failed_ids, failed_covers, title_map = job.result
|
||||
fmsg = det_msg = ''
|
||||
if failed_ids or failed_covers:
|
||||
fmsg = '<p>'+_('Could not download metadata and/or covers for %d of the books. Click'
|
||||
' "Show details" to see which books.')%len(failed_ids)
|
||||
det_msg = []
|
||||
for i in failed_ids | failed_covers:
|
||||
title = title_map[i]
|
||||
if i in failed_ids:
|
||||
title += (' ' + _('(Failed metadata)'))
|
||||
if i in failed_covers:
|
||||
title += (' ' + _('(Failed cover)'))
|
||||
det_msg.append(title)
|
||||
msg = '<p>' + _('Finished downloading metadata for <b>%d book(s)</b>. '
|
||||
'Proceed with updating the metadata in your library?')%len(id_map)
|
||||
q = MessageBox(MessageBox.QUESTION, _('Download complete'),
|
||||
msg + fmsg, det_msg='\n'.join(det_msg), show_copy_button=bool(failed_ids),
|
||||
parent=gui)
|
||||
q.vlb = q.bb.addButton(_('View log'), q.bb.ActionRole)
|
||||
q.vlb.setIcon(QIcon(I('debug.png')))
|
||||
q.vlb.clicked.connect(partial(view_log, job, q))
|
||||
q.det_msg_toggle.setVisible(bool(failed_ids | failed_covers))
|
||||
q.setModal(False)
|
||||
q.show()
|
||||
q.finished.connect(partial(apply_metadata, job, gui, q))
|
||||
|
||||
def merge_result(oldmi, newmi):
|
||||
dummy = Metadata(_('Unknown'))
|
||||
for f in msprefs['ignore_fields']:
|
||||
setattr(newmi, f, getattr(dummy, f))
|
||||
fields = set()
|
||||
for plugin in metadata_plugins(['identify']):
|
||||
fields |= plugin.touched_fields
|
||||
|
||||
for f in fields:
|
||||
# Optimize so that set_metadata does not have to do extra work later
|
||||
if not f.startswith('identifier:'):
|
||||
if (not newmi.is_null(f) and getattr(newmi, f) == getattr(oldmi, f)):
|
||||
setattr(newmi, f, getattr(dummy, f))
|
||||
|
||||
newmi.last_modified = oldmi.last_modified
|
||||
|
||||
return newmi
|
||||
|
||||
def download(ids, db, do_identify, covers,
|
||||
log=None, abort=None, notifications=None):
|
||||
ids = list(ids)
|
||||
metadata = [db.get_metadata(i, index_is_id=True, get_user_categories=False)
|
||||
for i in ids]
|
||||
failed_ids = set()
|
||||
failed_covers = set()
|
||||
title_map = {}
|
||||
ans = {}
|
||||
count = 0
|
||||
for i, mi in izip(ids, metadata):
|
||||
if abort.is_set():
|
||||
log.error('Aborting...')
|
||||
break
|
||||
title, authors, identifiers = mi.title, mi.authors, mi.identifiers
|
||||
title_map[i] = title
|
||||
if do_identify:
|
||||
results = []
|
||||
try:
|
||||
results = identify(log, abort, title=title, authors=authors,
|
||||
identifiers=identifiers)
|
||||
except:
|
||||
pass
|
||||
if results:
|
||||
mi = merge_result(mi, results[0])
|
||||
identifiers = mi.identifiers
|
||||
if not mi.is_null('rating'):
|
||||
# set_metadata expects a rating out of 10
|
||||
mi.rating *= 2
|
||||
else:
|
||||
log.error('Failed to download metadata for', title)
|
||||
failed_ids.add(i)
|
||||
# We don't want set_metadata operating on anything but covers
|
||||
mi = merge_result(mi, mi)
|
||||
if covers:
|
||||
cdata = download_cover(log, title=title, authors=authors,
|
||||
identifiers=identifiers)
|
||||
if cdata is not None:
|
||||
with PersistentTemporaryFile('.jpg', 'downloaded-cover-') as f:
|
||||
f.write(cdata[-1])
|
||||
mi.cover = f.name
|
||||
else:
|
||||
failed_covers.add(i)
|
||||
ans[i] = mi
|
||||
count += 1
|
||||
notifications.put((count/len(ids),
|
||||
_('Downloaded %d of %d')%(count, len(ids))))
|
||||
log('Download complete, with %d failures'%len(failed_ids))
|
||||
return (ans, failed_ids, failed_covers, title_map)
|
||||
|
||||
|
||||
|
@ -56,7 +56,12 @@ class ConfigWidget(QWidget):
|
||||
self.setLayout(l)
|
||||
|
||||
self.gb = QGroupBox(_('Downloaded metadata fields'), self)
|
||||
l.addWidget(self.gb, 0, 0, 1, 2)
|
||||
if plugin.config_help_message:
|
||||
self.pchm = QLabel(plugin.config_help_message)
|
||||
self.pchm.setWordWrap(True)
|
||||
self.pchm.setOpenExternalLinks(True)
|
||||
l.addWidget(self.pchm, 0, 0, 1, 2)
|
||||
l.addWidget(self.gb, l.rowCount(), 0, 1, 2)
|
||||
self.gb.l = QGridLayout()
|
||||
self.gb.setLayout(self.gb.l)
|
||||
self.fields_view = v = QListView(self)
|
||||
@ -81,7 +86,7 @@ class ConfigWidget(QWidget):
|
||||
widget.setValue(val)
|
||||
elif opt.type == 'string':
|
||||
widget = QLineEdit(self)
|
||||
widget.setText(val)
|
||||
widget.setText(val if val else '')
|
||||
elif opt.type == 'bool':
|
||||
widget = QCheckBox(opt.label, self)
|
||||
widget.setChecked(bool(val))
|
||||
|
@ -612,6 +612,10 @@ class MetadataSingleDialogAlt1(MetadataSingleDialogBase): # {{{
|
||||
|
||||
self.button_box.addButton(self.fetch_metadata_button,
|
||||
QDialogButtonBox.ActionRole)
|
||||
self.config_metadata_button.setToolButtonStyle(Qt.ToolButtonTextOnly)
|
||||
self.config_metadata_button.setText(_('Configure metadata downloading'))
|
||||
self.button_box.addButton(self.config_metadata_button,
|
||||
QDialogButtonBox.ActionRole)
|
||||
sto(self.button_box, self.title)
|
||||
|
||||
def create_row(row, widget, tab_to, button=None, icon=None, span=1):
|
||||
|
@ -84,11 +84,11 @@ class EmailAccounts(QAbstractTableModel): # {{{
|
||||
account = self.account_order[row]
|
||||
if col == 3:
|
||||
self.accounts[account][1] ^= True
|
||||
if col == 2:
|
||||
elif col == 2:
|
||||
self.subjects[account] = unicode(value.toString())
|
||||
elif col == 1:
|
||||
self.accounts[account][0] = unicode(value.toString()).upper()
|
||||
else:
|
||||
elif col == 0:
|
||||
na = unicode(value.toString())
|
||||
from email.utils import parseaddr
|
||||
addr = parseaddr(na)[-1]
|
||||
@ -100,7 +100,7 @@ class EmailAccounts(QAbstractTableModel): # {{{
|
||||
self.accounts[na][0] = 'AZW, MOBI, TPZ, PRC, AZW1'
|
||||
|
||||
self.dataChanged.emit(
|
||||
self.index(index.row(), 0), self.index(index.row(), 2))
|
||||
self.index(index.row(), 0), self.index(index.row(), 3))
|
||||
return True
|
||||
|
||||
def make_default(self, index):
|
||||
|
@ -10,7 +10,7 @@ __docformat__ = 'restructuredtext en'
from operator import attrgetter

from PyQt4.Qt import (QAbstractTableModel, Qt, QAbstractListModel, QWidget,
        pyqtSignal, QVBoxLayout, QDialogButtonBox, QFrame, QLabel)
        pyqtSignal, QVBoxLayout, QDialogButtonBox, QFrame, QLabel, QIcon)

from calibre.gui2.preferences import ConfigWidgetBase, test_widget
from calibre.gui2.preferences.metadata_sources_ui import Ui_Form

@ -67,6 +67,13 @@ class SourcesModel(QAbstractTableModel): # {{{
            return self.enabled_overrides.get(plugin, orig)
        elif role == Qt.UserRole:
            return plugin
        elif (role == Qt.DecorationRole and col == 0 and not
                plugin.is_configured()):
            return QIcon(I('list_remove.png'))
        elif role == Qt.ToolTipRole:
            if plugin.is_configured():
                return _('This source is configured and ready to go')
            return _('This source needs configuration')
        return NONE

    def setData(self, index, val, role):

@ -48,6 +48,16 @@
     </property>
    </widget>
   </item>
   <item>
    <widget class="QLabel" name="label_5">
     <property name="text">
      <string>Sources with a red X next to their names must be configured before they will be used. </string>
     </property>
     <property name="wordWrap">
      <bool>true</bool>
     </property>
    </widget>
   </item>
   <item>
    <widget class="QPushButton" name="configure_plugin_button">
     <property name="text">

@ -436,13 +436,14 @@ class SavedSearchBoxMixin(object): # {{{
            b = getattr(self, x+'_search_button')
            b.setStatusTip(b.toolTip())

    def saved_searches_changed(self, set_restriction=None):
    def saved_searches_changed(self, set_restriction=None, recount=True):
        p = sorted(saved_searches().names(), key=sort_key)
        if set_restriction is None:
            set_restriction = unicode(self.search_restriction.currentText())
        # rebuild the restrictions combobox using current saved searches
        self.search_restriction.clear()
        self.search_restriction.addItem('')
        if recount:
            self.tags_view.recount()
        for s in p:
            self.search_restriction.addItem(s)

@ -25,6 +25,7 @@ class SearchRestrictionMixin(object):
        r = self.search_restriction.findText(name)
        if r < 0:
            r = 0
        if r != self.search_restriction.currentIndex():
            self.search_restriction.setCurrentIndex(r)
            self.apply_search_restriction(r)

src/calibre/gui2/threaded_jobs.py (Normal file, 245 lines)
@ -0,0 +1,245 @@
#!/usr/bin/env python
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai
from __future__ import (unicode_literals, division, absolute_import,
                        print_function)

__license__ = 'GPL v3'
__copyright__ = '2011, Kovid Goyal <kovid@kovidgoyal.net>'
__docformat__ = 'restructuredtext en'

import os, time, tempfile, json
from threading import Thread, RLock, Event
from Queue import Queue

from calibre.utils.ipc.job import BaseJob
from calibre.utils.logging import GUILog
from calibre.ptempfile import base_dir

class ThreadedJob(BaseJob):

    def __init__(self,
            type_, description,

            func, args, kwargs,

            callback,

            max_concurrent_count=1,
            killable=True,
            log=None):
        '''
        A job that is run in its own thread in the calibre main process

        :param type_: The type of this job (a string). The type is used in
        conjunction with max_concurrent_count to prevent too many jobs of the
        same type from running

        :param description: A user viewable job description

        :param func: The function that actually does the work. This function *must*
        accept at least three keyword arguments: abort, log and notifications. abort is
        an Event object. func should periodically check abort.is_set() and if
        it is True, it should stop processing as soon as possible. notifications
        is a Queue. func should put progress notifications into it in the form
        of a tuple (frac, msg). frac is a number between 0 and 1 indicating
        progress and msg is a string describing the progress. log is a Log
        object which func should use for all debugging output. func should
        raise an Exception to indicate failure. This exception is stored in
        job.exception and can thus be used to pass arbitrary information to
        callback.

        :param args,kwargs: These are passed to func when it is called

        :param callback: A callable that is called on completion of this job.
        Note that it is not called if the user kills the job. Check job.failed
        to see if the job succeeded or not. And use job.log to get the job log.

        :param killable: If False the GUI won't let the user kill this job

        :param log: Must be a subclass of GUILog or None. If None a default
        GUILog is created.
        '''
        BaseJob.__init__(self, description)

        self.type = type_
        self.max_concurrent_count = max_concurrent_count
        self.killable = killable
        self.callback = callback
        self.abort = Event()
        self.exception = None

        kwargs['notifications'] = self.notifications
        kwargs['abort'] = self.abort
        self.log = GUILog() if log is None else log
        kwargs['log'] = self.log

        self.func, self.args, self.kwargs = func, args, kwargs
        self.consolidated_log = None

    def start_work(self):
        self.start_time = time.time()
        self.log('Starting job:', self.description)
        try:
            self.result = self.func(*self.args, **self.kwargs)
        except Exception as e:
            self.exception = e
            self.failed = True
            self.log.exception('Job: "%s" failed with error:'%self.description)
            self.log.debug('Called with args:', self.args, self.kwargs)

        self.duration = time.time() - self.start_time
        try:
            self.callback(self)
        except:
            import traceback
            traceback.print_exc()
        self._cleanup()

    def _cleanup(self):

        try:
            self.consolidate_log()
        except:
            self.log.exception('Log consolidation failed')

        # No need to keep references to these around anymore
        self.func = self.args = self.kwargs = self.notifications = None
        # We can't delete self.callback as it might be a Dispatch object and if
        # it is garbage collected it won't work

    def kill(self):
        if self.start_time is None:
            self.start_time = time.time()
            self.duration = 0.0001
        else:
            self.duration = time.time() - self.start_time
        self.abort.set()

        self.log('Aborted job:', self.description)
        self.killed = True
        self.failed = True
        self._cleanup()

    def consolidate_log(self):
        logs = [self.log.html, self.log.plain_text]
        bdir = base_dir()
        log_dir = os.path.join(bdir, 'threaded_job_logs')
        if not os.path.exists(log_dir):
            os.makedirs(log_dir)
        fd, path = tempfile.mkstemp(suffix='.json', prefix='log-', dir=log_dir)
        with os.fdopen(fd, 'wb') as f:
            f.write(json.dumps(logs, ensure_ascii=False,
                indent=2).encode('utf-8'))
        self.consolidated_log = path
        self.log = None

    def read_consolidated_log(self):
        with open(self.consolidated_log, 'rb') as f:
            return json.loads(f.read().decode('utf-8'))

    @property
    def details(self):
        if self.consolidated_log is None:
            return self.log.plain_text
        return self.read_consolidated_log()[1]

    @property
    def html_details(self):
        if self.consolidated_log is None:
            return self.log.html
        return self.read_consolidated_log()[0]

class ThreadedJobWorker(Thread):

    def __init__(self, job):
        Thread.__init__(self)
        self.daemon = True
        self.job = job

    def run(self):
        try:
            self.job.start_work()
        except:
            import traceback
            from calibre import prints
            prints('Job had unhandled exception:', self.job.description)
            traceback.print_exc()

class ThreadedJobServer(Thread):

    def __init__(self):
        Thread.__init__(self)
        self.daemon = True
        self.lock = RLock()

        self.queued_jobs = []
        self.running_jobs = set()
        self.changed_jobs = Queue()
        self.keep_going = True

    def close(self):
        self.keep_going = False

    def add_job(self, job):
        with self.lock:
            self.queued_jobs.append(job)

        if not self.is_alive():
            self.start()

    def run(self):
        while self.keep_going:
            try:
                self.run_once()
            except:
                import traceback
                traceback.print_exc()
            time.sleep(0.1)

    def run_once(self):
        with self.lock:
            remove = set()
            for worker in self.running_jobs:
                if worker.is_alive():
                    # Get progress notifications
                    if worker.job.consume_notifications():
                        self.changed_jobs.put(worker.job)
                else:
                    remove.add(worker)
                    self.changed_jobs.put(worker.job)

            for worker in remove:
                self.running_jobs.remove(worker)

            jobs = self.get_startable_jobs()
            for job in jobs:
                w = ThreadedJobWorker(job)
                w.start()
                self.running_jobs.add(w)
                self.changed_jobs.put(job)
                self.queued_jobs.remove(job)

    def kill_job(self, job):
        with self.lock:
            if job in self.queued_jobs:
                self.queued_jobs.remove(job)
            elif job in self.running_jobs:
                self.running_jobs.remove(job)
        job.kill()
        self.changed_jobs.put(job)

    def running_jobs_of_type(self, type_):
        return len([w for w in self.running_jobs if w.job.type == type_])

    def get_startable_jobs(self):
        queued_types = []
        ans = []
        for job in self.queued_jobs:
            num = self.running_jobs_of_type(job.type)
            num += queued_types.count(job.type)
            if num < job.max_concurrent_count:
                queued_types.append(job.type)
                ans.append(job)
        return ans

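Editor's note: the docstring above spells out the contract for ``func`` — it must accept ``abort``, ``log`` and ``notifications`` keyword arguments, report progress as ``(frac, msg)`` tuples, and signal failure by raising. The following is a minimal editorial sketch (not part of the commit) of how a caller could wire a function into this API; the worker function, library path and callback names are made up, and in calibre itself the GUI's job manager owns the ThreadedJobServer::

    # Editorial sketch only; count_books, on_done and the path are illustrative names.
    from calibre.gui2.threaded_jobs import ThreadedJob, ThreadedJobServer

    def count_books(path, abort=None, log=None, notifications=None):
        # The worker must accept abort, log and notifications keyword arguments
        total = 100
        for i in range(total):
            if abort.is_set():
                # Stop as soon as possible when the user kills the job
                return None
            # Report progress as a (fraction, message) tuple
            notifications.put(((i + 1) / float(total),
                'Processed %d of %d' % (i + 1, total)))
        log('Finished scanning', path)
        return total

    def on_done(job):
        # Called on completion, but not if the user killed the job
        if job.failed:
            print(job.log.plain_text)
        else:
            print('Result: %s' % job.result)

    server = ThreadedJobServer()
    job = ThreadedJob('count_books', 'Counting books', count_books,
            ('/tmp/library',), {}, on_done)
    server.add_job(job)
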
@ -446,12 +446,13 @@ class Main(MainWindow, MainWindowMixin, DeviceMixin, EmailMixin, # {{{
        self.search.clear()
        self.saved_search.clear()
        self.book_details.reset_info()
        self.library_view.model().count_changed()
        prefs['library_path'] = self.library_path
        #self.library_view.model().count_changed()
        db = self.library_view.model().db
        self.iactions['Choose Library'].count_changed(db.count())
        self.set_window_title()
        self.apply_named_search_restriction('') # reset restriction to null
        self.saved_searches_changed() # reload the search restrictions combo box
        self.saved_searches_changed(recount=False) # reload the search restrictions combo box
        self.apply_named_search_restriction(db.prefs['gui_restriction'])
        for action in self.iactions.values():
            action.library_changed(db)

@ -607,6 +608,7 @@ class Main(MainWindow, MainWindowMixin, DeviceMixin, EmailMixin, # {{{
            self.update_checker.terminate()
        self.listener.close()
        self.job_manager.server.close()
        self.job_manager.threaded_server.close()
        while self.spare_servers:
            self.spare_servers.pop().close()
        self.device_manager.keep_going = False

@ -615,8 +617,6 @@ class Main(MainWindow, MainWindowMixin, DeviceMixin, EmailMixin, # {{{
            mb.stop()

        self.hide_windows()
        if self.emailer.is_alive():
            self.emailer.stop()
        try:
            try:
                if self.content_server is not None:

@ -191,7 +191,8 @@ class CacheRow(list): # {{{
        if is_comp:
            id = list.__getitem__(self, 0)
            self._must_do = False
            mi = self.db.get_metadata(id, index_is_id=True)
            mi = self.db.get_metadata(id, index_is_id=True,
                                      get_user_categories=False)
            for c in self._composites:
                self[c] = mi.get(self._composites[c])
        return list.__getitem__(self, col)

@ -390,21 +391,38 @@ class ResultCache(SearchQueryParser): # {{{
    def build_numeric_relop_dict(self):
        self.numeric_search_relops = {
                        '=':[1, lambda r, q: r == q],
                        '>':[1, lambda r, q: r > q],
                        '<':[1, lambda r, q: r < q],
                        '>':[1, lambda r, q: r is not None and r > q],
                        '<':[1, lambda r, q: r is not None and r < q],
                        '!=':[2, lambda r, q: r != q],
                        '>=':[2, lambda r, q: r >= q],
                        '<=':[2, lambda r, q: r <= q]
                        '>=':[2, lambda r, q: r is not None and r >= q],
                        '<=':[2, lambda r, q: r is not None and r <= q]
                    }

    def get_numeric_matches(self, location, query, candidates, val_func = None):
        matches = set([])
        if len(query) == 0:
            return matches

        if val_func is None:
            loc = self.field_metadata[location]['rec_index']
            val_func = lambda item, loc=loc: item[loc]
        dt = self.field_metadata[location]['datatype']

        q = ''
        val_func = lambda item, loc=loc: item[loc]
        cast = adjust = lambda x: x

        if query == 'false':
            query = '0'
            if dt == 'rating':
                relop = lambda x,y: not bool(x)
            else:
                relop = lambda x,y: x is None
        elif query == 'true':
            query = '!=0'
            if dt == 'rating':
                relop = lambda x,y: bool(x)
            else:
                relop = lambda x,y: x is not None
        else:
            relop = None
            for k in self.numeric_search_relops.keys():
                if query.startswith(k):

@ -413,23 +431,15 @@ class ResultCache(SearchQueryParser): # {{{
            if relop is None:
                (p, relop) = self.numeric_search_relops['=']

        if val_func is None:
            loc = self.field_metadata[location]['rec_index']
            val_func = lambda item, loc=loc: item[loc]

        dt = self.field_metadata[location]['datatype']
        if dt == 'int':
            cast = (lambda x: int (x))
            adjust = lambda x: x
            cast = lambda x: int (x)
        elif dt == 'rating':
            cast = (lambda x: int (x))
            cast = lambda x: 0 if x is None else int (x)
            adjust = lambda x: x/2
        elif dt in ('float', 'composite'):
            cast = lambda x : float (x)
            adjust = lambda x: x
        else: # count operation
            cast = (lambda x: int (x))
            adjust = lambda x: x

        if len(query) > 1:
            mult = query[-1:].lower()

@ -441,7 +451,8 @@ class ResultCache(SearchQueryParser): # {{{
        try:
            q = cast(query) * mult
        except:
            return matches
            raise ParseException(query, len(query),
                'Non-numeric value in query', self)

        for id_ in candidates:
            item = self._data[id_]

@ -450,10 +461,8 @@ class ResultCache(SearchQueryParser): # {{{
            try:
                v = cast(val_func(item))
            except:
                v = 0
            if not v:
                v = 0
            else:
                v = None
            if v:
                v = adjust(v)
            if relop(v, q):
                matches.add(item[0])

@ -583,8 +592,7 @@ class ResultCache(SearchQueryParser): # {{{
            candidates = self.universal_set()
        if len(candidates) == 0:
            return matches
        if location not in self.all_search_locations:
            return matches
        self.test_location_is_valid(location, query)

        if len(location) > 2 and location.startswith('@') and \
                    location[1:] in self.db_prefs['grouped_search_terms']:

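Editor's note: the hunks above make the ordering operators ``None``-safe and fold the missing-value and rating handling into the ``cast`` lambdas. The standalone sketch below is editorial, not calibre code; it reproduces the same cast/adjust/compare order with made-up values::

    # Editorial sketch of the None-safe numeric comparison used above (not calibre code).
    def numeric_match(op, query, stored, dt='int'):
        relops = {
            '=':  lambda r, q: r == q,
            '>':  lambda r, q: r is not None and r > q,
            '<':  lambda r, q: r is not None and r < q,
            '!=': lambda r, q: r != q,
            '>=': lambda r, q: r is not None and r >= q,
            '<=': lambda r, q: r is not None and r <= q,
        }
        if dt == 'rating':
            cast = lambda x: 0 if x is None else int(x)   # a missing rating counts as 0
            adjust = lambda x: x / 2.0                     # stored 0-10 -> displayed 0-5 stars
        else:
            cast, adjust = int, (lambda x: x)
        # Cast and adjust the stored value, leaving None alone for non-rating columns
        v = adjust(cast(stored)) if (stored is not None or dt == 'rating') else None
        return relops[op](v, cast(query))

    print(numeric_match('>=', '4', 8, dt='rating'))     # True: stored 8 is 4 stars
    print(numeric_match('>=', '4', None, dt='rating'))  # False: unrated book
    print(numeric_match('>', '100', None, dt='int'))    # False: None is guarded, no TypeError
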
@ -823,7 +823,8 @@ class LibraryDatabase2(LibraryDatabase, SchemaUpgrade, CustomColumns):
                pass
        return (path, mi, sequence)

    def get_metadata(self, idx, index_is_id=False, get_cover=False):
    def get_metadata(self, idx, index_is_id=False, get_cover=False,
                     get_user_categories=True):
        '''
        Convenience method to return metadata as a :class:`Metadata` object.
        Note that the list of formats is not verified.

@ -853,6 +854,7 @@ class LibraryDatabase2(LibraryDatabase, SchemaUpgrade, CustomColumns):
        mi.uuid = row[fm['uuid']]
        mi.title_sort = row[fm['sort']]
        mi.last_modified = row[fm['last_modified']]
        mi.size = row[fm['size']]
        formats = row[fm['formats']]
        if not formats:
            formats = None

@ -882,6 +884,7 @@ class LibraryDatabase2(LibraryDatabase, SchemaUpgrade, CustomColumns):

        user_cats = self.prefs['user_categories']
        user_cat_vals = {}
        if get_user_categories:
            for ucat in user_cats:
                res = []
                for name,cat,ign in user_cats[ucat]:

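Editor's note: the hunks above thread a new ``get_user_categories`` keyword through ``LibraryDatabase2.get_metadata`` so that composite-column refreshes can skip the user-category lookup. A hedged usage sketch, assuming ``db`` is an open ``LibraryDatabase2`` and ``book_id`` a valid id in it::

    # Editorial sketch; db and book_id are assumed to already exist.
    mi = db.get_metadata(book_id, index_is_id=True, get_cover=False,
            get_user_categories=False)   # skip the user-category computation
    print(mi.title, mi.authors)
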
@ -22,6 +22,8 @@ First start the |app| content server as shown below::

    calibre-server --url-prefix /calibre --port 8080

The key parameter here is ``--url-prefix /calibre``. This causes the content server to serve all URLs prefixed by calibre. To see this in action, visit ``http://localhost:8080/calibre`` in your browser. You should see the normal content server website, but now it will run under /calibre.

Now suppose you are using Apache as your main server. First enable the proxy modules in apache, by adding the following to :file:`httpd.conf`::

    LoadModule proxy_module modules/mod_proxy.so

@ -75,12 +75,20 @@ class BaseJob(object):
            self._run_state = self.RUNNING
            self._status_text = _('Working...')

        while consume_notifications:
        if consume_notifications:
            return self.consume_notifications()
        return False

    def consume_notifications(self):
        got_notification = False
        while self.notifications is not None:
            try:
                self.percent, self._message = self.notifications.get_nowait()
                self.percent *= 100.
                got_notification = True
            except Empty:
                break
        return got_notification

    @property
    def status_text(self):

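Editor's note: the change above replaces the old ``while consume_notifications:`` loop with a reusable ``consume_notifications`` method, which the new threaded job server can poll directly. The underlying pattern is the standard non-blocking ``Queue`` drain, sketched here in isolation with illustrative names::

    # Editorial sketch of the non-blocking drain pattern (names are illustrative).
    from Queue import Queue, Empty  # Python 2, as in this codebase

    notifications = Queue()
    notifications.put((0.5, 'half way there'))

    def drain(q):
        got_notification = False
        while True:
            try:
                frac, msg = q.get_nowait()
                got_notification = True
                print('%d%% %s' % (frac * 100, msg))
            except Empty:
                break
        return got_notification

    drain(notifications)
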
@ -20,7 +20,7 @@ import sys, string, operator

from calibre.utils.pyparsing import CaselessKeyword, Group, Forward, \
        CharsNotIn, Suppress, OneOrMore, MatchFirst, CaselessLiteral, \
        Optional, NoMatch, ParseException, QuotedString
        Optional, NoMatch, ParseException, QuotedString, Word
from calibre.constants import preferred_encoding
from calibre.utils.icu import sort_key

@ -128,12 +128,8 @@ class SearchQueryParser(object):
        self._tests_failed = False
        self.optimize = optimize
        # Define a token
        standard_locations = map(lambda x : CaselessLiteral(x)+Suppress(':'),
                                 locations)
        location = NoMatch()
        for l in standard_locations:
            location |= l
        location = Optional(location, default='all')
        self.standard_locations = locations
        location = Optional(Word(string.ascii_letters+'#')+Suppress(':'), default='all')
        word_query = CharsNotIn(string.whitespace + '()')
        #quoted_query = Suppress('"')+CharsNotIn('"')+Suppress('"')
        quoted_query = QuotedString('"', escChar='\\')

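Editor's note: the grammar change above replaces the per-column ``CaselessLiteral`` alternatives with a single ``Word(string.ascii_letters+'#') + Suppress(':')`` token, deferring validation to ``test_location_is_valid`` below. A tiny standalone sketch of that relaxed location grammar, using the stock ``pyparsing`` package rather than calibre's bundled copy::

    # Editorial sketch (stock pyparsing, not calibre's bundled copy).
    import string
    from pyparsing import Word, Suppress, Optional, CharsNotIn

    location = Optional(Word(string.ascii_letters + '#') + Suppress(':'), default='all')
    token = location + CharsNotIn(string.whitespace + '()')

    print(token.parseString('author:tolstoy').asList())  # ['author', 'tolstoy']
    print(token.parseString('tolstoy').asList())         # ['all', 'tolstoy']
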
@ -250,7 +246,14 @@ class SearchQueryParser(object):
            raise ParseException(query, len(query), 'undefined saved search', self)
        return self._get_matches(location, query, candidates)

    def test_location_is_valid(self, location, query):
        if location not in self.standard_locations:
            raise ParseException(query, len(query),
                _('No column exists with lookup name ') + location, self)

    def _get_matches(self, location, query, candidates):
        location = location.lower()
        self.test_location_is_valid(location, query)
        if self.optimize:
            return self.get_matches(location, query, candidates=candidates)
        else: