https://github.com/kovidgoyal/calibre.git
commit 61c45eef39
Merge from trunk

@@ -1,5 +1,5 @@
 __license__ = 'GPL v3'
-__copyright__ = '2010, Darko Miletic <darko.miletic at gmail.com>'
+__copyright__ = '2010-2011, Darko Miletic <darko.miletic at gmail.com>'
 '''
 www.eluniversal.com
 '''
@@ -15,12 +15,20 @@ class ElUniversal(BasicNewsRecipe):
     max_articles_per_feed = 100
     no_stylesheets = True
     use_embedded_content = False
+    remove_empty_feeds = True
     encoding = 'cp1252'
     publisher = 'El Universal'
     category = 'news, Caracas, Venezuela, world'
     language = 'es_VE'
+    publication_type = 'newspaper'
     cover_url = strftime('http://static.eluniversal.com/%Y/%m/%d/portada.jpg')
+    extra_css = """
+        .txt60{font-family: Tahoma,Geneva,sans-serif; font-size: small}
+        .txt29{font-family: Tahoma,Geneva,sans-serif; font-size: small; color: gray}
+        .txt38{font-family: Georgia,"Times New Roman",Times,serif; font-size: xx-large}
+        .txt35{font-family: Georgia,"Times New Roman",Times,serif; font-size: large}
+        body{font-family: Verdana,Arial,Helvetica,sans-serif}
+    """
     conversion_options = {
         'comments'  : description
        ,'tags'      : category
@@ -28,10 +36,11 @@ class ElUniversal(BasicNewsRecipe):
       ,'publisher' : publisher
     }

-    keep_only_tags = [dict(name='div', attrs={'class':'Nota'})]
+    remove_tags_before=dict(attrs={'class':'header-print MB10'})
+    remove_tags_after= dict(attrs={'id':'SizeText'})
     remove_tags = [
-        dict(name=['object','link','script','iframe'])
-       ,dict(name='div',attrs={'class':'Herramientas'})
+        dict(name=['object','link','script','iframe','meta'])
+       ,dict(attrs={'class':'header-print MB10'})
     ]

     feeds = [
@@ -1,5 +1,6 @@
 from calibre.web.feeds.news import BasicNewsRecipe
 import re
+from datetime import date, timedelta

 class HBR(BasicNewsRecipe):

@@ -12,13 +13,14 @@ class HBR(BasicNewsRecipe):
     no_stylesheets = True

     LOGIN_URL = 'http://hbr.org/login?request_url=/'
-    INDEX = 'http://hbr.org/current'
+    INDEX = 'http://hbr.org/archive-toc/BR'

     keep_only_tags = [dict(name='div', id='pageContainer')]
     remove_tags = [dict(id=['mastheadContainer', 'magazineHeadline',
         'articleToolbarTopRD', 'pageRightSubColumn', 'pageRightColumn',
         'todayOnHBRListWidget', 'mostWidget', 'keepUpWithHBR',
         'mailingListTout', 'partnerCenter', 'pageFooter',
+        'superNavHeadContainer', 'hbrDisqus',
         'articleToolbarTop', 'articleToolbarBottom', 'articleToolbarRD']),
         dict(name='iframe')]
     extra_css = '''
@@ -55,9 +57,14 @@ class HBR(BasicNewsRecipe):


     def hbr_get_toc(self):
-        soup = self.index_to_soup(self.INDEX)
-        url = soup.find('a', text=lambda t:'Full Table of Contents' in t).parent.get('href')
-        return self.index_to_soup('http://hbr.org'+url)
+        today = date.today()
+        future = today + timedelta(days=30)
+        for x in [x.strftime('%y%m') for x in (future, today)]:
+            url = self.INDEX + x
+            soup = self.index_to_soup(url)
+            if not soup.find(text='Issue Not Found'):
+                return soup
+        raise Exception('Could not find current issue')

     def hbr_parse_section(self, container, feeds):
         current_section = None
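
A minimal illustration of the month-key URL probing that the rewritten hbr_get_toc() above performs (the date below is an invented example, not from the commit):

    from datetime import date, timedelta

    INDEX = 'http://hbr.org/archive-toc/BR'
    today = date(2011, 7, 20)            # example date only
    future = today + timedelta(days=30)
    # Try the upcoming issue first, then the current one:
    for key in [d.strftime('%y%m') for d in (future, today)]:
        print(INDEX + key)               # .../BR1108, then .../BR1107
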
@@ -179,17 +179,17 @@ class MPRecipe(BasicNewsRecipe):
     def get_dtlocal(self):
         dt_utc = datetime.datetime.utcnow()
         if __Region__ == 'Hong Kong':
-            # convert UTC to local hk time - at HKT 4.30am, all news are available
-            dt_local = dt_utc + datetime.timedelta(8.0/24) - datetime.timedelta(4.5/24)
-            # dt_local = dt_utc.astimezone(pytz.timezone('Asia/Hong_Kong')) - datetime.timedelta(4.5/24)
+            # convert UTC to local hk time - at HKT 5.30am, all news are available
+            dt_local = dt_utc + datetime.timedelta(8.0/24) - datetime.timedelta(5.5/24)
+            # dt_local = dt_utc.astimezone(pytz.timezone('Asia/Hong_Kong')) - datetime.timedelta(5.5/24)
         elif __Region__ == 'Vancouver':
-            # convert UTC to local Vancouver time - at PST time 4.30am, all news are available
-            dt_local = dt_utc + datetime.timedelta(-8.0/24) - datetime.timedelta(4.5/24)
-            #dt_local = dt_utc.astimezone(pytz.timezone('America/Vancouver')) - datetime.timedelta(4.5/24)
+            # convert UTC to local Vancouver time - at PST time 5.30am, all news are available
+            dt_local = dt_utc + datetime.timedelta(-8.0/24) - datetime.timedelta(5.5/24)
+            #dt_local = dt_utc.astimezone(pytz.timezone('America/Vancouver')) - datetime.timedelta(5.5/24)
         elif __Region__ == 'Toronto':
-            # convert UTC to local Toronto time - at EST time 4.30am, all news are available
-            dt_local = dt_utc + datetime.timedelta(-5.0/24) - datetime.timedelta(4.5/24)
-            #dt_local = dt_utc.astimezone(pytz.timezone('America/Toronto')) - datetime.timedelta(4.5/24)
+            # convert UTC to local Toronto time - at EST time 8.30am, all news are available
+            dt_local = dt_utc + datetime.timedelta(-5.0/24) - datetime.timedelta(8.5/24)
+            #dt_local = dt_utc.astimezone(pytz.timezone('America/Toronto')) - datetime.timedelta(8.5/24)
         return dt_local

     def get_fetchdate(self):
@@ -179,17 +179,17 @@ class MPRecipe(BasicNewsRecipe):
     def get_dtlocal(self):
         dt_utc = datetime.datetime.utcnow()
         if __Region__ == 'Hong Kong':
-            # convert UTC to local hk time - at HKT 4.30am, all news are available
-            dt_local = dt_utc + datetime.timedelta(8.0/24) - datetime.timedelta(4.5/24)
-            # dt_local = dt_utc.astimezone(pytz.timezone('Asia/Hong_Kong')) - datetime.timedelta(4.5/24)
+            # convert UTC to local hk time - at HKT 5.30am, all news are available
+            dt_local = dt_utc + datetime.timedelta(8.0/24) - datetime.timedelta(5.5/24)
+            # dt_local = dt_utc.astimezone(pytz.timezone('Asia/Hong_Kong')) - datetime.timedelta(5.5/24)
         elif __Region__ == 'Vancouver':
-            # convert UTC to local Vancouver time - at PST time 4.30am, all news are available
-            dt_local = dt_utc + datetime.timedelta(-8.0/24) - datetime.timedelta(4.5/24)
-            #dt_local = dt_utc.astimezone(pytz.timezone('America/Vancouver')) - datetime.timedelta(4.5/24)
+            # convert UTC to local Vancouver time - at PST time 5.30am, all news are available
+            dt_local = dt_utc + datetime.timedelta(-8.0/24) - datetime.timedelta(5.5/24)
+            #dt_local = dt_utc.astimezone(pytz.timezone('America/Vancouver')) - datetime.timedelta(5.5/24)
         elif __Region__ == 'Toronto':
-            # convert UTC to local Toronto time - at EST time 4.30am, all news are available
-            dt_local = dt_utc + datetime.timedelta(-5.0/24) - datetime.timedelta(4.5/24)
-            #dt_local = dt_utc.astimezone(pytz.timezone('America/Toronto')) - datetime.timedelta(4.5/24)
+            # convert UTC to local Toronto time - at EST time 8.30am, all news are available
+            dt_local = dt_utc + datetime.timedelta(-5.0/24) - datetime.timedelta(8.5/24)
+            #dt_local = dt_utc.astimezone(pytz.timezone('America/Toronto')) - datetime.timedelta(8.5/24)
         return dt_local

     def get_fetchdate(self):
@@ -179,17 +179,17 @@ class MPRecipe(BasicNewsRecipe):
     def get_dtlocal(self):
         dt_utc = datetime.datetime.utcnow()
         if __Region__ == 'Hong Kong':
-            # convert UTC to local hk time - at HKT 4.30am, all news are available
-            dt_local = dt_utc + datetime.timedelta(8.0/24) - datetime.timedelta(4.5/24)
-            # dt_local = dt_utc.astimezone(pytz.timezone('Asia/Hong_Kong')) - datetime.timedelta(4.5/24)
+            # convert UTC to local hk time - at HKT 5.30am, all news are available
+            dt_local = dt_utc + datetime.timedelta(8.0/24) - datetime.timedelta(5.5/24)
+            # dt_local = dt_utc.astimezone(pytz.timezone('Asia/Hong_Kong')) - datetime.timedelta(5.5/24)
         elif __Region__ == 'Vancouver':
-            # convert UTC to local Vancouver time - at PST time 4.30am, all news are available
-            dt_local = dt_utc + datetime.timedelta(-8.0/24) - datetime.timedelta(4.5/24)
-            #dt_local = dt_utc.astimezone(pytz.timezone('America/Vancouver')) - datetime.timedelta(4.5/24)
+            # convert UTC to local Vancouver time - at PST time 5.30am, all news are available
+            dt_local = dt_utc + datetime.timedelta(-8.0/24) - datetime.timedelta(5.5/24)
+            #dt_local = dt_utc.astimezone(pytz.timezone('America/Vancouver')) - datetime.timedelta(5.5/24)
         elif __Region__ == 'Toronto':
-            # convert UTC to local Toronto time - at EST time 4.30am, all news are available
-            dt_local = dt_utc + datetime.timedelta(-5.0/24) - datetime.timedelta(4.5/24)
-            #dt_local = dt_utc.astimezone(pytz.timezone('America/Toronto')) - datetime.timedelta(4.5/24)
+            # convert UTC to local Toronto time - at EST time 8.30am, all news are available
+            dt_local = dt_utc + datetime.timedelta(-5.0/24) - datetime.timedelta(8.5/24)
+            #dt_local = dt_utc.astimezone(pytz.timezone('America/Toronto')) - datetime.timedelta(8.5/24)
         return dt_local

     def get_fetchdate(self):
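
The same availability cut-off change is applied in all three MPRecipe region hunks above. A minimal sketch of the fractional-day timedelta arithmetic, assuming the Hong Kong case:

    import datetime

    # UTC+8 gives HKT; subtracting a further 5.5 hours means the fetch date
    # only rolls over once the 5:30am HKT publication cut-off has passed.
    dt_utc = datetime.datetime.utcnow()
    dt_local = dt_utc + datetime.timedelta(8.0/24) - datetime.timedelta(5.5/24)
    print(dt_local.strftime('%Y%m%d'))
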
@@ -1,6 +1,7 @@
 CREATE TABLE authors ( id   INTEGER PRIMARY KEY,
                        name TEXT NOT NULL COLLATE NOCASE,
                        sort TEXT COLLATE NOCASE,
+                       link TEXT NOT NULL DEFAULT "",
                        UNIQUE(name)
                      );
 CREATE TABLE books ( id      INTEGER PRIMARY KEY AUTOINCREMENT,
@@ -545,4 +546,4 @@ CREATE TRIGGER series_update_trg
 BEGIN
     UPDATE series SET sort=NEW.name WHERE id=NEW.id;
 END;
-pragma user_version=20;
+pragma user_version=21;
@@ -105,11 +105,13 @@ class ManyToManyTable(ManyToOneTable):
 class AuthorsTable(ManyToManyTable):

     def read_id_maps(self, db):
+        self.alink_map = {}
         for row in db.conn.execute(
-                'SELECT id, name, sort FROM authors'):
+                'SELECT id, name, sort, link FROM authors'):
             self.id_map[row[0]] = row[1]
             self.extra_map[row[0]] = (row[2] if row[2] else
                                       author_to_author_sort(row[1]))
+            self.alink_map[row[0]] = row[3]

 class FormatsTable(ManyToManyTable):

@@ -86,6 +86,8 @@ CALIBRE_METADATA_FIELDS = frozenset([
     # a dict of user category names, where the value is a list of item names
     # from the book that are in that category
     'user_categories',
+    # a dict of author to an associated hyperlink
+    'author_link_map',

     ]
 )
@@ -34,6 +34,7 @@ NULL_VALUES = {
     'authors'          : [_('Unknown')],
     'title'            : _('Unknown'),
     'user_categories'  : {},
+    'author_link_map'  : {},
     'language'         : 'und'
 }

@@ -474,7 +474,7 @@ def serialize_user_metadata(metadata_elem, all_user_metadata, tail='\n'+(' '*8))
     metadata_elem.append(meta)


-def dump_user_categories(cats):
+def dump_dict(cats):
    if not cats:
        cats = {}
    from calibre.ebooks.metadata.book.json_codec import object_to_unicode
@@ -537,8 +537,9 @@ class OPF(object): # {{{
             formatter=parse_date, renderer=isoformat)
     user_categories = MetadataField('user_categories', is_dc=False,
             formatter=json.loads,
-            renderer=dump_user_categories)
+            renderer=dump_dict)
+    author_link_map = MetadataField('author_link_map', is_dc=False,
+            formatter=json.loads, renderer=dump_dict)

     def __init__(self, stream, basedir=os.getcwdu(), unquote_urls=True,
             populate_spine=True):
@@ -1039,7 +1040,7 @@ class OPF(object): # {{{
         for attr in ('title', 'authors', 'author_sort', 'title_sort',
                      'publisher', 'series', 'series_index', 'rating',
                      'isbn', 'tags', 'category', 'comments',
-                     'pubdate', 'user_categories'):
+                     'pubdate', 'user_categories', 'author_link_map'):
             val = getattr(mi, attr, None)
             if val is not None and val != [] and val != (None, None):
                 setattr(self, attr, val)
@@ -1336,6 +1337,8 @@ def metadata_to_opf(mi, as_string=True):
         for tag in mi.tags:
             factory(DC('subject'), tag)
     meta = lambda n, c: factory('meta', name='calibre:'+n, content=c)
+    if getattr(mi, 'author_link_map', None) is not None:
+        meta('author_link_map', dump_dict(mi.author_link_map))
     if mi.series:
         meta('series', mi.series)
     if mi.series_index is not None:
@@ -1349,7 +1352,7 @@ def metadata_to_opf(mi, as_string=True):
     if mi.title_sort:
         meta('title_sort', mi.title_sort)
     if mi.user_categories:
-        meta('user_categories', dump_user_categories(mi.user_categories))
+        meta('user_categories', dump_dict(mi.user_categories))

     serialize_user_metadata(metadata, mi.get_all_user_metadata(False))

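
For illustration, the new author_link_map field serialized by metadata_to_opf() above is a JSON-encoded dict mapping author names to hyperlinks; a hypothetical value (author and URL invented) would look like:

    import json

    # Invented example entry, not taken from the commit.
    author_link_map = {'Charles Dickens':
            'http://en.wikipedia.org/wiki/Charles_Dickens'}
    print(json.dumps(author_link_map))
    # {"Charles Dickens": "http://en.wikipedia.org/wiki/Charles_Dickens"}
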
@@ -83,13 +83,14 @@ gprefs.defaults['tags_browser_partition_method'] = 'first letter'
 gprefs.defaults['tags_browser_collapse_at'] = 100
 gprefs.defaults['edit_metadata_single_layout'] = 'default'
 gprefs.defaults['book_display_fields'] = [
-        ('title', False), ('authors', False), ('formats', True),
+        ('title', False), ('authors', True), ('formats', True),
         ('series', True), ('identifiers', True), ('tags', True),
         ('path', True), ('publisher', False), ('rating', False),
         ('author_sort', False), ('sort', False), ('timestamp', False),
         ('uuid', False), ('comments', True), ('id', False), ('pubdate', False),
         ('last_modified', False), ('size', False),
         ]
+gprefs.defaults['default_author_link'] = 'http://en.wikipedia.org/w/index.php?search={author}'

 # }}}

@@ -5,6 +5,7 @@ __license__ = 'GPL v3'
 __copyright__ = '2010, Kovid Goyal <kovid@kovidgoyal.net>'
 __docformat__ = 'restructuredtext en'

+import urllib2

 from PyQt4.Qt import (QPixmap, QSize, QWidget, Qt, pyqtSignal, QUrl,
     QPropertyAnimation, QEasingCurve, QApplication, QFontInfo,
@@ -23,6 +24,7 @@ from calibre.library.comments import comments_to_html
 from calibre.gui2 import (config, open_local_file, open_url, pixmap_to_data,
         gprefs)
 from calibre.utils.icu import sort_key
+from calibre.utils.formatter import EvalFormatter

 def render_html(mi, css, vertical, widget, all_fields=False): # {{{
     table = render_data(mi, all_fields=all_fields,
@@ -121,6 +123,27 @@ def render_data(mi, use_roman_numbers=True, all_fields=False):
             if links:
                 ans.append((field, u'<td class="title">%s</td><td>%s</td>'%(
                     _('Ids')+':', links)))
+        elif field == 'authors' and not isdevice:
+            authors = []
+            formatter = EvalFormatter()
+            for aut in mi.authors:
+                if mi.author_link_map[aut]:
+                    link = mi.author_link_map[aut]
+                elif gprefs.get('default_author_link'):
+                    vals = {'author': aut.replace(' ', '+')}
+                    try:
+                        vals['author_sort'] = mi.author_sort_map[aut].replace(' ', '+')
+                    except:
+                        vals['author_sort'] = aut.replace(' ', '+')
+                    link = formatter.safe_format(
+                            gprefs.get('default_author_link'), vals, '', vals)
+                if link:
+                    link = prepare_string_for_xml(link)
+                    authors.append(u'<a href="%s">%s</a>'%(link, aut))
+                else:
+                    authors.append(aut)
+            ans.append((field, u'<td class="title">%s</td><td>%s</td>'%(name,
+                u' & '.join(authors))))
         else:
             val = mi.format_field(field)[-1]
             if val is None:
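
A rough sketch of how the default_author_link template above expands; plain str.format stands in here for calibre's EvalFormatter, which additionally supports template functions:

    # Example values only; EvalFormatter is the real mechanism in the code above.
    template = 'http://en.wikipedia.org/w/index.php?search={author}'
    vals = {'author': 'Charles+Dickens', 'author_sort': 'Dickens,+Charles'}
    print(template.format(**vals))
    # http://en.wikipedia.org/w/index.php?search=Charles+Dickens
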
@@ -33,7 +33,7 @@ class EditAuthorsDialog(QDialog, Ui_EditAuthorsDialog):

         # Set up the column headings
         self.table.setSelectionMode(QAbstractItemView.SingleSelection)
-        self.table.setColumnCount(2)
+        self.table.setColumnCount(3)
         self.down_arrow_icon = QIcon(I('arrow-down.png'))
         self.up_arrow_icon = QIcon(I('arrow-up.png'))
         self.blank_icon = QIcon(I('blank.png'))
@@ -43,26 +43,33 @@ class EditAuthorsDialog(QDialog, Ui_EditAuthorsDialog):
         self.aus_col = QTableWidgetItem(_('Author sort'))
         self.table.setHorizontalHeaderItem(1, self.aus_col)
         self.aus_col.setIcon(self.up_arrow_icon)
+        self.aul_col = QTableWidgetItem(_('Link'))
+        self.table.setHorizontalHeaderItem(2, self.aul_col)
+        self.aus_col.setIcon(self.blank_icon)

         # Add the data
         self.authors = {}
         auts = db.get_authors_with_ids()
         self.table.setRowCount(len(auts))
         select_item = None
-        for row, (id, author, sort) in enumerate(auts):
+        for row, (id, author, sort, link) in enumerate(auts):
             author = author.replace('|', ',')
-            self.authors[id] = (author, sort)
+            self.authors[id] = (author, sort, link)
             aut = tableItem(author)
             aut.setData(Qt.UserRole, id)
             sort = tableItem(sort)
+            link = tableItem(link)
             self.table.setItem(row, 0, aut)
             self.table.setItem(row, 1, sort)
+            self.table.setItem(row, 2, link)
             if id == id_to_select:
                 if select_sort:
                     select_item = sort
                 else:
                     select_item = aut
         self.table.resizeColumnsToContents()
+        if self.table.columnWidth(2) < 200:
+            self.table.setColumnWidth(2, 200)

         # set up the cellChanged signal only after the table is filled
         self.table.cellChanged.connect(self.cell_changed)
@@ -236,9 +243,10 @@ class EditAuthorsDialog(QDialog, Ui_EditAuthorsDialog):
             id = self.table.item(row, 0).data(Qt.UserRole).toInt()[0]
             aut = unicode(self.table.item(row, 0).text()).strip()
             sort = unicode(self.table.item(row, 1).text()).strip()
-            orig_aut,orig_sort = self.authors[id]
-            if orig_aut != aut or orig_sort != sort:
-                self.result.append((id, orig_aut, aut, sort))
+            link = unicode(self.table.item(row, 2).text()).strip()
+            orig_aut,orig_sort,orig_link = self.authors[id]
+            if orig_aut != aut or orig_sort != sort or orig_link != link:
+                self.result.append((id, orig_aut, aut, sort, link))

     def do_recalc_author_sort(self):
         self.table.cellChanged.disconnect()
@@ -276,6 +284,6 @@ class EditAuthorsDialog(QDialog, Ui_EditAuthorsDialog):
                 c.setText(author_to_author_sort(aut))
                 item = c
             else:
-                item = self.table.item(row, 1)
+                item = self.table.item(row, col)
             self.table.setCurrentItem(item)
             self.table.scrollToItem(item)
@@ -54,7 +54,7 @@ class DBRestore(QDialog):
     def reject(self):
         self.rejected = True
         self.restorer.progress_callback = lambda x, y: x
-        QDialog.rejecet(self)
+        QDialog.reject(self)

     def update(self):
         if self.restorer.is_alive():
@@ -1092,11 +1092,12 @@ class IdentifiersEdit(QLineEdit): # {{{
             for x in parts:
                 c = x.split(':')
                 if len(c) > 1:
-                    if c[0] == 'isbn':
+                    itype = c[0].lower()
+                    if itype == 'isbn':
                         v = check_isbn(c[1])
                         if v is not None:
                             c[1] = v
-                    ans[c[0]] = c[1]
+                    ans[itype] = c[1]
             return ans
         def fset(self, val):
             if not val:
@@ -1112,7 +1113,7 @@ class IdentifiersEdit(QLineEdit): # {{{
                     if v is not None:
                         val[k] = v
             ids = sorted(val.iteritems(), key=keygen)
-            txt = ', '.join(['%s:%s'%(k, v) for k, v in ids])
+            txt = ', '.join(['%s:%s'%(k.lower(), v) for k, v in ids])
             self.setText(txt.strip())
             self.setCursorPosition(0)
         return property(fget=fget, fset=fset)
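
An illustration of the case normalisation introduced above: identifier types are lowercased both when the text box is parsed and when it is rendered back (the input string is an invented example):

    text = 'ISBN:9780316044981, Amazon:B003XYZ'   # hypothetical user input
    ans = {}
    for part in text.split(','):
        c = part.strip().split(':')
        if len(c) > 1:
            ans[c[0].lower()] = c[1]
    print(ans)   # {'isbn': '9780316044981', 'amazon': 'B003XYZ'}
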
@@ -138,6 +138,7 @@ class ConfigWidget(ConfigWidgetBase, Ui_Form):
                     (_('Partitioned'), 'partition')]
         r('tags_browser_partition_method', gprefs, choices=choices)
         r('tags_browser_collapse_at', gprefs)
+        r('default_author_link', gprefs)

         choices = set([k for k in db.field_metadata.all_field_keys()
                 if db.field_metadata[k]['is_category'] and
@@ -192,7 +192,7 @@
      <string>Book Details</string>
     </attribute>
     <layout class="QGridLayout" name="gridLayout_12">
-    <item row="0" column="0" rowspan="2">
+    <item row="1" column="0" rowspan="2">
      <widget class="QGroupBox" name="groupBox">
       <property name="title">
        <string>Select displayed metadata</string>
@@ -243,6 +243,31 @@
      </layout>
     </widget>
    </item>
+   <item row="0" column="0">
+    <layout class="QHBoxLayout">
+     <item>
+      <widget class="QLabel" name="label">
+       <property name="text">
+        <string>Default author link template:</string>
+       </property>
+       <property name="buddy">
+        <cstring>opt_default_author_link</cstring>
+       </property>
+      </widget>
+     </item>
+     <item>
+      <widget class="QLineEdit" name="opt_default_author_link">
+       <property name="toolTip">
+        <string><p>Enter a template to be used to create a link for
+an author in the books information dialog. This template will
+be used when no link has been provided for the author using
+Manage Authors. You can use the values {author} and
+{author_sort}, and any template function.</string>
+       </property>
+      </widget>
+     </item>
+    </layout>
+   </item>
    <item row="0" column="1">
     <widget class="QCheckBox" name="opt_use_roman_numerals_for_series_number">
      <property name="text">
@@ -270,12 +270,14 @@ class TagBrowserMixin(object): # {{{
         editor = EditAuthorsDialog(parent, db, id, select_sort)
         d = editor.exec_()
         if d:
-            for (id, old_author, new_author, new_sort) in editor.result:
+            for (id, old_author, new_author, new_sort, new_link) in editor.result:
                 if old_author != new_author:
                     # The id might change if the new author already exists
                     id = db.rename_author(id, new_author)
                 db.set_sort_field_for_author(id, unicode(new_sort),
                                              commit=False, notify=False)
+                db.set_link_field_for_author(id, unicode(new_link),
+                                             commit=False, notify=False)
             db.commit()
             self.library_view.model().refresh()
             self.tags_view.recount()
@@ -136,7 +136,7 @@ class TagsView(QTreeView): # {{{
         return expanded_categories, state_map

     def reread_collapse_parameters(self):
-        self._model.reread_collapse_parameters(self.get_state()[1])
+        self._model.reread_collapse_model(self.get_state()[1])

     def set_database(self, db, tag_match, sort_by):
         self._model.set_database(db)
@@ -367,7 +367,7 @@ class LibraryDatabase2(LibraryDatabase, SchemaUpgrade, CustomColumns):
             'uuid',
             'has_cover',
             ('au_map', 'authors', 'author',
-                'aum_sortconcat(link.id, authors.name, authors.sort)'),
+                'aum_sortconcat(link.id, authors.name, authors.sort, authors.link)'),
             'last_modified',
             '(SELECT identifiers_concat(type, val) FROM identifiers WHERE identifiers.book=books.id) identifiers',
         ]
@@ -894,13 +894,17 @@ class LibraryDatabase2(LibraryDatabase, SchemaUpgrade, CustomColumns):
             aut_list = []
         aum = []
         aus = {}
-        for (author, author_sort) in aut_list:
-            aum.append(author.replace('|', ','))
-            aus[author] = author_sort.replace('|', ',')
+        aul = {}
+        for (author, author_sort, link) in aut_list:
+            aut = author.replace('|', ',')
+            aum.append(aut)
+            aus[aut] = author_sort.replace('|', ',')
+            aul[aut] = link
         mi.title = row[fm['title']]
         mi.authors = aum
         mi.author_sort = row[fm['author_sort']]
         mi.author_sort_map = aus
+        mi.author_link_map = aul
         mi.comments = row[fm['comments']]
         mi.publisher = row[fm['publisher']]
         mi.timestamp = row[fm['timestamp']]
@@ -1245,6 +1249,8 @@ class LibraryDatabase2(LibraryDatabase, SchemaUpgrade, CustomColumns):
                 ret = tempfile.SpooledTemporaryFile(max_size=SPOOL_SIZE)
                 shutil.copyfileobj(f, ret)
                 ret.seek(0)
+                # Various bits of code try to use the name as the default
+                # title when reading metadata, so set it
                 ret.name = f.name
             else:
                 ret = f.read()
@@ -2039,13 +2045,13 @@ class LibraryDatabase2(LibraryDatabase, SchemaUpgrade, CustomColumns):
     def authors_with_sort_strings(self, id, index_is_id=False):
         id = id if index_is_id else self.id(id)
         aut_strings = self.conn.get('''
-                SELECT authors.id, authors.name, authors.sort
+                SELECT authors.id, authors.name, authors.sort, authors.link
                 FROM authors, books_authors_link as bl
                 WHERE bl.book=? and authors.id=bl.author
                 ORDER BY bl.id''', (id,))
         result = []
-        for (id_, author, sort,) in aut_strings:
-            result.append((id_, author.replace('|', ','), sort))
+        for (id_, author, sort, link) in aut_strings:
+            result.append((id_, author.replace('|', ','), sort, link))
         return result

     # Given a book, return the author_sort string for authors of the book
@@ -2085,7 +2091,8 @@ class LibraryDatabase2(LibraryDatabase, SchemaUpgrade, CustomColumns):

         aum = self.authors_with_sort_strings(id_, index_is_id=True)
         self.data.set(id_, self.FIELD_MAP['au_map'],
-            ':#:'.join([':::'.join((au.replace(',', '|'), aus)) for (_, au, aus) in aum]),
+            ':#:'.join([':::'.join((au.replace(',', '|'), aus, aul))
+                for (_, au, aus, aul) in aum]),
             row_is_id=True)

     def _set_authors(self, id, authors, allow_case_change=False):
@@ -2436,7 +2443,7 @@ class LibraryDatabase2(LibraryDatabase, SchemaUpgrade, CustomColumns):
         self.conn.commit()

     def get_authors_with_ids(self):
-        result = self.conn.get('SELECT id,name,sort FROM authors')
+        result = self.conn.get('SELECT id,name,sort,link FROM authors')
         if not result:
             return []
         return result
@@ -2447,6 +2454,13 @@ class LibraryDatabase2(LibraryDatabase, SchemaUpgrade, CustomColumns):
                                     (author,), all=False)
         return result

+    def set_link_field_for_author(self, aid, link, commit=True, notify=False):
+        if not link:
+            link = ''
+        self.conn.execute('UPDATE authors SET link=? WHERE id=?', (link.strip(), aid))
+        if commit:
+            self.conn.commit()
+
     def set_sort_field_for_author(self, old_id, new_sort, commit=True, notify=False):
         self.conn.execute('UPDATE authors SET sort=? WHERE id=?', \
                               (new_sort.strip(), old_id))
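
For illustration, the au_map value built above packs one (author, sort, link) record per author with ':::' and joins records with ':#:'; a minimal round-trip sketch with invented data:

    # Invented example row: (id, author, author_sort, link)
    aum = [(1, 'Dickens, Charles', 'Dickens, Charles',
            'http://example.com/dickens')]
    packed = ':#:'.join([':::'.join((au.replace(',', '|'), aus, aul))
                         for (_, au, aus, aul) in aum])
    print(packed)
    print([rec.split(':::') for rec in packed.split(':#:')])   # unpack again
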
@@ -53,6 +53,7 @@ class Restore(Thread):
         self.mismatched_dirs = []
         self.successes = 0
         self.tb = None
+        self.authors_links = {}

     @property
     def errors_occurred(self):
@@ -160,6 +161,12 @@ class Restore(Thread):
             else:
                 self.mismatched_dirs.append(dirpath)

+        alm = mi.get('author_link_map', {})
+        for author, link in alm.iteritems():
+            existing_link, timestamp = self.authors_links.get(author, (None, None))
+            if existing_link is None or existing_link != link and timestamp < mi.timestamp:
+                self.authors_links[author] = (link, mi.timestamp)
+
     def create_cc_metadata(self):
         self.books.sort(key=itemgetter('timestamp'))
         self.custom_columns = {}
@@ -206,6 +213,11 @@ class Restore(Thread):
                 self.failed_restores.append((book, traceback.format_exc()))
             self.progress_callback(book['mi'].title, i+1)

+        for author in self.authors_links.iterkeys():
+            link, ign = self.authors_links[author]
+            db.conn.execute('UPDATE authors SET link=? WHERE name=?',
+                    (link, author.replace(',', '|')))
+        db.conn.commit()
         db.conn.close()

     def restore_book(self, book, db):
@@ -600,4 +600,14 @@ class SchemaUpgrade(object):
             with open(os.path.join(bdir, fname), 'wb') as f:
                 f.write(script)

+    def upgrade_version_20(self):
+        '''
+        Add a link column to the authors table.
+        '''
+
+        script = '''
+        ALTER TABLE authors ADD COLUMN link TEXT NOT NULL DEFAULT "";
+        '''
+        self.conn.executescript(script)
+
@@ -144,9 +144,9 @@ class AumSortedConcatenate(object):
     def __init__(self):
         self.ans = {}

-    def step(self, ndx, author, sort):
+    def step(self, ndx, author, sort, link):
         if author is not None:
-            self.ans[ndx] = author + ':::' + sort
+            self.ans[ndx] = ':::'.join((author, sort, link))

     def finalize(self):
         keys = self.ans.keys()
@@ -229,7 +229,7 @@ class DBThread(Thread):
             load_c_extensions(self.conn)
             self.conn.row_factory = sqlite.Row if self.row_factory else lambda cursor, row : list(row)
             self.conn.create_aggregate('concat', 1, Concatenate)
-            self.conn.create_aggregate('aum_sortconcat', 3, AumSortedConcatenate)
+            self.conn.create_aggregate('aum_sortconcat', 4, AumSortedConcatenate)
             self.conn.create_collation('PYNOCASE', partial(pynocase,
                 encoding=encoding))
             self.conn.create_function('title_sort', 1, title_sort)
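
A minimal standalone sketch of registering a four-argument SQLite aggregate, as the updated create_aggregate('aum_sortconcat', 4, ...) call above does; plain sqlite3 is used here, whereas calibre wraps its own connection class:

    import sqlite3

    class Concat4(object):
        def __init__(self):
            self.rows = []
        def step(self, ndx, author, sort, link):
            # Collect one ':::'-packed record per input row.
            if author is not None:
                self.rows.append(':::'.join((author, sort, link)))
        def finalize(self):
            return ':#:'.join(self.rows)

    conn = sqlite3.connect(':memory:')
    conn.create_aggregate('aum_sortconcat', 4, Concat4)
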
@@ -558,11 +558,16 @@ Most readers do not support this. You should complain to the manufacturer about

 Another alternative is to create a catalog in ebook form containing a listing of all the books in your calibre library, with their metadata. Click the arrow next to the convert button to access the catalog creation tool. And before you ask, no you cannot have the catalog "link directly to" books on your reader.

+How do I get |app| to use my HTTP proxy?
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+By default, |app| uses whatever proxy settings are set in your OS. Sometimes these are incorrect, for example, on windows if you don't use Internet Explorer then the proxy settings may not be up to date. You can tell |app| to use a particular proxy server by setting the http_proxy environment variable. The format of the variable is: http://username:password@servername you should ask your network admin to give you the correct value for this variable. Note that |app| only supports HTTP proxies not SOCKS proxies. You can see the current proxies used by |app| in Preferences->Miscellaneous.
+
 I want some feature added to |app|. What can I do?
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 You have two choices:
 1. Create a patch by hacking on |app| and send it to me for review and inclusion. See `Development <http://calibre-ebook.com/get-involved>`_.
-2. `Open a ticket <http://calibre-ebook.com/bugs>`_ (you have to register and login first). Remember that |app| development is done by volunteers, so if you get no response to your feature request, it means no one feels like implementing it.
+2. `Open a bug requesting the feature <http://calibre-ebook.com/bugs>`_ . Remember that |app| development is done by volunteers, so if you get no response to your feature request, it means no one feels like implementing it.

 Why doesn't |app| have an automatic update?
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
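
A minimal sketch of the environment-variable approach described in the new FAQ entry (host, port and credentials below are placeholders):

    import os

    # Placeholder proxy URL; substitute your own host and credentials.
    os.environ['http_proxy'] = 'http://username:password@proxy.example.com:8080'
    # Programs that honour this variable pick it up from the environment at startup.
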
|
Loading…
x
Reference in New Issue
Block a user