GRiker 2012-12-14 03:42:10 -07:00
commit 4b1915f976
112 changed files with 19797 additions and 19106 deletions

View File

@ -19,6 +19,37 @@
# new recipes:
# - title:
- version: 0.9.10
date: 2012-12-14
new features:
- title: "Drivers for Nextbook Premium 8 se, HTC Desire X and Emerson EM 543"
tickets: [1088149, 1088112, 1087978]
bug fixes:
- title: "Fix rich text delegate not working with Qt compiled in debug mode."
tickets: [1089011]
- title: "When deleting all books in the library, blank the book details panel"
- title: "Conversion: Fix malformed values in the bgcolor attribute causing conversion to abort"
- title: "Conversion: Fix heuristics applying incorrect style in some circumstances"
tickets: [1066507]
- title: "Possible fix for 64bit calibre not starting up on some Windows systems"
tickets: [1087816]
improved recipes:
- Sivil Dusunce
- Anchorage Daily News
- Le Monde
- Harpers
new recipes:
- title: Titanic
author: Krittika Goyal
- version: 0.9.9
date: 2012-12-07

View File

@ -9,11 +9,12 @@ class Adventure_zone(BasicNewsRecipe):
no_stylesheets = True
oldest_article = 20
max_articles_per_feed = 100
cover_url = 'http://www.adventure-zone.info/inne/logoaz_2012.png'
index='http://www.adventure-zone.info/fusion/'
use_embedded_content=False
preprocess_regexps = [(re.compile(r"<td class='capmain'>Komentarze</td>", re.IGNORECASE), lambda m: ''),
(re.compile(r'\<table .*?\>'), lambda match: ''),
(re.compile(r'\<tbody\>'), lambda match: '')]
(re.compile(r'</?table.*?>'), lambda match: ''),
(re.compile(r'</?tbody.*?>'), lambda match: '')]
remove_tags_before= dict(name='td', attrs={'class':'main-bg'})
remove_tags= [dict(name='img', attrs={'alt':'Drukuj'})]
remove_tags_after= dict(id='comments')
@ -36,11 +37,11 @@ class Adventure_zone(BasicNewsRecipe):
return feeds
def get_cover_url(self):
'''def get_cover_url(self):
soup = self.index_to_soup('http://www.adventure-zone.info/fusion/news.php')
cover=soup.find(id='box_OstatninumerAZ')
self.cover_url='http://www.adventure-zone.info/fusion/'+ cover.center.a.img['src']
return getattr(self, 'cover_url', self.cover_url)
return getattr(self, 'cover_url', self.cover_url)'''
def skip_ad_pages(self, soup):

View File

@ -5,14 +5,16 @@ class AdvancedUserRecipe1278347258(BasicNewsRecipe):
__author__ = 'rty'
oldest_article = 7
max_articles_per_feed = 100
auto_cleanup = True
feeds = [(u'Alaska News', u'http://www.adn.com/news/alaska/index.xml'),
(u'Business', u'http://www.adn.com/money/index.xml'),
(u'Sports', u'http://www.adn.com/sports/index.xml'),
(u'Politics', u'http://www.adn.com/politics/index.xml'),
(u'Lifestyles', u'http://www.adn.com/life/index.xml'),
(u'Iditarod', u'http://www.adn.com/iditarod/index.xml')
]
(u'Business', u'http://www.adn.com/money/index.xml'),
(u'Sports', u'http://www.adn.com/sports/index.xml'),
(u'Politics', u'http://www.adn.com/politics/index.xml'),
(u'Lifestyles', u'http://www.adn.com/life/index.xml'),
(u'Iditarod', u'http://www.adn.com/iditarod/index.xml')
]
description = ''''Alaska's Newspaper'''
publisher = 'http://www.adn.com'
category = 'news, Alaska, Anchorage'
@ -28,13 +30,13 @@ class AdvancedUserRecipe1278347258(BasicNewsRecipe):
conversion_options = {'linearize_tables':True}
masthead_url = 'http://media.adn.com/includes/assets/images/adn_logo.2.gif'
keep_only_tags = [
dict(name='div', attrs={'class':'left_col story_mainbar'}),
]
remove_tags = [
dict(name='div', attrs={'class':'story_tools'}),
dict(name='p', attrs={'class':'ad_label'}),
]
remove_tags_after = [
dict(name='div', attrs={'class':'advertisement'}),
]
#keep_only_tags = [
#dict(name='div', attrs={'class':'left_col story_mainbar'}),
#]
#remove_tags = [
#dict(name='div', attrs={'class':'story_tools'}),
#dict(name='p', attrs={'class':'ad_label'}),
#]
#remove_tags_after = [
#dict(name='div', attrs={'class':'advertisement'}),
#]

View File

@ -3,11 +3,11 @@ from calibre.web.feeds.news import BasicNewsRecipe
class Android_com_pl(BasicNewsRecipe):
title = u'Android.com.pl'
__author__ = 'fenuks'
description = 'Android.com.pl - biggest polish Android site'
description = u'Android.com.pl - to największe w Polsce centrum Android OS. Znajdziesz tu: nowości, forum, pomoc, recenzje, gry, aplikacje.'
category = 'Android, mobile'
language = 'pl'
use_embedded_content=True
cover_url =u'http://upload.wikimedia.org/wikipedia/commons/thumb/d/d7/Android_robot.svg/220px-Android_robot.svg.png'
cover_url =u'http://android.com.pl/wp-content/themes/android/images/logo.png'
oldest_article = 8
max_articles_per_feed = 100
feeds = [(u'Android', u'http://android.com.pl/component/content/frontpage/frontpage.feed?type=rss')]
feeds = [(u'Android', u'http://android.com.pl/feed/')]

View File

@ -7,18 +7,64 @@ class Dzieje(BasicNewsRecipe):
cover_url = 'http://www.dzieje.pl/sites/default/files/dzieje_logo.png'
category = 'history'
language = 'pl'
index='http://dzieje.pl'
ignore_duplicate_articles = {'title', 'url'}
index = 'http://dzieje.pl'
oldest_article = 8
max_articles_per_feed = 100
remove_javascript=True
no_stylesheets= True
keep_only_tags = [dict(name='h1', attrs={'class':'title'}), dict(id='content-area')]
remove_tags = [dict(attrs={'class':'field field-type-computed field-field-tagi'}), dict(id='dogory')]
feeds = [(u'Dzieje', u'http://dzieje.pl/rss.xml')]
#feeds = [(u'Dzieje', u'http://dzieje.pl/rss.xml')]
def append_page(self, soup, appendtag):
tag = appendtag.find('li', attrs={'class':'pager-next'})
if tag:
while tag:
url = tag.a['href']
if not url.startswith('http'):
url = 'http://dzieje.pl'+tag.a['href']
soup2 = self.index_to_soup(url)
pagetext = soup2.find(id='content-area').find(attrs={'class':'content'})
for r in pagetext.findAll(attrs={'class':['fieldgroup group-groupkul', 'fieldgroup group-zdjeciekult', 'fieldgroup group-zdjecieciekaw', 'fieldgroup group-zdjecieksiazka', 'fieldgroup group-zdjeciedu', 'field field-type-filefield field-field-zdjecieglownawyd']}):
r.extract()
pos = len(appendtag.contents)
appendtag.insert(pos, pagetext)
tag = soup2.find('li', attrs={'class':'pager-next'})
for r in appendtag.findAll(attrs={'class':['item-list', 'field field-type-computed field-field-tagi', ]}):
r.extract()
def find_articles(self, url):
articles = []
soup=self.index_to_soup(url)
tag=soup.find(id='content-area').div.div
for i in tag.findAll('div', recursive=False):
temp = i.find(attrs={'class':'views-field-title'}).span.a
title = temp.string
url = self.index + temp['href']
date = '' #i.find(attrs={'class':'views-field-created'}).span.string
articles.append({'title' : title,
'url' : url,
'date' : date,
'description' : ''
})
return articles
def parse_index(self):
feeds = []
feeds.append((u"Wiadomości", self.find_articles('http://dzieje.pl/wiadomosci')))
feeds.append((u"Kultura i sztuka", self.find_articles('http://dzieje.pl/kulturaisztuka')))
feeds.append((u"Film", self.find_articles('http://dzieje.pl/kino')))
feeds.append((u"Rozmaitości historyczne", self.find_articles('http://dzieje.pl/rozmaitości')))
feeds.append((u"Książka", self.find_articles('http://dzieje.pl/ksiazka')))
feeds.append((u"Wystawa", self.find_articles('http://dzieje.pl/wystawa')))
feeds.append((u"Edukacja", self.find_articles('http://dzieje.pl/edukacja')))
feeds.append((u"Dzieje się", self.find_articles('http://dzieje.pl/wydarzenia')))
return feeds
def preprocess_html(self, soup):
for a in soup('a'):
if a.has_key('href') and 'http://' not in a['href'] and 'https://' not in a['href']:
a['href']=self.index + a['href']
self.append_page(soup, soup.body)
return soup
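
The recipe above replaces the plain RSS feed with a hand-built index (find_articles plus parse_index). For reference, a minimal hedged sketch of the contract parse_index has to satisfy; the recipe name, URL and CSS class below are made up for illustration, and only the return shape matches what calibre expects:

from calibre.web.feeds.news import BasicNewsRecipe

class MinimalIndexRecipe(BasicNewsRecipe):
    # Hypothetical recipe: parse_index() must return a list of
    # (section_title, articles) pairs, each article being a dict with at
    # least 'title' and 'url' ('date' and 'description' may be empty).
    title = 'Minimal index example'

    def parse_index(self):
        soup = self.index_to_soup('http://example.com/news')  # placeholder URL
        articles = []
        for a in soup.findAll('a', attrs={'class': 'headline'}):  # assumed markup
            articles.append({'title': self.tag_to_string(a),
                             'url': a['href'],
                             'date': '',
                             'description': ''})
        return [('News', articles)]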

View File

@ -17,6 +17,7 @@ class FilmWebPl(BasicNewsRecipe):
preprocess_regexps = [(re.compile(u'\(kliknij\,\ aby powiększyć\)', re.IGNORECASE), lambda m: ''), ]#(re.compile(ur' | ', re.IGNORECASE), lambda m: '')]
extra_css = '.hdrBig {font-size:22px;} ul {list-style-type:none; padding: 0; margin: 0;}'
remove_tags= [dict(name='div', attrs={'class':['recommendOthers']}), dict(name='ul', attrs={'class':'fontSizeSet'}), dict(attrs={'class':'userSurname anno'})]
remove_attributes = ['style',]
keep_only_tags= [dict(name='h1', attrs={'class':['hdrBig', 'hdrEntity']}), dict(name='div', attrs={'class':['newsInfo', 'newsInfoSmall', 'reviewContent description']})]
feeds = [(u'News / Filmy w produkcji', 'http://www.filmweb.pl/feed/news/category/filminproduction'),
(u'News / Festiwale, nagrody i przeglądy', u'http://www.filmweb.pl/feed/news/category/festival'),
@ -50,4 +51,9 @@ class FilmWebPl(BasicNewsRecipe):
for i in soup.findAll('sup'):
if not i.string or i.string.startswith('(kliknij'):
i.extract()
tag = soup.find(name='ul', attrs={'class':'inline sep-line'})
if tag:
tag.name = 'div'
for t in tag.findAll('li'):
t.name = 'div'
return soup

View File

@ -4,9 +4,10 @@ import re
class Gildia(BasicNewsRecipe):
title = u'Gildia.pl'
__author__ = 'fenuks'
description = 'Gildia - cultural site'
description = u'Fantastyczny Portal Kulturalny - newsy, recenzje, galerie, wywiady. Literatura, film, gry komputerowe i planszowe, komiks, RPG, sklep. Nie lekceważ potęgi wyobraźni!'
cover_url = 'http://www.film.gildia.pl/_n_/portal/redakcja/logo/logo-gildia.pl-500.jpg'
category = 'culture'
cover_url = 'http://gildia.pl/images/logo-main.png'
language = 'pl'
oldest_article = 8
max_articles_per_feed = 100
@ -23,10 +24,13 @@ class Gildia(BasicNewsRecipe):
content = soup.find('div', attrs={'class':'news'})
if 'recenzj' in soup.title.string.lower():
for link in content.findAll(name='a'):
if 'recenzj' in link['href']:
self.log.warn('odnosnik')
self.log.warn(link['href'])
if 'recenzj' in link['href'] or 'muzyka/plyty' in link['href']:
return self.index_to_soup(link['href'], raw=True)
if 'fragmen' in soup.title.string.lower():
for link in content.findAll(name='a'):
if 'fragment' in link['href']:
return self.index_to_soup(link['href'], raw=True)
def preprocess_html(self, soup):
for a in soup('a'):

View File

@ -1,19 +1,20 @@
from calibre.web.feeds.news import BasicNewsRecipe
from calibre.ebooks.BeautifulSoup import BeautifulSoup
class Gram_pl(BasicNewsRecipe):
title = u'Gram.pl'
__author__ = 'fenuks'
description = 'Gram.pl - site about computer games'
description = u'Serwis społecznościowy o grach: recenzje, newsy, zapowiedzi, encyklopedia gier, forum. Gry PC, PS3, X360, PS Vita, sprzęt dla graczy.'
category = 'games'
language = 'pl'
oldest_article = 8
index='http://www.gram.pl'
max_articles_per_feed = 100
ignore_duplicate_articles = {'title', 'url'}
no_stylesheets= True
extra_css = 'h2 {font-style: italic; font-size:20px;} .picbox div {float: left;}'
#extra_css = 'h2 {font-style: italic; font-size:20px;} .picbox div {float: left;}'
cover_url=u'http://www.gram.pl/www/01/img/grampl_zima.png'
remove_tags= [dict(name='p', attrs={'class':['extraText', 'must-log-in']}), dict(attrs={'class':['el', 'headline', 'post-info', 'entry-footer clearfix']}), dict(name='div', attrs={'class':['twojaOcena', 'comment-body', 'comment-author vcard', 'comment-meta commentmetadata', 'tw_button', 'entry-comment-counter', 'snap_nopreview sharing robots-nocontent', 'sharedaddy sd-sharing-enabled']}), dict(id=['igit_rpwt_css', 'comments', 'reply-title', 'igit_title'])]
keep_only_tags= [dict(name='div', attrs={'class':['main', 'arkh-postmetadataheader', 'arkh-postcontent', 'post', 'content', 'news_header', 'news_subheader', 'news_text']}), dict(attrs={'class':['contentheading', 'contentpaneopen']}), dict(name='article')]
keep_only_tags= [dict(id='articleModule')]
remove_tags = [dict(attrs={'class':['breadCrump', 'dymek', 'articleFooter']})]
feeds = [(u'Informacje', u'http://www.gram.pl/feed_news.asp'),
(u'Publikacje', u'http://www.gram.pl/feed_news.asp?type=articles'),
(u'Kolektyw- Indie Games', u'http://indie.gram.pl/feed/'),
@ -28,35 +29,21 @@ class Gram_pl(BasicNewsRecipe):
feed.articles.remove(article)
return feeds
def append_page(self, soup, appendtag):
nexturl = appendtag.find('a', attrs={'class':'cpn'})
while nexturl:
soup2 = self.index_to_soup('http://www.gram.pl'+ nexturl['href'])
r=appendtag.find(id='pgbox')
if r:
r.extract()
pagetext = soup2.find(attrs={'class':'main'})
r=pagetext.find('h1')
if r:
r.extract()
r=pagetext.find('h2')
if r:
r.extract()
for r in pagetext.findAll('script'):
r.extract()
pos = len(appendtag.contents)
appendtag.insert(pos, pagetext)
nexturl = appendtag.find('a', attrs={'class':'cpn'})
r=appendtag.find(id='pgbox')
if r:
r.extract()
def preprocess_html(self, soup):
self.append_page(soup, soup.body)
tag=soup.findAll(name='div', attrs={'class':'picbox'})
for t in tag:
t['style']='float: left;'
tag=soup.find(name='div', attrs={'class':'summary'})
if tag:
tag.find(attrs={'class':'pros'}).insert(0, BeautifulSoup('<h2>Plusy:</h2>').h2)
tag.find(attrs={'class':'cons'}).insert(0, BeautifulSoup('<h2>Minusy:</h2>').h2)
tag = soup.find(name='section', attrs={'class':'cenzurka'})
if tag:
rate = tag.p.img['data-ocena']
tag.p.img.extract()
tag.p.insert(len(tag.p.contents)-2, BeautifulSoup('<h2>Ocena: {0}</h2>'.format(rate)).h2)
for a in soup('a'):
if a.has_key('href') and 'http://' not in a['href'] and 'https://' not in a['href']:
a['href']=self.index + a['href']
tag=soup.find(name='span', attrs={'class':'platforma'})
if tag:
tag.name = 'p'
return soup

View File

@ -1,5 +1,5 @@
__license__ = 'GPL v3'
__copyright__ = '2008-2010, Darko Miletic <darko.miletic at gmail.com>'
__copyright__ = '2008-2012, Darko Miletic <darko.miletic at gmail.com>'
'''
harpers.org
'''
@ -16,7 +16,7 @@ class Harpers(BasicNewsRecipe):
max_articles_per_feed = 100
no_stylesheets = True
use_embedded_content = False
auto_cleanup = True
masthead_url = 'http://harpers.org/wp-content/themes/harpers/images/pheader.gif'
conversion_options = {
'comment' : description
@ -32,27 +32,9 @@ class Harpers(BasicNewsRecipe):
.caption{font-family:Verdana,sans-serif;font-size:x-small;color:#666666;}
'''
#keep_only_tags = [ dict(name='div', attrs={'id':'cached'}) ]
#remove_tags = [
#dict(name='table', attrs={'class':['rcnt','rcnt topline']})
#,dict(name=['link','object','embed','meta','base'])
#]
#remove_attributes = ['width','height']
keep_only_tags = [ dict(name='div', attrs={'class':['postdetailFull', 'articlePost']}) ]
remove_tags = [dict(name=['link','object','embed','meta','base'])]
remove_attributes = ['width','height']
feeds = [(u"Harper's Magazine", u'http://harpers.org/feed/')]
def get_cover_url(self):
cover_url = None
index = 'http://harpers.org/'
soup = self.index_to_soup(index)
link_item = soup.find(name = 'img',attrs= {'class':"cover"})
if link_item:
cover_url = 'http://harpers.org' + link_item['src']
return cover_url
#def preprocess_html(self, soup):
#for item in soup.findAll(style=True):
#del item['style']
#for item in soup.findAll(xmlns=True):
#del item['xmlns']
#return soup

View File

@ -3,7 +3,7 @@ from calibre.web.feeds.news import BasicNewsRecipe
class Historia_org_pl(BasicNewsRecipe):
title = u'Historia.org.pl'
__author__ = 'fenuks'
description = u'history site'
description = u'Artykuły dotyczące historii w układzie epok i tematów, forum. Najlepsza strona historii. Matura z historii i egzamin gimnazjalny z historii.'
cover_url = 'http://lh3.googleusercontent.com/_QeRQus12wGg/TOvHsZ2GN7I/AAAAAAAAD_o/LY1JZDnq7ro/logo5.jpg'
category = 'history'
language = 'pl'
@ -12,16 +12,15 @@ class Historia_org_pl(BasicNewsRecipe):
no_stylesheets = True
use_embedded_content = True
max_articles_per_feed = 100
ignore_duplicate_articles = {'title', 'url'}
feeds = [(u'Wszystkie', u'http://www.historia.org.pl/index.php?format=feed&type=atom'),
(u'Wiadomości', u'http://www.historia.org.pl/index.php/wiadomosci.feed?type=atom'),
(u'Publikacje', u'http://www.historia.org.pl/index.php/publikacje.feed?type=atom'),
(u'Publicystyka', u'http://www.historia.org.pl/index.php/publicystyka.feed?type=atom'),
(u'Recenzje', u'http://historia.org.pl/index.php/recenzje.feed?type=atom'),
(u'Kultura i sztuka', u'http://www.historia.org.pl/index.php/kultura-i-sztuka.feed?type=atom'),
(u'Rekonstykcje', u'http://www.historia.org.pl/index.php/rekonstrukcje.feed?type=atom'),
(u'Projekty', u'http://www.historia.org.pl/index.php/projekty.feed?type=atom'),
(u'Konkursy'), (u'http://www.historia.org.pl/index.php/konkursy.feed?type=atom')]
feeds = [(u'Wszystkie', u'http://historia.org.pl/feed/'),
(u'Wiadomości', u'http://historia.org.pl/Kategoria/wiadomosci/feed/'),
(u'Publikacje', u'http://historia.org.pl/Kategoria/artykuly/feed/'),
(u'Publicystyka', u'http://historia.org.pl/Kategoria/publicystyka/feed/'),
(u'Recenzje', u'http://historia.org.pl/Kategoria/recenzje/feed/'),
(u'Projekty', u'http://historia.org.pl/Kategoria/projekty/feed/'),]
def print_version(self, url):

View File

@ -9,6 +9,21 @@ class Kosmonauta(BasicNewsRecipe):
language = 'pl'
cover_url='http://bi.gazeta.pl/im/4/10393/z10393414X,Kosmonauta-net.jpg'
no_stylesheets = True
INDEX = 'http://www.kosmonauta.net'
oldest_article = 7
no_stylesheets = True
max_articles_per_feed = 100
feeds = [(u'Kosmonauta.net', u'http://www.kosmonauta.net/index.php/feed/rss.html')]
keep_only_tags = [dict(name='div', attrs={'class':'item-page'})]
remove_tags = [dict(attrs={'class':['article-tools clearfix', 'cedtag', 'nav clearfix', 'jwDisqusForm']})]
remove_tags_after = dict(name='div', attrs={'class':'cedtag'})
feeds = [(u'Kosmonauta.net', u'http://www.kosmonauta.net/?format=feed&type=atom')]
def preprocess_html(self, soup):
for a in soup.findAll(name='a'):
if a.has_key('href'):
href = a['href']
if not href.startswith('http'):
a['href'] = self.INDEX + href
print '%%%%%%%%%%%%%%%%%%%%%%%%%', a['href']
return soup

View File

@ -1,15 +1,16 @@
from calibre.web.feeds.news import BasicNewsRecipe
import re
class Ksiazka_net_pl(BasicNewsRecipe):
title = u'ksiazka.net.pl'
title = u'książka.net.pl'
__author__ = 'fenuks'
description = u'Ksiazka.net.pl - book vortal'
description = u'Portal Księgarski - tematyczny serwis o książkach. Wydarzenia z rynku księgarsko-wydawniczego, nowości, zapowiedzi, bestsellery, setki recenzji. Niezbędne informacje dla każdego miłośnika książek, księgarza, bibliotekarza i wydawcy.'
cover_url = 'http://www.ksiazka.net.pl/fileadmin/templates/ksiazka.net.pl/images/1PortalKsiegarski-logo.jpg'
category = 'books'
language = 'pl'
oldest_article = 8
max_articles_per_feed = 100
no_stylesheets= True
remove_empty_feeds = True
#extra_css = 'img {float: right;}'
preprocess_regexps = [(re.compile(ur'Podoba mi się, kupuję:'), lambda match: '<br />')]
remove_tags_before= dict(name='div', attrs={'class':'m-body'})

View File

@ -22,13 +22,15 @@ class LeMonde(BasicNewsRecipe):
#publication_type = 'newsportal'
extra_css = '''
h1{font-size:130%;}
h2{font-size:100%;}
blockquote.aside {background-color: #DDD; padding: 0.5em;}
.ariane{font-size:xx-small;}
.source{font-size:xx-small;}
#.href{font-size:xx-small;}
#.figcaption style{color:#666666; font-size:x-small;}
#.main-article-info{font-family:Arial,Helvetica,sans-serif;}
#full-contents{font-size:small; font-family:Arial,Helvetica,sans-serif;font-weight:normal;}
#match-stats-summary{font-size:small; font-family:Arial,Helvetica,sans-serif;font-weight:normal;}
/*.href{font-size:xx-small;}*/
/*.figcaption style{color:#666666; font-size:x-small;}*/
/*.main-article-info{font-family:Arial,Helvetica,sans-serif;}*/
/*full-contents{font-size:small; font-family:Arial,Helvetica,sans-serif;font-weight:normal;}*/
/*match-stats-summary{font-size:small; font-family:Arial,Helvetica,sans-serif;font-weight:normal;}*/
'''
#preprocess_regexps = [(re.compile(r'<!--.*?-->', re.DOTALL), lambda m: '')]
conversion_options = {
@ -44,6 +46,9 @@ class LeMonde(BasicNewsRecipe):
filterDuplicates = True
def preprocess_html(self, soup):
for aside in soup.findAll('aside'):
aside.name='blockquote'
aside['class'] = "aside"
for alink in soup.findAll('a'):
if alink.string is not None:
tstr = alink.string
@ -107,7 +112,9 @@ class LeMonde(BasicNewsRecipe):
]
remove_tags = [
dict(name='div', attrs={'class':['bloc_base meme_sujet']}),
dict(attrs={'class':['rubriques_liees']}),
dict(attrs={'class':['sociaux']}),
dict(attrs={'class':['bloc_base meme_sujet']}),
dict(name='p', attrs={'class':['lire']})
]

View File

@ -2,7 +2,7 @@
from calibre.web.feeds.news import BasicNewsRecipe
class Mlody_technik(BasicNewsRecipe):
title = u'Mlody technik'
title = u'Młody technik'
__author__ = 'fenuks'
description = u'Młody technik'
category = 'science'

View File

@ -1,12 +1,13 @@
from calibre.web.feeds.news import BasicNewsRecipe
# -*- coding: utf-8 -*-
class BasicUserRecipe1324913680(BasicNewsRecipe):
from calibre.web.feeds.news import BasicNewsRecipe
class AdvancedUserRecipe1355341662(BasicNewsRecipe):
title = u'Sivil Dusunce'
language = 'tr'
__author__ = 'asalet_r'
oldest_article = 7
max_articles_per_feed = 20
max_articles_per_feed = 50
auto_cleanup = True
feeds = [(u'Sivil Dusunce', u'http://www.sivildusunce.com/feed/')]
feeds = [(u'Sivil Dusunce', u'http://www.sivildusunce.com/?t=rss&xml=1')]

recipes/titanic_de.recipe (new file, 20 lines)
View File

@ -0,0 +1,20 @@
from calibre.web.feeds.news import BasicNewsRecipe
class Titanic(BasicNewsRecipe):
title = u'Titanic'
language = 'de'
__author__ = 'Krittika Goyal'
oldest_article = 14 #days
max_articles_per_feed = 25
#encoding = 'cp1252'
use_embedded_content = False
no_stylesheets = True
auto_cleanup = True
feeds = [
('News',
'http://www.titanic-magazin.de/ich.war.bei.der.waffen.rss'),
]

Binary file not shown.

View File

@ -598,6 +598,10 @@ class Win32Freeze(Command, WixMixIn):
# from files
'unrar.pyd', 'wpd.pyd', 'podofo.pyd',
'progress_indicator.pyd',
# As per this https://bugs.launchpad.net/bugs/1087816
# on some systems magick.pyd fails to load from memory
# on 64 bit
'magick.pyd',
}:
self.add_to_zipfile(zf, pyd, x)
os.remove(self.j(x, pyd))
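
The new 'magick.pyd' entry and its comment above match the changelog item "Possible fix for 64bit calibre not starting up on some Windows systems": the extension joins the set of .pyd files that the Windows freeze step leaves on disk rather than packing into the zip bundle to be imported from memory. A loose sketch of that pattern, with placeholder callables standing in for the real Win32Freeze helpers:

KEEP_ON_DISK = {
    'unrar.pyd', 'wpd.pyd', 'podofo.pyd', 'progress_indicator.pyd',
    # magick.pyd fails to load from memory on some 64 bit systems (bug 1087816)
    'magick.pyd',
}

def pack_extension(name, add_to_zip, leave_on_disk):
    # add_to_zip / leave_on_disk are hypothetical callables, not the actual
    # Win32Freeze methods; only the membership test mirrors the hunk above.
    if name in KEEP_ON_DISK:
        leave_on_disk(name)
    else:
        add_to_zip(name)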

File diff suppressed because it is too large

View File

@ -9,35 +9,35 @@ msgstr ""
"Report-Msgid-Bugs-To: Debian iso-codes team <pkg-isocodes-"
"devel@lists.alioth.debian.org>\n"
"POT-Creation-Date: 2011-11-25 14:01+0000\n"
"PO-Revision-Date: 2011-09-27 16:27+0000\n"
"Last-Translator: Kovid Goyal <Unknown>\n"
"PO-Revision-Date: 2012-12-13 03:44+0000\n"
"Last-Translator: Fábio Malcher Miranda <mirand863@hotmail.com>\n"
"Language-Team: Brazilian Portuguese\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"X-Launchpad-Export-Date: 2011-11-26 05:47+0000\n"
"X-Generator: Launchpad (build 14381)\n"
"X-Launchpad-Export-Date: 2012-12-13 04:41+0000\n"
"X-Generator: Launchpad (build 16361)\n"
"Language: \n"
#. name for aaa
msgid "Ghotuo"
msgstr ""
msgstr "Ghotuo"
#. name for aab
msgid "Alumu-Tesu"
msgstr ""
msgstr "Alumu-Tesu"
#. name for aac
msgid "Ari"
msgstr ""
msgstr "Ari"
#. name for aad
msgid "Amal"
msgstr ""
msgstr "Amal"
#. name for aae
msgid "Albanian; Arbëreshë"
msgstr ""
msgstr "Albanês; Arbëreshë"
#. name for aaf
msgid "Aranadan"
@ -45,7 +45,7 @@ msgstr ""
#. name for aag
msgid "Ambrak"
msgstr ""
msgstr "Ambrak"
#. name for aah
msgid "Arapesh; Abu'"
@ -53,23 +53,23 @@ msgstr ""
#. name for aai
msgid "Arifama-Miniafia"
msgstr ""
msgstr "Arifama-Miniafia"
#. name for aak
msgid "Ankave"
msgstr ""
msgstr "Ankave"
#. name for aal
msgid "Afade"
msgstr ""
msgstr "Afade"
#. name for aam
msgid "Aramanik"
msgstr ""
msgstr "Aramaico"
#. name for aan
msgid "Anambé"
msgstr ""
msgstr "Anambé"
#. name for aao
msgid "Arabic; Algerian Saharan"
@ -77,7 +77,7 @@ msgstr ""
#. name for aap
msgid "Arára; Pará"
msgstr ""
msgstr "Arara; Pará"
#. name for aaq
msgid "Abnaki; Eastern"
@ -89,7 +89,7 @@ msgstr ""
#. name for aas
msgid "Aasáx"
msgstr ""
msgstr "Aasáx"
#. name for aat
msgid "Albanian; Arvanitika"
@ -97,27 +97,27 @@ msgstr ""
#. name for aau
msgid "Abau"
msgstr ""
msgstr "Abau"
#. name for aaw
msgid "Solong"
msgstr ""
msgstr "Solong"
#. name for aax
msgid "Mandobo Atas"
msgstr ""
msgstr "Mandobo Atas"
#. name for aaz
msgid "Amarasi"
msgstr ""
msgstr "Amarasi"
#. name for aba
msgid "Abé"
msgstr ""
msgstr "Abé"
#. name for abb
msgid "Bankon"
msgstr ""
msgstr "Bankon"
#. name for abc
msgid "Ayta; Ambala"
@ -125,7 +125,7 @@ msgstr ""
#. name for abd
msgid "Manide"
msgstr ""
msgstr "Manide"
#. name for abe
msgid "Abnaki; Western"
@ -145,11 +145,11 @@ msgstr ""
#. name for abi
msgid "Abidji"
msgstr ""
msgstr "Abidji"
#. name for abj
msgid "Aka-Bea"
msgstr ""
msgstr "Aka-Bea"
#. name for abk
msgid "Abkhazian"
@ -157,19 +157,19 @@ msgstr ""
#. name for abl
msgid "Lampung Nyo"
msgstr ""
msgstr "Lampung Nyo"
#. name for abm
msgid "Abanyom"
msgstr ""
msgstr "Abanyom"
#. name for abn
msgid "Abua"
msgstr ""
msgstr "Abua"
#. name for abo
msgid "Abon"
msgstr ""
msgstr "Abon"
#. name for abp
msgid "Ayta; Abellen"
@ -177,11 +177,11 @@ msgstr ""
#. name for abq
msgid "Abaza"
msgstr ""
msgstr "Abaza"
#. name for abr
msgid "Abron"
msgstr ""
msgstr "Abron"
#. name for abs
msgid "Malay; Ambonese"
@ -189,11 +189,11 @@ msgstr ""
#. name for abt
msgid "Ambulas"
msgstr ""
msgstr "Ambulas"
#. name for abu
msgid "Abure"
msgstr ""
msgstr "Abure"
#. name for abv
msgid "Arabic; Baharna"
@ -201,15 +201,15 @@ msgstr ""
#. name for abw
msgid "Pal"
msgstr ""
msgstr "Pal"
#. name for abx
msgid "Inabaknon"
msgstr ""
msgstr "Inabaknon"
#. name for aby
msgid "Aneme Wake"
msgstr ""
msgstr "Aneme Wake"
#. name for abz
msgid "Abui"
@ -225,7 +225,7 @@ msgstr ""
#. name for acd
msgid "Gikyode"
msgstr ""
msgstr "Gikyode"
#. name for ace
msgid "Achinese"
@ -241,15 +241,15 @@ msgstr ""
#. name for aci
msgid "Aka-Cari"
msgstr ""
msgstr "Aka-Cari"
#. name for ack
msgid "Aka-Kora"
msgstr ""
msgstr "Aka-Kora"
#. name for acl
msgid "Akar-Bale"
msgstr ""
msgstr "Akar-Bale"
#. name for acm
msgid "Arabic; Mesopotamian"
@ -257,7 +257,7 @@ msgstr ""
#. name for acn
msgid "Achang"
msgstr ""
msgstr "Achang"
#. name for acp
msgid "Acipa; Eastern"
@ -269,23 +269,23 @@ msgstr ""
#. name for acr
msgid "Achi"
msgstr ""
msgstr "Achi"
#. name for acs
msgid "Acroá"
msgstr ""
msgstr "Acroá"
#. name for act
msgid "Achterhoeks"
msgstr ""
msgstr "Achterhoeks"
#. name for acu
msgid "Achuar-Shiwiar"
msgstr ""
msgstr "Achuar-Shiwiar"
#. name for acv
msgid "Achumawi"
msgstr ""
msgstr "Achumawi"
#. name for acw
msgid "Arabic; Hijazi"
@ -301,23 +301,23 @@ msgstr ""
#. name for acz
msgid "Acheron"
msgstr ""
msgstr "Acheron"
#. name for ada
msgid "Adangme"
msgstr ""
msgstr "Adangme"
#. name for adb
msgid "Adabe"
msgstr ""
msgstr "Adabe"
#. name for add
msgid "Dzodinka"
msgstr ""
msgstr "Dzodinka"
#. name for ade
msgid "Adele"
msgstr ""
msgstr "Adele"
#. name for adf
msgid "Arabic; Dhofari"
@ -325,59 +325,59 @@ msgstr ""
#. name for adg
msgid "Andegerebinha"
msgstr ""
msgstr "Andegerebinha"
#. name for adh
msgid "Adhola"
msgstr ""
msgstr "Adhola"
#. name for adi
msgid "Adi"
msgstr ""
msgstr "Adi"
#. name for adj
msgid "Adioukrou"
msgstr ""
msgstr "Adioukrou"
#. name for adl
msgid "Galo"
msgstr ""
msgstr "Gaulês"
#. name for adn
msgid "Adang"
msgstr ""
msgstr "Adang"
#. name for ado
msgid "Abu"
msgstr ""
msgstr "Abu"
#. name for adp
msgid "Adap"
msgstr ""
msgstr "Adap"
#. name for adq
msgid "Adangbe"
msgstr ""
msgstr "Adangbe"
#. name for adr
msgid "Adonara"
msgstr ""
msgstr "Adonara"
#. name for ads
msgid "Adamorobe Sign Language"
msgstr ""
msgstr "Idiomas de Sinais Adamorobe"
#. name for adt
msgid "Adnyamathanha"
msgstr ""
msgstr "Adnyamathanha"
#. name for adu
msgid "Aduge"
msgstr ""
msgstr "Aduge"
#. name for adw
msgid "Amundava"
msgstr ""
msgstr "Amundava"
#. name for adx
msgid "Tibetan; Amdo"
@ -385,11 +385,11 @@ msgstr ""
#. name for ady
msgid "Adyghe"
msgstr ""
msgstr "Adigue"
#. name for adz
msgid "Adzera"
msgstr ""
msgstr "Adzera"
#. name for aea
msgid "Areba"
@ -405,7 +405,7 @@ msgstr ""
#. name for aed
msgid "Argentine Sign Language"
msgstr ""
msgstr "Idiomas de Sinais Argentino"
#. name for aee
msgid "Pashayi; Northeast"
@ -413,23 +413,23 @@ msgstr ""
#. name for aek
msgid "Haeke"
msgstr ""
msgstr "Haeke"
#. name for ael
msgid "Ambele"
msgstr ""
msgstr "Ambele"
#. name for aem
msgid "Arem"
msgstr ""
msgstr "Arem"
#. name for aen
msgid "Armenian Sign Language"
msgstr ""
msgstr "Idiomas de Sinais Americano"
#. name for aeq
msgid "Aer"
msgstr ""
msgstr "Aer"
#. name for aer
msgid "Arrernte; Eastern"
@ -437,23 +437,23 @@ msgstr ""
#. name for aes
msgid "Alsea"
msgstr ""
msgstr "Alsea"
#. name for aeu
msgid "Akeu"
msgstr ""
msgstr "Akeu"
#. name for aew
msgid "Ambakich"
msgstr ""
msgstr "Ambakich"
#. name for aey
msgid "Amele"
msgstr ""
msgstr "Amele"
#. name for aez
msgid "Aeka"
msgstr ""
msgstr "Aeka"
#. name for afb
msgid "Arabic; Gulf"
@ -461,11 +461,11 @@ msgstr ""
#. name for afd
msgid "Andai"
msgstr ""
msgstr "Andai"
#. name for afe
msgid "Putukwam"
msgstr ""
msgstr "Putukwam"
#. name for afg
msgid "Afghan Sign Language"
@ -473,27 +473,27 @@ msgstr ""
#. name for afh
msgid "Afrihili"
msgstr ""
msgstr "Afrihili"
#. name for afi
msgid "Akrukay"
msgstr ""
msgstr "Akrukay"
#. name for afk
msgid "Nanubae"
msgstr ""
msgstr "Nanubae"
#. name for afn
msgid "Defaka"
msgstr ""
msgstr "Defaka"
#. name for afo
msgid "Eloyi"
msgstr ""
msgstr "Eloyi"
#. name for afp
msgid "Tapei"
msgstr ""
msgstr "Tapei"
#. name for afr
msgid "Afrikaans"
@ -505,55 +505,55 @@ msgstr ""
#. name for aft
msgid "Afitti"
msgstr ""
msgstr "Afitti"
#. name for afu
msgid "Awutu"
msgstr ""
msgstr "Awutu"
#. name for afz
msgid "Obokuitai"
msgstr ""
msgstr "Obokuitai"
#. name for aga
msgid "Aguano"
msgstr ""
msgstr "Aguano"
#. name for agb
msgid "Legbo"
msgstr ""
msgstr "Legbo"
#. name for agc
msgid "Agatu"
msgstr ""
msgstr "Agatu"
#. name for agd
msgid "Agarabi"
msgstr ""
msgstr "Agarabi"
#. name for age
msgid "Angal"
msgstr ""
msgstr "Angal"
#. name for agf
msgid "Arguni"
msgstr ""
msgstr "Arguni"
#. name for agg
msgid "Angor"
msgstr ""
msgstr "Angor"
#. name for agh
msgid "Ngelima"
msgstr ""
msgstr "Ngelima"
#. name for agi
msgid "Agariya"
msgstr ""
msgstr "Agariya"
#. name for agj
msgid "Argobba"
msgstr ""
msgstr "Argobba"
#. name for agk
msgid "Agta; Isarog"
@ -561,31 +561,31 @@ msgstr ""
#. name for agl
msgid "Fembe"
msgstr ""
msgstr "Fembe"
#. name for agm
msgid "Angaataha"
msgstr ""
msgstr "Angaataha"
#. name for agn
msgid "Agutaynen"
msgstr ""
msgstr "Agutaynen"
#. name for ago
msgid "Tainae"
msgstr ""
msgstr "Tainae"
#. name for agq
msgid "Aghem"
msgstr ""
msgstr "Aghem"
#. name for agr
msgid "Aguaruna"
msgstr ""
msgstr "Aguaruna"
#. name for ags
msgid "Esimbi"
msgstr ""
msgstr "Esimbi"
#. name for agt
msgid "Agta; Central Cagayan"
@ -593,7 +593,7 @@ msgstr ""
#. name for agu
msgid "Aguacateco"
msgstr ""
msgstr "Aguacateco"
#. name for agv
msgid "Dumagat; Remontado"
@ -601,11 +601,11 @@ msgstr ""
#. name for agw
msgid "Kahua"
msgstr ""
msgstr "Kahua"
#. name for agx
msgid "Aghul"
msgstr ""
msgstr "Aghul"
#. name for agy
msgid "Alta; Southern"
@ -617,19 +617,19 @@ msgstr ""
#. name for aha
msgid "Ahanta"
msgstr ""
msgstr "Ahanta"
#. name for ahb
msgid "Axamb"
msgstr ""
msgstr "Axamb"
#. name for ahg
msgid "Qimant"
msgstr ""
msgstr "Qimant"
#. name for ahh
msgid "Aghu"
msgstr ""
msgstr "Aghu"
#. name for ahi
msgid "Aizi; Tiagbamrin"
@ -637,11 +637,11 @@ msgstr ""
#. name for ahk
msgid "Akha"
msgstr ""
msgstr "Akha"
#. name for ahl
msgid "Igo"
msgstr ""
msgstr "Igo"
#. name for ahm
msgid "Aizi; Mobumrin"
@ -649,11 +649,11 @@ msgstr ""
#. name for ahn
msgid "Àhàn"
msgstr ""
msgstr "Àhàn"
#. name for aho
msgid "Ahom"
msgstr ""
msgstr "Ahom"
#. name for ahp
msgid "Aizi; Aproumu"
@ -661,39 +661,39 @@ msgstr ""
#. name for ahr
msgid "Ahirani"
msgstr ""
msgstr "Ahirani"
#. name for ahs
msgid "Ashe"
msgstr ""
msgstr "Ashe"
#. name for aht
msgid "Ahtena"
msgstr ""
msgstr "Ahtena"
#. name for aia
msgid "Arosi"
msgstr ""
msgstr "Arosi"
#. name for aib
msgid "Ainu (China)"
msgstr ""
msgstr "Ainu (China)"
#. name for aic
msgid "Ainbai"
msgstr ""
msgstr "Ainbai"
#. name for aid
msgid "Alngith"
msgstr ""
msgstr "Alngith"
#. name for aie
msgid "Amara"
msgstr ""
msgstr "Amara"
#. name for aif
msgid "Agi"
msgstr ""
msgstr "Agi"
#. name for aig
msgid "Creole English; Antigua and Barbuda"
@ -701,7 +701,7 @@ msgstr ""
#. name for aih
msgid "Ai-Cham"
msgstr ""
msgstr "Ai-Cham"
#. name for aii
msgid "Neo-Aramaic; Assyrian"
@ -709,35 +709,35 @@ msgstr ""
#. name for aij
msgid "Lishanid Noshan"
msgstr ""
msgstr "Lishanid Noshan"
#. name for aik
msgid "Ake"
msgstr ""
msgstr "Ake"
#. name for ail
msgid "Aimele"
msgstr ""
msgstr "Aimele"
#. name for aim
msgid "Aimol"
msgstr ""
msgstr "Aimol"
#. name for ain
msgid "Ainu (Japan)"
msgstr ""
msgstr "Ainu (Japão)"
#. name for aio
msgid "Aiton"
msgstr ""
msgstr "Aiton"
#. name for aip
msgid "Burumakok"
msgstr ""
msgstr "Burumakok"
#. name for aiq
msgid "Aimaq"
msgstr ""
msgstr "Aimaq"
#. name for air
msgid "Airoran"
@ -2021,7 +2021,7 @@ msgstr ""
#. name for aze
msgid "Azerbaijani"
msgstr "Azerbaidjani"
msgstr "Azerbaijano"
#. name for azg
msgid "Amuzgo; San Pedro Amuzgos"

View File

@ -4,7 +4,7 @@ __license__ = 'GPL v3'
__copyright__ = '2008, Kovid Goyal kovid@kovidgoyal.net'
__docformat__ = 'restructuredtext en'
__appname__ = u'calibre'
numeric_version = (0, 9, 9)
numeric_version = (0, 9, 10)
__version__ = u'.'.join(map(unicode, numeric_version))
__author__ = u"Kovid Goyal <kovid@kovidgoyal.net>"

View File

@ -121,6 +121,8 @@ def debug(ioreg_to_tmp=False, buf=None, plugins=None,
out('\nDisabled plugins:', textwrap.fill(' '.join([x.__class__.__name__ for x in
disabled_plugins])))
out(' ')
else:
out('\nNo disabled plugins')
found_dev = False
for dev in devplugins:
if not dev.MANAGES_DEVICE_PRESENCE: continue

View File

@ -10,7 +10,7 @@ import cStringIO
from calibre.devices.usbms.driver import USBMS
HTC_BCDS = [0x100, 0x0222, 0x0226, 0x227, 0x228, 0x229, 0x9999]
HTC_BCDS = [0x100, 0x0222, 0x0226, 0x227, 0x228, 0x229, 0x0231, 0x9999]
class ANDROID(USBMS):
@ -92,7 +92,7 @@ class ANDROID(USBMS):
# Google
0x18d1 : {
0x0001 : [0x0223, 0x230, 0x9999],
0x0003 : [0x0230],
0x0003 : [0x0230, 0x9999],
0x4e11 : [0x0100, 0x226, 0x227],
0x4e12 : [0x0100, 0x226, 0x227],
0x4e21 : [0x0100, 0x226, 0x227, 0x231],
@ -212,7 +212,7 @@ class ANDROID(USBMS):
'VIZIO', 'GOOGLE', 'FREESCAL', 'KOBO_INC', 'LENOVO', 'ROCKCHIP',
'POCKET', 'ONDA_MID', 'ZENITHIN', 'INGENIC', 'PMID701C', 'PD',
'PMP5097C', 'MASS', 'NOVO7', 'ZEKI', 'COBY', 'SXZ', 'USB_2.0',
'COBY_MID', 'VS', 'AINOL', 'TOPWISE', 'PAD703']
'COBY_MID', 'VS', 'AINOL', 'TOPWISE', 'PAD703', 'NEXT8D12']
WINDOWS_MAIN_MEM = ['ANDROID_PHONE', 'A855', 'A853', 'INC.NEXUS_ONE',
'__UMS_COMPOSITE', '_MB200', 'MASS_STORAGE', '_-_CARD', 'SGH-I897',
'GT-I9000', 'FILE-STOR_GADGET', 'SGH-T959_CARD', 'SGH-T959', 'SAMSUNG_ANDROID',
@ -232,7 +232,7 @@ class ANDROID(USBMS):
'THINKPAD_TABLET', 'SGH-T989', 'YP-G70', 'STORAGE_DEVICE',
'ADVANCED', 'SGH-I727', 'USB_FLASH_DRIVER', 'ANDROID',
'S5830I_CARD', 'MID7042', 'LINK-CREATE', '7035', 'VIEWPAD_7E',
'NOVO7', 'MB526', '_USB#WYK7MSF8KE', 'TABLET_PC']
'NOVO7', 'MB526', '_USB#WYK7MSF8KE', 'TABLET_PC', 'F']
WINDOWS_CARD_A_MEM = ['ANDROID_PHONE', 'GT-I9000_CARD', 'SGH-I897',
'FILE-STOR_GADGET', 'SGH-T959_CARD', 'SGH-T959', 'SAMSUNG_ANDROID', 'GT-P1000_CARD',
'A70S', 'A101IT', '7', 'INCREDIBLE', 'A7EB', 'SGH-T849_CARD',
@ -243,7 +243,7 @@ class ANDROID(USBMS):
'FILE-CD_GADGET', 'GT-I9001_CARD', 'USB_2.0', 'XT875',
'UMS_COMPOSITE', 'PRO', '.KOBO_VOX', 'SGH-T989_CARD', 'SGH-I727',
'USB_FLASH_DRIVER', 'ANDROID', 'MID7042', '7035', 'VIEWPAD_7E',
'NOVO7', 'ADVANCED', 'TABLET_PC']
'NOVO7', 'ADVANCED', 'TABLET_PC', 'F']
OSX_MAIN_MEM = 'Android Device Main Memory'

View File

@ -2440,13 +2440,13 @@ class ITUNES(DriverBase):
as_binding = "dynamic"
try:
# Try dynamic binding - works with iTunes <= 10.6.1
foo = self.iTunes.name()
self.iTunes.name()
except:
# Try static binding
import itunes
self.iTunes = appscript.app('iTunes', terms=itunes)
try:
foo = self.iTunes.name()
self.iTunes.name()
as_binding = "static"
except:
self.iTunes = None

View File

@ -335,14 +335,26 @@ class HeuristicProcessor(object):
This function intentionally leaves hyphenated content alone as that is handled by the
dehyphenate routine in a separate step
'''
def style_unwrap(match):
style_close = match.group('style_close')
style_open = match.group('style_open')
if style_open and style_close:
return style_close+' '+style_open
elif style_open and not style_close:
return ' '+style_open
elif not style_open and style_close:
return style_close+' '
else:
return ' '
# define the pieces of the regex
lookahead = "(?<=.{"+str(length)+u"}([a-zäëïöüàèìòùáćéíĺóŕńśúýâêîôûçąężıãõñæøþðßěľščťžňďřů,:)\IA\u00DF]|(?<!\&\w{4});))" # (?<!\&\w{4});) is a semicolon not part of an entity
em_en_lookahead = "(?<=.{"+str(length)+u"}[\u2013\u2014])"
soft_hyphen = u"\xad"
line_ending = "\s*</(span|[iubp]|div)>\s*(</(span|[iubp]|div)>)?"
line_ending = "\s*(?P<style_close></(span|[iub])>)?\s*(</(p|div)>)?"
blanklines = "\s*(?P<up2threeblanks><(p|span|div)[^>]*>\s*(<(p|span|div)[^>]*>\s*</(span|p|div)>\s*)</(span|p|div)>\s*){0,3}\s*"
line_opening = "<(span|[iubp]|div)[^>]*>\s*(<(span|[iubp]|div)[^>]*>)?\s*"
line_opening = "<(p|div)[^>]*>\s*(?P<style_open><(span|[iub])[^>]*>)?\s*"
txt_line_wrap = u"((\u0020|\u0009)*\n){1,4}"
unwrap_regex = lookahead+line_ending+blanklines+line_opening
@ -353,14 +365,17 @@ class HeuristicProcessor(object):
unwrap_regex = lookahead+txt_line_wrap
em_en_unwrap_regex = em_en_lookahead+txt_line_wrap
shy_unwrap_regex = soft_hyphen+txt_line_wrap
content = unwrap_regex.sub(' ', content)
content = em_en_unwrap_regex.sub('', content)
content = shy_unwrap_regex.sub('', content)
else:
unwrap = re.compile(u"%s" % unwrap_regex, re.UNICODE)
em_en_unwrap = re.compile(u"%s" % em_en_unwrap_regex, re.UNICODE)
shy_unwrap = re.compile(u"%s" % shy_unwrap_regex, re.UNICODE)
content = unwrap.sub(style_unwrap, content)
content = em_en_unwrap.sub(style_unwrap, content)
content = shy_unwrap.sub(style_unwrap, content)
unwrap = re.compile(u"%s" % unwrap_regex, re.UNICODE)
em_en_unwrap = re.compile(u"%s" % em_en_unwrap_regex, re.UNICODE)
shy_unwrap = re.compile(u"%s" % shy_unwrap_regex, re.UNICODE)
content = unwrap.sub(' ', content)
content = em_en_unwrap.sub('', content)
content = shy_unwrap.sub('', content)
return content
def txt_process(self, match):
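
The rewritten unwrap step above splits the old line_ending/line_opening patterns so that an inline style tag (span, i, u, b) closing at the end of a wrapped line and reopening at the start of the next is carried across the join by style_unwrap instead of being thrown away. A simplified, self-contained sketch of that idea; the regex below is deliberately reduced (no length lookahead, no blank-line handling) and is not the production pattern from heuristics.py:

import re

sample = "<p>He spoke <i>very</i></p>\n<p><i>quietly</i> indeed.</p>"

# Reduced stand-in for unwrap_regex: a <p> boundary with an optional inline
# style tag on either side of it.
pattern = re.compile(
    r"\s*(?P<style_close></(?:span|[iub])>)?\s*</p>\s*"
    r"<p[^>]*>\s*(?P<style_open><(?:span|[iub])[^>]*>)?\s*")

def style_unwrap(match):
    # Same decision table as the new style_unwrap above.
    style_close = match.group('style_close')
    style_open = match.group('style_open')
    if style_open and style_close:
        return style_close + ' ' + style_open
    elif style_open and not style_close:
        return ' ' + style_open
    elif not style_open and style_close:
        return style_close + ' '
    return ' '

print(pattern.sub(style_unwrap, sample))
# -> <p>He spoke <i>very</i> <i>quietly</i> indeed.</p>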

View File

@ -356,7 +356,7 @@ class CSSFlattener(object):
if 'bgcolor' in node.attrib:
try:
cssdict['background-color'] = Property('background-color', node.attrib['bgcolor']).value
except ValueError:
except (ValueError, SyntaxErr):
pass
del node.attrib['bgcolor']
if cssdict.get('font-weight', '').lower() == 'medium':
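
The widened except clause above corresponds to the changelog entry "Conversion: Fix malformed values in the bgcolor attribute causing conversion to abort". A hedged sketch of the pattern, assuming SyntaxErr here is xml.dom.SyntaxErr (which cssutils raises for values it cannot parse; the matching import is outside this hunk):

from xml.dom import SyntaxErr        # assumed origin of SyntaxErr
from cssutils.css import Property

def bgcolor_to_css(raw):
    # Mirrors the hunk above: a malformed bgcolor value such as "x0x0x0" is
    # silently dropped instead of aborting the conversion with an unhandled
    # exception.
    try:
        return Property('background-color', raw).value
    except (ValueError, SyntaxErr):
        return None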

View File

@ -0,0 +1,451 @@
#!/usr/bin/env python
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:fdm=marker:ai
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2012, Kovid Goyal <kovid at kovidgoyal.net>'
__docformat__ = 'restructuredtext en'
import sys, traceback
from math import sqrt
from collections import namedtuple
from future_builtins import map
from PyQt4.Qt import (QPaintEngine, QPaintDevice, Qt, QApplication, QPainter,
QTransform, QPoint, QPainterPath)
from reportlab.lib.units import inch
from reportlab.lib.pagesizes import A4
from reportlab.pdfgen.canvas import FILL_NON_ZERO, FILL_EVEN_ODD, Canvas
from reportlab.lib.colors import Color
from calibre.constants import DEBUG
XDPI = 1200
YDPI = 1200
Point = namedtuple('Point', 'x y')
def set_transform(transform, func):
func(transform.m11(), transform.m12(), transform.m21(), transform.m22(), transform.dx(), transform.dy())
class GraphicsState(object): # {{{
def __init__(self, state=None):
self.ops = {}
if state is not None:
self.read_state(state)
@property
def stack_reset_needed(self):
return 'transform' in self.ops or 'clip' in self.ops
def read_state(self, state):
flags = state.state()
if flags & QPaintEngine.DirtyTransform:
self.ops['transform'] = state.transform()
# TODO: Add support for brush patterns
if flags & QPaintEngine.DirtyBrush:
brush = state.brush()
color = brush.color()
alpha = color.alphaF()
if alpha == 1.0: alpha = None
self.ops['do_fill'] = 0 if (alpha == 0.0 or brush.style() == Qt.NoBrush) else 1
self.ops['fill_color'] = Color(color.red(), color.green(), color.blue(),
alpha=alpha)
if flags & QPaintEngine.DirtyPen:
pen = state.pen()
brush = pen.brush()
color = pen.color()
alpha = color.alphaF()
if alpha == 1.0: alpha = None
self.ops['do_stroke'] = 0 if (pen.style() == Qt.NoPen or brush.style() ==
Qt.NoBrush or alpha == 0.0) else 1
ps = {Qt.DashLine:[3], Qt.DotLine:[1,2], Qt.DashDotLine:[3,2,1,2],
Qt.DashDotDotLine:[3, 2, 1, 2, 1, 2]}.get(pen.style(), [])
self.ops['dash'] = ps
self.ops['line_width'] = pen.widthF()
self.ops['stroke_color'] = Color(color.red(), color.green(),
color.blue(), alpha=alpha)
self.ops['line_cap'] = {Qt.FlatCap:0, Qt.RoundCap:1,
Qt.SquareCap:2}.get(pen.capStyle(), 0)
self.ops['line_join'] = {Qt.MiterJoin:0, Qt.RoundJoin:1,
Qt.BevelJoin:2}.get(pen.joinStyle(), 0)
if flags & QPaintEngine.DirtyClipPath:
self.ops['clip'] = (state.clipOperation(), state.clipPath())
elif flags & QPaintEngine.DirtyClipRegion:
path = QPainterPath()
for rect in state.clipRegion().rects():
path.addRect(rect)
self.ops['clip'] = (state.clipOperation(), path)
# TODO: Add support for opacity
def __call__(self, engine):
canvas = engine.canvas
ops = self.ops
current_transform = ops.get('transform', None)
srn = self.stack_reset_needed
if srn:
canvas.restoreState()
canvas.saveState()
# Since we have reset the stack we need to re-apply all previous
# operations
ops = engine.graphics_state.ops.copy()
ops.pop('clip', None) # Prev clip is handled separately
ops.update(self.ops)
self.ops = ops
# We apply clip before transform as the clip may have to be merged with
# the previous clip path so it is easiest to work with clips that are
# pre-transformed
prev_clip_path = engine.graphics_state.ops.get('clip', (None, None))[1]
if 'clip' in ops:
op, path = ops['clip']
if current_transform is not None and path is not None:
# Pre transform the clip path
path = current_transform.map(path)
ops['clip'] = (op, path)
if op == Qt.ReplaceClip:
pass
elif op == Qt.IntersectClip:
if prev_clip_path is not None:
ops['clip'] = (op, path.intersected(prev_clip_path))
elif op == Qt.UniteClip:
if prev_clip_path is not None:
path.addPath(prev_clip_path)
else:
ops['clip'] = (Qt.NoClip, None)
path = ops['clip'][1]
if path is not None:
engine.set_clip(path)
elif prev_clip_path is not None:
# Re-apply the previous clip path since no clipping operation was
# specified
engine.set_clip(prev_clip_path)
ops['clip'] = (Qt.ReplaceClip, prev_clip_path)
# Apply transform
if current_transform is not None:
engine.qt_system = current_transform
set_transform(current_transform, canvas.transform)
if 'fill_color' in ops:
canvas.setFillColor(ops['fill_color'])
if 'stroke_color' in ops:
canvas.setStrokeColor(ops['stroke_color'])
for x in ('fill', 'stroke'):
x = 'do_'+x
if x in ops:
setattr(canvas, x, ops[x])
if 'dash' in ops:
canvas.setDash(ops['dash'])
if 'line_width' in ops:
canvas.setLineWidth(ops['line_width'])
if 'line_cap' in ops:
canvas.setLineCap(ops['line_cap'])
if 'line_join' in ops:
canvas.setLineJoin(ops['line_join'])
if not srn:
# Add the operations from the previous state object that were not
# updated in this state object. This is needed to allow stack
# resetting to work.
ops = canvas.graphics_state.ops.copy()
ops.update(self.ops)
self.ops = ops
return self
# }}}
class PdfEngine(QPaintEngine):
def __init__(self, file_object, page_width, page_height, left_margin,
top_margin, right_margin, bottom_margin, width, height):
QPaintEngine.__init__(self, self.features)
self.file_object = file_object
self.page_height, self.page_width = page_height, page_width
self.left_margin, self.top_margin = left_margin, top_margin
self.right_margin, self.bottom_margin = right_margin, bottom_margin
self.pixel_width, self.pixel_height = width, height
# Setup a co-ordinate transform that allows us to use co-ords
# from Qt's pixel based co-ordinate system with its origin at the top
# left corner. PDF's co-ordinate system is based on pts and has its
# origin in the bottom left corner. We also have to implement the page
# margins. Therefore, we need to translate, scale and reflect about the
# x-axis.
dy = self.page_height - self.top_margin
dx = self.left_margin
sx = (self.page_width - self.left_margin -
self.right_margin) / self.pixel_width
sy = (self.page_height - self.top_margin -
self.bottom_margin) / self.pixel_height
self.pdf_system = QTransform(sx, 0, 0, -sy, dx, dy)
self.qt_system = QTransform()
self.do_stroke = 1
self.do_fill = 0
self.scale = sqrt(sy**2 + sx**2)
self.yscale = sy
self.graphics_state = GraphicsState()
def init_page(self):
set_transform(self.pdf_system, self.canvas.transform)
self.canvas.saveState()
@property
def features(self):
# TODO: Remove unsupported features from this
return QPaintEngine.AllFeatures
def begin(self, device):
try:
self.canvas = Canvas(self.file_object,
pageCompression=0 if DEBUG else 1,
pagesize=(self.page_width, self.page_height))
self.init_page()
except:
traceback.print_exc()
return False
return True
def end_page(self, start_new=True):
self.canvas.restoreState()
self.canvas.showPage()
if start_new:
self.init_page()
def end(self):
try:
self.end_page(start_new=False)
self.canvas.save()
except:
traceback.print_exc()
return False
finally:
self.canvas = self.file_object = None
return True
def type(self):
return QPaintEngine.User
def drawPixmap(self, rect, pixmap, source_rect):
pass # TODO: Implement me
def drawImage(self, rect, image, source_rect, flags=Qt.AutoColor):
pass # TODO: Implement me
def updateState(self, state):
state = GraphicsState(state)
self.graphics_state = state(self)
def convert_path(self, path):
p = self.canvas.beginPath()
path = path.simplified()
i = 0
while i < path.elementCount():
elem = path.elementAt(i)
em = (elem.x, elem.y)
i += 1
if elem.isMoveTo():
p.moveTo(*em)
elif elem.isLineTo():
p.lineTo(*em)
elif elem.isCurveTo():
if path.elementCount() > i+1:
c1, c2 = map(lambda j:(
path.elementAt(j).x, path.elementAt(j).y), (i, i+1))
i += 2
p.curveTo(*(c1 + c2 + em))
return p
def drawPath(self, path):
p = self.convert_path(path)
old = self.canvas._fillMode
self.canvas._fillMode = {Qt.OddEvenFill:FILL_EVEN_ODD,
Qt.WindingFill:FILL_NON_ZERO}[path.fillRule()]
self.canvas.drawPath(p, stroke=self.do_stroke,
fill=self.do_fill)
self.canvas._fillMode = old
def set_clip(self, path):
p = self.convert_path(path)
old = self.canvas._fillMode
self.canvas._fillMode = {Qt.OddEvenFill:FILL_EVEN_ODD,
Qt.WindingFill:FILL_NON_ZERO}[path.fillRule()]
self.canvas.clipPath(p, fill=0, stroke=0)
self.canvas._fillMode = old
def drawPoints(self, points):
for point in points:
point = self.current_transform.map(point)
self.canvas.circle(point.x(), point.y(), 0.1,
stroke=self.do_stroke, fill=self.do_fill)
def drawRects(self, rects):
for rect in rects:
bl = rect.topLeft()
self.canvas.rect(bl.x(), bl.y(), rect.width(), rect.height(),
stroke=self.do_stroke, fill=self.do_fill)
def drawTextItem(self, point, text_item):
# TODO: Add support for underline, overline, strike through and fonts
# super(PdfEngine, self).drawTextItem(point, text_item)
f = text_item.font()
px, pt = f.pixelSize(), f.pointSizeF()
if px == -1:
sz = pt/self.yscale
else:
sz = px
q = self.qt_system
if not q.isIdentity() and q.type() > q.TxShear:
# We cant map this transform to a PDF text transform operator
f, s = self.do_fill, self.do_stroke
self.do_fill, self.do_stroke = 1, 0
super(PdfEngine, self).drawTextItem(point, text_item)
self.do_fill, self.do_stroke = f, s
return
to = self.canvas.beginText()
set_transform(QTransform(1, 0, 0, -1, point.x(), point.y()), to.setTextTransform)
fontname = 'Times-Roman'
to.setFont(fontname, sz) # TODO: Embed font
stretch = f.stretch()
if stretch != 100:
to.setHorizontalScale(stretch)
ws = f.wordSpacing()
if ws != 0:
to.setWordSpacing(self.map_dx(ws))
spacing = f.letterSpacing()
st = f.letterSpacingType()
if st == f.AbsoluteSpacing and spacing != 0:
to.setCharSpace(spacing)
# TODO: Handle percentage letter spacing
text = type(u'')(text_item.text())
to.textOut(text)
# TODO: handle colors
self.canvas.drawText(to)
def draw_line(kind='underline'):
tw = self.canvas.stringWidth(text, fontname, sz)
p = self.canvas.beginPath()
if kind == 'underline':
dy = -text_item.descent()
elif kind == 'overline':
dy = text_item.ascent()
elif kind == 'strikeout':
dy = text_item.ascent()/2
p.moveTo(point.x(), point.y()+dy)
p.lineTo(point.x()+tw, point.y()+dy)
if f.underline():
draw_line()
if f.overline():
draw_line('overline')
if f.strikeOut():
draw_line('strikeout')
def drawPolygon(self, points, mode):
points = [Point(p.x(), p.y()) for p in points]
p = self.canvas.beginPath()
p.moveTo(*points[0])
for point in points[1:]:
p.lineTo(*point)
p.close()
old = self.canvas._fillMode
self.canvas._fillMode = {self.OddEvenMode:FILL_EVEN_ODD,
self.WindingMode:FILL_NON_ZERO}.get(mode,
FILL_EVEN_ODD)
self.canvas.drawPath(p, fill=(mode in (self.OddEvenMode,
self.WindingMode, self.ConvexMode)))
self.canvas._fillMode = old
def __enter__(self):
self.canvas.saveState()
def __exit__(self, *args):
self.canvas.restoreState()
class PdfDevice(QPaintDevice): # {{{
def __init__(self, file_object, page_size=A4, left_margin=inch,
top_margin=inch, right_margin=inch, bottom_margin=inch):
QPaintDevice.__init__(self)
self.page_width, self.page_height = page_size
self.body_width = self.page_width - left_margin - right_margin
self.body_height = self.page_height - top_margin - bottom_margin
self.engine = PdfEngine(file_object, self.page_width, self.page_height,
left_margin, top_margin, right_margin,
bottom_margin, self.width(), self.height())
def paintEngine(self):
return self.engine
def metric(self, m):
if m in (self.PdmDpiX, self.PdmPhysicalDpiX):
return XDPI
if m in (self.PdmDpiY, self.PdmPhysicalDpiY):
return YDPI
if m == self.PdmDepth:
return 32
if m == self.PdmNumColors:
return sys.maxint
if m == self.PdmWidthMM:
return int(round(self.body_width * 0.35277777777778))
if m == self.PdmHeightMM:
return int(round(self.body_height * 0.35277777777778))
if m == self.PdmWidth:
return int(round(self.body_width * XDPI / 72.0))
if m == self.PdmHeight:
return int(round(self.body_height * YDPI / 72.0))
return 0
# }}}
if __name__ == '__main__':
QPainterPath, QPoint
app = QApplication([])
p = QPainter()
with open('/tmp/painter.pdf', 'wb') as f:
dev = PdfDevice(f)
p.begin(dev)
xmax, ymax = p.viewport().width(), p.viewport().height()
try:
p.drawRect(0, 0, xmax, ymax)
p.drawPolyline(QPoint(0, 0), QPoint(xmax, 0), QPoint(xmax, ymax),
QPoint(0, ymax), QPoint(0, 0))
pp = QPainterPath()
pp.addRect(0, 0, xmax, ymax)
p.drawPath(pp)
p.save()
for i in xrange(3):
p.drawRect(0, 0, xmax/10, xmax/10)
p.translate(xmax/10, xmax/10)
p.scale(1, 1.5)
p.restore()
p.save()
p.drawLine(0, 0, 5000, 0)
p.rotate(45)
p.drawLine(0, 0, 5000, 0)
p.restore()
f = p.font()
f.setPointSize(24)
f.setFamily('Times New Roman')
p.setFont(f)
# p.scale(2, 2)
p.rotate(45)
p.drawText(QPoint(100, 300), 'Some text')
finally:
p.end()
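
The comment block in PdfEngine.__init__ above describes mapping Qt's top-left, pixel-based coordinate system onto PDF's bottom-left, point-based one while honouring the page margins. A small numeric sketch of that affine map with assumed values (A4 page, 1 inch margins, a 600x800 pixel viewport); only the arithmetic of QTransform(sx, 0, 0, -sy, dx, dy) is reproduced:

page_w, page_h = 595.28, 841.89   # A4 in points, as in reportlab.lib.pagesizes.A4
margin = 72.0                     # 1 inch margin on every side
pix_w, pix_h = 600.0, 800.0       # assumed Qt viewport size in pixels

sx = (page_w - 2 * margin) / pix_w   # horizontal scale, pixels -> points
sy = (page_h - 2 * margin) / pix_h   # vertical scale, pixels -> points
dx, dy = margin, page_h - margin     # PDF's origin sits at the bottom-left

def qt_to_pdf(x, y):
    # Same map as QTransform(sx, 0, 0, -sy, dx, dy): scale, reflect about the
    # x-axis and translate so the Qt viewport lands inside the page margins.
    return sx * x + dx, -sy * y + dy

print(qt_to_pdf(0, 0))       # Qt top-left     -> approx. (72.0, 769.89)
print(qt_to_pdf(600, 800))   # Qt bottom-right -> approx. (523.28, 72.0)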

View File

@ -8,7 +8,7 @@ __docformat__ = 'restructuredtext en'
from functools import partial
from collections import Counter
from PyQt4.Qt import QObject, QTimer
from PyQt4.Qt import QObject, QTimer, QModelIndex
from calibre.gui2 import error_dialog, question_dialog
from calibre.gui2.dialogs.delete_matching_from_device import DeleteMatchingFromDeviceDialog
@ -285,6 +285,8 @@ class DeleteAction(InterfaceAction):
# Current row is after the last row, set it to the last row
current_row = view.row_count() - 1
view.set_current_row(current_row)
if view.model().rowCount(QModelIndex()) < 1:
self.gui.book_details.reset_info()
def library_ids_deleted2(self, ids_deleted, next_id=None):
view = self.gui.library_view

View File

@ -17,10 +17,12 @@ from Queue import Queue, Empty
from io import BytesIO
from PyQt4.Qt import (QStyledItemDelegate, QTextDocument, QRectF, QIcon, Qt,
QApplication, QDialog, QVBoxLayout, QLabel, QDialogButtonBox, QStyle,
QStackedWidget, QWidget, QTableView, QGridLayout, QFontInfo, QPalette,
QTimer, pyqtSignal, QAbstractTableModel, QVariant, QSize, QListView,
QPixmap, QAbstractListModel, QColor, QRect, QTextBrowser, QModelIndex)
QApplication, QDialog, QVBoxLayout, QLabel,
QDialogButtonBox, QStyle, QStackedWidget, QWidget,
QTableView, QGridLayout, QFontInfo, QPalette, QTimer,
pyqtSignal, QAbstractTableModel, QVariant, QSize,
QListView, QPixmap, QAbstractListModel, QColor, QRect,
QTextBrowser, QStringListModel)
from PyQt4.QtWebKit import QWebView
from calibre.customize.ui import metadata_plugins
@ -44,6 +46,8 @@ class RichTextDelegate(QStyledItemDelegate): # {{{
def __init__(self, parent=None, max_width=160):
QStyledItemDelegate.__init__(self, parent)
self.max_width = max_width
self.dummy_model = QStringListModel([' '], self)
self.dummy_index = self.dummy_model.index(0)
def to_doc(self, index, option=None):
doc = QTextDocument()
@ -66,7 +70,7 @@ class RichTextDelegate(QStyledItemDelegate): # {{{
return ans
def paint(self, painter, option, index):
QStyledItemDelegate.paint(self, painter, option, QModelIndex())
QStyledItemDelegate.paint(self, painter, option, self.dummy_index)
painter.save()
painter.setClipRect(QRectF(option.rect))
painter.translate(option.rect.topLeft())
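
The dummy_model/dummy_index change above is the fix for "rich text delegate not working with Qt compiled in debug mode" from the changelog: presumably a debug build of Qt asserts when QStyledItemDelegate.paint() receives an invalid QModelIndex(), so the delegate now paints the base item against a valid index taken from a one-row dummy model. A condensed sketch of the same idea (the class name is illustrative only):

from PyQt4.Qt import QStringListModel, QStyledItemDelegate

class DummyIndexDelegate(QStyledItemDelegate):
    def __init__(self, parent=None):
        QStyledItemDelegate.__init__(self, parent)
        self.dummy_model = QStringListModel([' '], self)
        self.dummy_index = self.dummy_model.index(0)  # always valid

    def paint(self, painter, option, index):
        # Draw the plain item against the valid dummy index; the custom rich
        # text painting for the real index then follows, as in single_download.py.
        QStyledItemDelegate.paint(self, painter, option, self.dummy_index)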

10 file diffs suppressed because they are too large

View File

@ -4,9 +4,9 @@
#
msgid ""
msgstr ""
"Project-Id-Version: calibre 0.9.9\n"
"POT-Creation-Date: 2012-12-07 09:25+IST\n"
"PO-Revision-Date: 2012-12-07 09:25+IST\n"
"Project-Id-Version: calibre 0.9.10\n"
"POT-Creation-Date: 2012-12-14 09:05+IST\n"
"PO-Revision-Date: 2012-12-14 09:05+IST\n"
"Last-Translator: Automatically generated\n"
"Language-Team: LANGUAGE\n"
"MIME-Version: 1.0\n"
@ -163,8 +163,8 @@ msgstr ""
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/basic_widgets.py:250
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/basic_widgets.py:261
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single.py:402
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:174
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:178
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:182
#: /home/kovid/work/calibre/src/calibre/gui2/store/search/models.py:202
#: /home/kovid/work/calibre/src/calibre/gui2/store/stores/google_books_plugin.py:107
#: /home/kovid/work/calibre/src/calibre/gui2/viewer/main.py:191
@ -1005,15 +1005,15 @@ msgstr ""
#: /home/kovid/work/calibre/src/calibre/devices/apple/driver.py:497
#: /home/kovid/work/calibre/src/calibre/devices/apple/driver.py:1143
#: /home/kovid/work/calibre/src/calibre/devices/apple/driver.py:1190
#: /home/kovid/work/calibre/src/calibre/devices/apple/driver.py:3293
#: /home/kovid/work/calibre/src/calibre/devices/apple/driver.py:3335
#: /home/kovid/work/calibre/src/calibre/devices/apple/driver.py:3292
#: /home/kovid/work/calibre/src/calibre/devices/apple/driver.py:3334
#, python-format
msgid "%(num)d of %(tot)d"
msgstr ""
#: /home/kovid/work/calibre/src/calibre/devices/apple/driver.py:505
#: /home/kovid/work/calibre/src/calibre/devices/apple/driver.py:1195
#: /home/kovid/work/calibre/src/calibre/devices/apple/driver.py:3342
#: /home/kovid/work/calibre/src/calibre/devices/apple/driver.py:3341
#: /home/kovid/work/calibre/src/calibre/gui2/ebook_download.py:110
msgid "finished"
msgstr ""
@ -1031,7 +1031,7 @@ msgid ""
"Click 'Show Details' for a list."
msgstr ""
#: /home/kovid/work/calibre/src/calibre/devices/apple/driver.py:2817
#: /home/kovid/work/calibre/src/calibre/devices/apple/driver.py:2816
#: /home/kovid/work/calibre/src/calibre/devices/nook/driver.py:106
#: /home/kovid/work/calibre/src/calibre/devices/prs505/sony_cache.py:448
#: /home/kovid/work/calibre/src/calibre/devices/prs505/sony_cache.py:471
@ -1054,13 +1054,13 @@ msgstr ""
msgid "News"
msgstr ""
#: /home/kovid/work/calibre/src/calibre/devices/apple/driver.py:2818
#: /home/kovid/work/calibre/src/calibre/devices/apple/driver.py:2817
#: /home/kovid/work/calibre/src/calibre/library/database2.py:3342
#: /home/kovid/work/calibre/src/calibre/library/database2.py:3360
msgid "Catalog"
msgstr ""
#: /home/kovid/work/calibre/src/calibre/devices/apple/driver.py:3185
#: /home/kovid/work/calibre/src/calibre/devices/apple/driver.py:3184
msgid "Communicate with iTunes."
msgstr ""
@ -3420,7 +3420,7 @@ msgstr ""
#: /home/kovid/work/calibre/src/calibre/gui2/dialogs/template_dialog.py:222
#: /home/kovid/work/calibre/src/calibre/gui2/library/models.py:58
#: /home/kovid/work/calibre/src/calibre/gui2/library/models.py:1074
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:146
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:150
#: /home/kovid/work/calibre/src/calibre/gui2/preferences/metadata_sources.py:162
#: /home/kovid/work/calibre/src/calibre/gui2/store/search/models.py:39
#: /home/kovid/work/calibre/src/calibre/gui2/store/stores/mobileread/models.py:23
@ -3511,7 +3511,7 @@ msgstr ""
#: /home/kovid/work/calibre/src/calibre/ebooks/metadata/book/base.py:782
#: /home/kovid/work/calibre/src/calibre/ebooks/oeb/transforms/jacket.py:183
#: /home/kovid/work/calibre/src/calibre/gui2/library/models.py:63
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:146
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:150
#: /home/kovid/work/calibre/src/calibre/library/field_metadata.py:305
msgid "Published"
msgstr ""
@ -4768,19 +4768,19 @@ msgid "None of the selected books are on the device"
msgstr ""
#: /home/kovid/work/calibre/src/calibre/gui2/actions/delete.py:263
#: /home/kovid/work/calibre/src/calibre/gui2/actions/delete.py:361
#: /home/kovid/work/calibre/src/calibre/gui2/actions/delete.py:363
msgid "Deleting books from device."
msgstr ""
#: /home/kovid/work/calibre/src/calibre/gui2/actions/delete.py:319
#: /home/kovid/work/calibre/src/calibre/gui2/actions/delete.py:321
msgid "Some of the selected books are on the attached device. <b>Where</b> do you want the selected files deleted from?"
msgstr ""
#: /home/kovid/work/calibre/src/calibre/gui2/actions/delete.py:331
#: /home/kovid/work/calibre/src/calibre/gui2/actions/delete.py:333
msgid "The selected books will be <b>permanently deleted</b> and the files removed from your calibre library. Are you sure?"
msgstr ""
#: /home/kovid/work/calibre/src/calibre/gui2/actions/delete.py:353
#: /home/kovid/work/calibre/src/calibre/gui2/actions/delete.py:355
msgid "The selected books will be <b>permanently deleted</b> from your device. Are you sure?"
msgstr ""
@ -4931,8 +4931,8 @@ msgstr ""
#: /home/kovid/work/calibre/src/calibre/gui2/actions/edit_metadata.py:101
#: /home/kovid/work/calibre/src/calibre/gui2/dnd.py:84
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:514
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:826
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:518
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:830
msgid "Download failed"
msgstr ""
@ -4964,7 +4964,7 @@ msgid "Download complete"
msgstr ""
#: /home/kovid/work/calibre/src/calibre/gui2/actions/edit_metadata.py:123
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:888
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:892
msgid "Download log"
msgstr ""
@ -8265,7 +8265,7 @@ msgstr ""
#: /home/kovid/work/calibre/src/calibre/gui2/device_drivers/mtp_config.py:411
#: /home/kovid/work/calibre/src/calibre/gui2/dialogs/message_box.py:141
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:881
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:885
#: /home/kovid/work/calibre/src/calibre/gui2/preferences/tweaks.py:344
#: /home/kovid/work/calibre/src/calibre/gui2/viewer/main_ui.py:220
msgid "Copy to clipboard"
@ -8822,7 +8822,7 @@ msgstr ""
#: /home/kovid/work/calibre/src/calibre/gui2/dialogs/edit_authors_dialog.py:122
#: /home/kovid/work/calibre/src/calibre/gui2/lrf_renderer/main.py:160
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:523
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:527
#: /home/kovid/work/calibre/src/calibre/gui2/viewer/main.py:721
msgid "No matches found"
msgstr ""
@ -9001,8 +9001,8 @@ msgstr ""
#: /home/kovid/work/calibre/src/calibre/gui2/dialogs/message_box.py:196
#: /home/kovid/work/calibre/src/calibre/gui2/dialogs/message_box.py:251
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:946
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:1055
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:950
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:1059
#: /home/kovid/work/calibre/src/calibre/gui2/proceed.py:48
msgid "View log"
msgstr ""
@ -11581,7 +11581,7 @@ msgid "Previous Page"
msgstr ""
#: /home/kovid/work/calibre/src/calibre/gui2/lrf_renderer/main_ui.py:133
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:943
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:947
#: /home/kovid/work/calibre/src/calibre/gui2/store/web_store_dialog_ui.py:62
#: /home/kovid/work/calibre/src/calibre/gui2/viewer/main_ui.py:208
msgid "Back"
@ -12026,7 +12026,7 @@ msgid "Edit Metadata"
msgstr ""
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single.py:63
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:936
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:940
#: /home/kovid/work/calibre/src/calibre/library/server/browse.py:108
#: /home/kovid/work/calibre/src/calibre/web/feeds/templates.py:219
#: /home/kovid/work/calibre/src/calibre/web/feeds/templates.py:410
@ -12163,15 +12163,15 @@ msgstr ""
msgid "Basic metadata"
msgstr ""
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:146
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:150
msgid "Has cover"
msgstr ""
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:146
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:150
msgid "Has summary"
msgstr ""
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:203
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:207
msgid ""
"The has cover indication is not fully\n"
"reliable. Sometimes results marked as not\n"
@ -12179,62 +12179,62 @@ msgid ""
"cover stage, and vice versa."
msgstr ""
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:288
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:292
msgid "See at"
msgstr ""
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:442
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:446
msgid "calibre is downloading metadata from: "
msgstr ""
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:464
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:468
msgid "Please wait"
msgstr ""
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:496
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:500
msgid "Query: "
msgstr ""
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:515
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:519
msgid "Failed to download metadata. Click Show Details to see details"
msgstr ""
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:524
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:528
msgid "Failed to find any books that match your search. Try making the search <b>less specific</b>. For example, use only the author's last name and a single distinctive word from the title.<p>To see the full log, click Show Details."
msgstr ""
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:632
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:636
msgid "Current cover"
msgstr ""
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:635
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:639
msgid "Searching..."
msgstr ""
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:796
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:800
#, python-format
msgid "Downloading covers for <b>%s</b>, please wait..."
msgstr ""
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:827
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:831
msgid "Failed to download any covers, click \"Show details\" for details."
msgstr ""
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:833
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:837
#, python-format
msgid "Could not find any covers for <b>%s</b>"
msgstr ""
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:835
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:839
#, python-format
msgid "Found <b>%(num)d</b> covers of %(title)s. Pick the one you like best."
msgstr ""
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:924
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:928
msgid "Downloading metadata..."
msgstr ""
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:1039
#: /home/kovid/work/calibre/src/calibre/gui2/metadata/single_download.py:1043
msgid "Downloading cover..."
msgstr ""

File diffs suppressed because they are too large (60 files)

Some files were not shown because too many files have changed in this diff