Merge from trunk
commit 82e45728a9
@@ -1,107 +1,90 @@
#!/usr/bin/env python

__license__ = 'GPL v3'
__copyright__ = '2008, Mathieu Godlewski <mathieu at godlewski.fr>'
'''
lemonde.fr
'''

import re
from calibre.web.feeds.news import BasicNewsRecipe

from calibre.web.feeds.recipes import BasicNewsRecipe

class LeMonde(BasicNewsRecipe):
    title = 'LeMonde.fr'
    __author__ = 'Mathieu Godlewski and Sujata Raman'
    description = 'Global news in french'
    oldest_article = 3
    language = 'fr'

    max_articles_per_feed = 30
    title = 'Le Monde'
    __author__ = 'veezh'
    description = 'Actualités'
    oldest_article = 1
    max_articles_per_feed = 100
    no_stylesheets = True
    remove_javascript = True
    #delay = 1
    use_embedded_content = False
    encoding = 'cp1252'
    publisher = 'lemonde.fr'
    language = 'fr'
    conversion_options = {
        'comments' : description
        ,'language' : language
        ,'publisher' : publisher
        ,'linearize_tables': True
    }

    remove_empty_feeds = True

    filterDuplicates = True

    def preprocess_html(self, soup):
        for alink in soup.findAll('a'):
            if alink.string is not None:
                tstr = alink.string
                alink.replaceWith(tstr)
        return soup

    preprocess_regexps = [
        (re.compile(r' \''), lambda match: ' ‘'),
        (re.compile(r'\''), lambda match: '’'),
        (re.compile(r'"<'), lambda match: ' »<'),
        (re.compile(r'>"'), lambda match: '>« '),
        (re.compile(r'’"'), lambda match: '’« '),
        (re.compile(r' "'), lambda match: ' « '),
        (re.compile(r'" '), lambda match: ' » '),
        (re.compile(r'\("'), lambda match: '(« '),
        (re.compile(r'"\)'), lambda match: ' »)'),
        (re.compile(r'"\.'), lambda match: ' ».'),
        (re.compile(r'",'), lambda match: ' »,'),
        (re.compile(r'"\?'), lambda match: ' »?'),
        (re.compile(r'":'), lambda match: ' »:'),
        (re.compile(r'";'), lambda match: ' »;'),
        (re.compile(r'"\!'), lambda match: ' »!'),
        (re.compile(r' :'), lambda match: ' :'),
        (re.compile(r' ;'), lambda match: ' ;'),
        (re.compile(r' \?'), lambda match: ' ?'),
        (re.compile(r' \!'), lambda match: ' !'),
        (re.compile(r'\s»'), lambda match: ' »'),
        (re.compile(r'«\s'), lambda match: '« '),
        (re.compile(r' %'), lambda match: ' %'),
        (re.compile(r'\.jpg » border='), lambda match: '.jpg'),
        (re.compile(r'\.png » border='), lambda match: '.png'),
    ]

    keep_only_tags = [
        dict(name='div', attrs={'class':['contenu']})
    ]

    remove_tags_after = [dict(id='appel_temoignage')]

    def get_article_url(self, article):
        link = article.get('link')
        if 'blog' not in link:
            return link

    # cover_url='http://abonnes.lemonde.fr/titresdumonde/'+date.today().strftime("%y%m%d")+'/1.jpg'

    extra_css = '''
        .dateline{color:#666666;font-family:verdana,sans-serif;font-size:x-small;}
        .author{font-family:verdana,sans-serif;font-size:x-small;color:#222222;}
        .articleImage{color:#666666;font-family:verdana,sans-serif;font-size:x-small;}
        .mainText{font-family:Georgia,serif;color:#222222;}
        .LM_articleText{font-family:Arial,Helvetica,sans-serif;}
        .LM_titleZone{font-family:Arial,Helvetica,sans-serif;}
        .mainContent{font-family:Georgia,serif;}
        .LM_content{font-family:Georgia,serif;}
        .LM_caption{font-family:Georgia,serif;font-size:-small;}
        .LM_imageSource{font-family:Arial,Helvetica,sans-serif;font-size:x-small;color:#666666;}
        h1{font-family:Arial,Helvetica,sans-serif;font-size:medium;color:#000000;}
        .post{font-family:Arial,Helvetica,sans-serif;}
        .mainTitle{font-family:Georgia,serif;}
        .content{font-family:Georgia,serif;}
        .entry{font-family:Georgia,serif;}
        h2{font-family:Arial,Helvetica,sans-serif;font-size:large;}
        small{font-family:Arial,Helvetica,sans-serif; color:#ED1B23;}
        '''

    feeds = [
        ('A la Une', 'http://www.lemonde.fr/rss/une.xml'),
        ('International', 'http://www.lemonde.fr/rss/sequence/0,2-3210,1-0,0.xml'),
        ('Europe', 'http://www.lemonde.fr/rss/sequence/0,2-3214,1-0,0.xml'),
        ('Societe', 'http://www.lemonde.fr/rss/sequence/0,2-3224,1-0,0.xml'),
        ('Economie', 'http://www.lemonde.fr/rss/sequence/0,2-3234,1-0,0.xml'),
        ('Medias', 'http://www.lemonde.fr/rss/sequence/0,2-3236,1-0,0.xml'),
        ('Rendez-vous', 'http://www.lemonde.fr/rss/sequence/0,2-3238,1-0,0.xml'),
        ('Sports', 'http://www.lemonde.fr/rss/sequence/0,2-3242,1-0,0.xml'),
        ('Planete', 'http://www.lemonde.fr/rss/sequence/0,2-3244,1-0,0.xml'),
        ('Culture', 'http://www.lemonde.fr/rss/sequence/0,2-3246,1-0,0.xml'),
        ('Technologies', 'http://www.lemonde.fr/rss/sequence/0,2-651865,1-0,0.xml'),
        ('Cinema', 'http://www.lemonde.fr/rss/sequence/0,2-3476,1-0,0.xml'),
        ('Voyages', 'http://www.lemonde.fr/rss/sequence/0,2-3546,1-0,0.xml'),
        ('Livres', 'http://www.lemonde.fr/rss/sequence/0,2-3260,1-0,0.xml'),
        ('Examens', 'http://www.lemonde.fr/rss/sequence/0,2-3404,1-0,0.xml'),
        ('Opinions', 'http://www.lemonde.fr/rss/sequence/0,2-3232,1-0,0.xml')
    ]
    keep_only_tags = [dict(name='div', attrs={'id':["mainTitle","mainContent","LM_content","content"]}),
                      dict(name='div', attrs={'class':["post"]})
                      ]
        ('A la une', 'http://www.lemonde.fr/rss/une.xml'),
        ('International', 'http://www.lemonde.fr/rss/tag/international.xml'),
        ('Europe', 'http://www.lemonde.fr/rss/tag/europe.xml'),
        (u'Société', 'http://www.lemonde.fr/rss/tag/societe.xml'),
        ('Economie', 'http://www.lemonde.fr/rss/tag/economie.xml'),
        (u'Médias', 'http://www.lemonde.fr/rss/tag/actualite-medias.xml'),
        (u'Planète', 'http://www.lemonde.fr/rss/tag/planete.xml'),
        ('Culture', 'http://www.lemonde.fr/rss/tag/culture.xml'),
        ('Technologies', 'http://www.lemonde.fr/rss/tag/technologies.xml'),
        ('Livres', 'http://www.lemonde.fr/rss/tag/livres.xml'),

    remove_tags = [dict(name='img', attrs={'src':'http://medias.lemonde.fr/mmpub/img/lgo/lemondefr_pet.gif'}),
                   dict(name='div', attrs={'id':'xiti-logo-noscript'}),
                   dict(name='br', attrs={}),
                   dict(name='iframe', attrs={}),
                   dict(name='table', attrs={'id':["toolBox"]}),
                   dict(name='table', attrs={'class':["bottomToolBox"]}),
                   dict(name='div', attrs={'class':["pageNavigation","LM_pagination","fenetreBoxesContainer","breakingNews","LM_toolsBottom","LM_comments","LM_tools","pave_meme_sujet_hidden","boxMemeSujet"]}),
                   dict(name='div', attrs={'id':["miniUne","LM_sideBar"]}),
                   ]

    preprocess_regexps = [ (re.compile(i[0], re.IGNORECASE|re.DOTALL), i[1]) for i in
        [
            (r'<html.*(<div class="post".*?>.*?</div>.*?<div class="entry">.*?</div>).*You can start editing here.*</html>', lambda match : '<html><body>'+match.group(1)+'</body></html>'),
            (r'<p> </p>', lambda match : ''),
            (r'<img src="http://medias\.lemonde\.fr/mmpub/img/let/(.)\.gif"[^>]*><div class=ar-txt>', lambda match : '<div class=ar-txt>'+match.group(1).upper()),
            (r'<img src="http://medias\.lemonde\.fr/mmpub/img/let/q(.)\.gif"[^>]*><div class=ar-txt>', lambda match : '<div class=ar-txt>"'+match.group(1).upper()),
            (r'(<div class=desc><b>.*</b></div>).*</body>', lambda match : match.group(1)),
        ]
    ]

    article_match_regexps = [ (re.compile(i)) for i in
        [
            (r'http://www\.lemonde\.fr/\S+/article/.*'),
            (r'http://www\.lemonde\.fr/\S+/portfolio/.*'),
            (r'http://www\.lemonde\.fr/\S+/article_interactif/.*'),
            (r'http://\S+\.blog\.lemonde\.fr/.*'),
        ]
    ]

    # def print_version(self, url):
    #    return re.sub('http://www\.lemonde\.fr/.*_([0-9]+)_[0-9]+\.html.*','http://www.lemonde.fr/web/imprimer_element/0,40-0,50-\\1,0.html' ,url)

    # Used to filter duplicated articles
    articles_list = []

    def get_cover_url(self):
        cover_url = None
        soup = self.index_to_soup('http://www.lemonde.fr/web/monde_pdf/0,33-0,1-0,0.html')
@@ -111,42 +94,3 @@ class LeMonde(BasicNewsRecipe):
            cover_url = link_item.img['src']

        return cover_url

    def get_article_url(self, article):
        url=article.get('link', None)
        url=url[0:url.find("#")]
        if url in self.articles_list:
            self.log_debug(_('Skipping duplicated article: %s')%url)
            return False
        if self.is_article_wanted(url):
            self.articles_list.append(url)
            if '/portfolio/' in url or '/video/' in url:
                url = None
            return url
        self.log_debug(_('Skipping filtered article: %s')%url)
        url = article.get('guid', None)

        return False

    def is_article_wanted(self, url):
        if self.article_match_regexps:
            for m in self.article_match_regexps:
                if m.search(url):
                    return True
            return False
        return False

    def preprocess_html(self, soup):
        for item in soup.findAll(style=True):
            del item['style']
        for item in soup.findAll(face=True):
            del item['face']
        for tag in soup.findAll(name=['ul','li']):
            tag.name = 'div'

        return soup
@@ -1,4 +1,5 @@
from calibre.web.feeds.news import BasicNewsRecipe
import re

class MainichiDailyITNews(BasicNewsRecipe):
    title = u'\u6bce\u65e5\u65b0\u805e(IT&\u5bb6\u96fb)'
@@ -14,6 +15,7 @@ class MainichiDailyITNews(BasicNewsRecipe):

    remove_tags_before = {'class':"NewsTitle"}
    remove_tags = [{'class':"RelatedArticle"}]
    remove_tags_after = {'class':"Credit"}

    def parse_feeds(self):

@@ -29,4 +31,4 @@ class MainichiDailyITNews(BasicNewsRecipe):
            index = curfeed.articles.index(d)
            curfeed.articles[index:index+1] = []

        return feeds remove_tags_after = {'class':"Credit"}
        return feeds
@@ -1,8 +1,9 @@
__license__ = 'GPL v3'
__copyright__ = '2010, Eddie Lau'
'''
modified from Singtao Toronto calibre recipe by rty
Change Log:
2010/12/07: add entertainment section, use newspaper front page as ebook cover, suppress date display in section list
(to avoid wrong date display in case the user generates the ebook in a time zone different from HKT)
2010/11/22: add English section, remove eco-news section which is not updated daily, correct
ordering of articles
2010/11/12: add news image and eco-news section
@@ -17,14 +18,15 @@ from calibre.web.feeds.recipes import BasicNewsRecipe
from contextlib import nested

from calibre import __appname__, strftime
from calibre import __appname__
from calibre.ebooks.BeautifulSoup import BeautifulSoup
from calibre.ebooks.metadata.opf2 import OPFCreator
from calibre.ebooks.metadata.toc import TOC
from calibre.ebooks.metadata import MetaInformation
from calibre.utils.date import now as nowf

class MPHKRecipe(BasicNewsRecipe):
    IsKindleUsed = True # to avoid generating periodical in which CJK characters can't be displayed in section/article view

    title = 'Ming Pao - Hong Kong'
    oldest_article = 1
    max_articles_per_feed = 100
@@ -39,13 +41,13 @@ class MPHKRecipe(BasicNewsRecipe):
    encoding = 'Big5-HKSCS'
    recursions = 0
    conversion_options = {'linearize_tables':True}
    extra_css = 'img {display: block; margin-left: auto; margin-right: auto; margin-top: 10px; margin-bottom: 10px;}'
    #extra_css = 'img {float:right; margin:4px;}'
    timefmt = ''
    extra_css = 'img {display: block; margin-left: auto; margin-right: auto; margin-top: 10px; margin-bottom: 10px;} font>b {font-size:200%; font-weight:bold;}'
    masthead_url = 'http://news.mingpao.com/image/portals_top_logo_news.gif'
    keep_only_tags = [dict(name='h1'),
                      #dict(name='font', attrs={'style':['font-size:14pt; line-height:160%;']}), # for entertainment page
                      dict(name='font', attrs={'style':['font-size:14pt; line-height:160%;']}), # for entertainment page title
                      dict(attrs={'class':['photo']}),
                      dict(attrs={'id':['newscontent']}),
                      dict(attrs={'id':['newscontent']}), # entertainment page content
                      dict(attrs={'id':['newscontent01','newscontent02']})]
    remove_tags = [dict(name='style'),
                   dict(attrs={'id':['newscontent135']})] # for the finance page
@@ -55,51 +57,68 @@ class MPHKRecipe(BasicNewsRecipe):
                          lambda match: '<h1>'),
                         (re.compile(r'</h5>', re.DOTALL|re.IGNORECASE),
                          lambda match: '</h1>'),
                         (re.compile(r'<p><a href=.+?</a></p>', re.DOTALL|re.IGNORECASE), # for entertainment page
                          lambda match: '')
                        ]

    def image_url_processor(cls, baseurl, url):
        # trick: break the url at the first occurance of digit, add an additional
        # '_' at the front
        # not working, may need to move this to preprocess_html() method
        #minIdx = 10000
        #i0 = url.find('0')
        #if i0 >= 0 and i0 < minIdx:
        #  minIdx = i0
        #i1 = url.find('1')
        #if i1 >= 0 and i1 < minIdx:
        #  minIdx = i1
        #i2 = url.find('2')
        #if i2 >= 0 and i2 < minIdx:
        #  minIdx = i2
        #i3 = url.find('3')
        #if i3 >= 0 and i0 < minIdx:
        #  minIdx = i3
        #i4 = url.find('4')
        #if i4 >= 0 and i4 < minIdx:
        #  minIdx = i4
        #i5 = url.find('5')
        #if i5 >= 0 and i5 < minIdx:
        #  minIdx = i5
        #i6 = url.find('6')
        #if i6 >= 0 and i6 < minIdx:
        #  minIdx = i6
        #i7 = url.find('7')
        #if i7 >= 0 and i7 < minIdx:
        #  minIdx = i7
        #i8 = url.find('8')
        #if i8 >= 0 and i8 < minIdx:
        #  minIdx = i8
        #i9 = url.find('9')
        #if i9 >= 0 and i9 < minIdx:
        #  minIdx = i9
        #return url[0:minIdx] + '_' + url[minIdx+1:]
        # minIdx = 10000
        # i0 = url.find('0')
        # if i0 >= 0 and i0 < minIdx:
        #   minIdx = i0
        # i1 = url.find('1')
        # if i1 >= 0 and i1 < minIdx:
        #   minIdx = i1
        # i2 = url.find('2')
        # if i2 >= 0 and i2 < minIdx:
        #   minIdx = i2
        # i3 = url.find('3')
        # if i3 >= 0 and i0 < minIdx:
        #   minIdx = i3
        # i4 = url.find('4')
        # if i4 >= 0 and i4 < minIdx:
        #   minIdx = i4
        # i5 = url.find('5')
        # if i5 >= 0 and i5 < minIdx:
        #   minIdx = i5
        # i6 = url.find('6')
        # if i6 >= 0 and i6 < minIdx:
        #   minIdx = i6
        # i7 = url.find('7')
        # if i7 >= 0 and i7 < minIdx:
        #   minIdx = i7
        # i8 = url.find('8')
        # if i8 >= 0 and i8 < minIdx:
        #   minIdx = i8
        # i9 = url.find('9')
        # if i9 >= 0 and i9 < minIdx:
        #   minIdx = i9
        return url

    def get_fetchdate(self):
    def get_dtlocal(self):
        dt_utc = datetime.datetime.utcnow()
        # convert UTC to local hk time - at around HKT 6.00am, all news are available
        dt_local = dt_utc - datetime.timedelta(-2.0/24)
        return dt_local.strftime("%Y%m%d")
        return dt_local

    def get_fetchdate(self):
        return self.get_dtlocal().strftime("%Y%m%d")

    def get_fetchday(self):
        # convert UTC to local hk time - at around HKT 6.00am, all news are available
        return self.get_dtlocal().strftime("%d")

    def get_cover_url(self):
        cover = 'http://news.mingpao.com/' + self.get_fetchdate() + '/' + self.get_fetchdate() + '_' + self.get_fetchday() + 'gacov.jpg'
        br = BasicNewsRecipe.get_browser()
        try:
            br.open(cover)
        except:
            cover = None
        return cover

    def parse_index(self):
        feeds = []
@@ -127,9 +146,9 @@ class MPHKRecipe(BasicNewsRecipe):
        # if eco_articles:
        #     feeds.append((u'\u74b0\u4fdd Eco News', eco_articles))
        # special - entertainment
        #ent_articles = self.parse_ent_section('http://ol.mingpao.com/cfm/star1.cfm')
        #if ent_articles:
        #    feeds.append(('Entertainment', ent_articles))
        ent_articles = self.parse_ent_section('http://ol.mingpao.com/cfm/star1.cfm')
        if ent_articles:
            feeds.append((u'\u5f71\u8996 Entertainment', ent_articles))
        return feeds

    def parse_section(self, url):
@@ -164,6 +183,7 @@ class MPHKRecipe(BasicNewsRecipe):
        return current_articles

    def parse_eco_section(self, url):
        dateStr = self.get_fetchdate()
        soup = self.index_to_soup(url)
        divs = soup.findAll(attrs={'class': ['bullet']})
        current_articles = []
@@ -173,23 +193,25 @@ class MPHKRecipe(BasicNewsRecipe):
            title = self.tag_to_string(a)
            url = a.get('href', False)
            url = 'http://tssl.mingpao.com/htm/marketing/eco/cfm/' +url
            if url not in included_urls and url.rfind('Redirect') == -1:
            if url not in included_urls and url.rfind('Redirect') == -1 and not url.rfind('.txt') == -1 and not url.rfind(dateStr) == -1:
                current_articles.append({'title': title, 'url': url, 'description':''})
                included_urls.append(url)
        return current_articles

    #def parse_ent_section(self, url):
    #    dateStr = self.get_fetchdate()
    #    soup = self.index_to_soup(url)
    #    a = soup.findAll('a', href=True)
    #    current_articles = []
    #    included_urls = []
    #    for i in a:
    #        title = self.tag_to_string(i)
    #        url = 'http://ol.mingpao.com/cfm/' + i.get('href', False)
    #        if url not in included_urls and not url.rfind('.txt') == -1 and not url.rfind(dateStr) == -1 and not title == '':
    #            current_articles.append({'title': title, 'url': url, 'description': ''})
    #    return current_articles
    def parse_ent_section(self, url):
        soup = self.index_to_soup(url)
        a = soup.findAll('a', href=True)
        a.reverse()
        current_articles = []
        included_urls = []
        for i in a:
            title = self.tag_to_string(i)
            url = 'http://ol.mingpao.com/cfm/' + i.get('href', False)
            if (url not in included_urls) and (not url.rfind('.txt') == -1) and (not url.rfind('star') == -1):
                current_articles.append({'title': title, 'url': url, 'description': ''})
                included_urls.append(url)
        current_articles.reverse()
        return current_articles

    def preprocess_html(self, soup):
        for item in soup.findAll(style=True):
@@ -201,21 +223,26 @@ class MPHKRecipe(BasicNewsRecipe):
        return soup

    def create_opf(self, feeds, dir=None):
        #super(MPHKRecipe,self).create_opf(feeds, dir)
        if self.IsKindleUsed == False:
            super(MPHKRecipe,self).create_opf(feeds, dir)
            return
        if dir is None:
            dir = self.output_dir
        title = self.short_title()
        if self.output_profile.periodical_date_in_title:
            title += strftime(self.timefmt)
        title += ' ' + self.get_fetchdate()
        #if self.output_profile.periodical_date_in_title:
        #    title += strftime(self.timefmt)
        mi = MetaInformation(title, [__appname__])
        mi.publisher = __appname__
        mi.author_sort = __appname__
        mi.publication_type = self.publication_type+':'+self.short_title()
        mi.timestamp = nowf()
        #mi.timestamp = nowf()
        mi.timestamp = self.get_dtlocal()
        mi.comments = self.description
        if not isinstance(mi.comments, unicode):
            mi.comments = mi.comments.decode('utf-8', 'replace')
        mi.pubdate = nowf()
        #mi.pubdate = nowf()
        mi.pubdate = self.get_dtlocal()
        opf_path = os.path.join(dir, 'index.opf')
        ncx_path = os.path.join(dir, 'index.ncx')
        opf = OPFCreator(dir, mi)
@@ -14,7 +14,7 @@ class TheHeiseOnline(BasicNewsRecipe):
    oldest_article = 3
    description = 'In association with Heise Online'
    publisher = 'Heise Media UK Ltd.'
    category = 'news, technology, security'
    category = 'news, technology, security, OSS, internet'
    max_articles_per_feed = 100
    language = 'en'
    encoding = 'utf-8'
@@ -27,6 +27,12 @@ class TheHeiseOnline(BasicNewsRecipe):
    feeds = [
        (u'The H News Feed', u'http://www.h-online.com/news/atom.xml')
    ]
    cover_url = 'http://www.h-online.com/icons/logo_theH.gif'

    remove_tags = [
        dict(id="logo"),
        dict(id="footer")
    ]

    def print_version(self, url):
        return url + '?view=print'
resources/recipes/toyokeizai.recipe (new file, 68 lines)
@@ -0,0 +1,68 @@
__license__ = 'GPL v3'
__copyright__ = '2010, Hiroshi Miura <miurahr@linux.com>'
'''
www.toyokeizai.net
'''

from calibre.web.feeds.news import BasicNewsRecipe
import re

class Toyokeizai(BasicNewsRecipe):
    title = u'ToyoKeizai News'
    __author__ = 'Hiroshi Miura'
    oldest_article = 1
    max_articles_per_feed = 50
    description = 'Japanese traditional economy and business magazine, only for advanced subscribers supported'
    publisher = 'Toyokeizai Shinbun Sha'
    category = 'economy, magazine, japan'
    language = 'ja'
    encoding = 'euc-jp'
    index = 'http://member.toyokeizai.net/news/'
    remove_javascript = True
    no_stylesheets = True
    masthead_title = u'TOYOKEIZAI'
    needs_subscription = True
    timefmt = '[%y/%m/%d]'
    recursions = 5
    match_regexps =[ r'page/\d+']

    keep_only_tags = [
        dict(name='div', attrs={'class':['news']}),
        dict(name='div', attrs={'class':["news_cont"]}),
        dict(name='div', attrs={'class':["news_con"]}),
        # dict(name='div', attrs={'class':["norightsMessage"]})
    ]
    remove_tags = [{'class':"mt35 mgz"},
                   {'class':"mt20 newzia"},
                   {'class':"mt20 fontS"},
                   {'class':"bk_btn_m"},
                   dict(id='newzia_connect_member')
                  ]

    def parse_index(self):
        feeds = []
        soup = self.index_to_soup(self.index)
        topstories = soup.find('ul',attrs={'class':'list6'})
        if topstories:
            newsarticles = []
            for itt in topstories.findAll('li'):
                itema = itt.find('a',href=True)
                itemd = itt.find('span')
                newsarticles.append({
                    'title'      :itema.string
                    ,'date'      :re.compile(r"\- ").sub("",itemd.string)
                    ,'url'       :'http://member.toyokeizai.net' + itema['href']
                    ,'description':itema['title']
                })
            feeds.append(('news', newsarticles))
        return feeds

    def get_browser(self):
        br = BasicNewsRecipe.get_browser()
        if self.username is not None and self.password is not None:
            br.open('http://member.toyokeizai.net/norights/form/')
            br.select_form(nr=0)
            br['kaiin_id'] = self.username
            br['password'] = self.password
            res = br.submit()
        return br
@@ -9,6 +9,7 @@ from uuid import uuid4

from calibre.constants import __appname__, __version__
from calibre import strftime, prepare_string_for_xml as xml
from calibre.utils.date import parse_date

SONY_METADATA = u'''\
<?xml version="1.0" encoding="utf-8"?>
@@ -87,7 +88,8 @@ def sony_metadata(oeb):
        pass

    try:
        date = unicode(m.date[0]).split('T')[0]
        date = parse_date(unicode(m.date[0]),
                as_utc=False).strftime('%Y-%m-%d')
    except:
        date = strftime('%Y-%m-%d')
    try:
@@ -544,7 +544,7 @@ class OEBReader(object):
        data = render_html_svg_workaround(path, self.logger)
        if not data:
            data = ''
        id, href = self.oeb.manifest.generate('cover', 'cover.jpeg')
        id, href = self.oeb.manifest.generate('cover', 'cover.jpg')
        item = self.oeb.manifest.add(id, href, JPEG_MIME, data=data)
        return item
@@ -61,6 +61,7 @@ class AddAction(InterfaceAction):
        self._adder = Adder(self.gui,
                self.gui.library_view.model().db,
                self.Dispatcher(self._files_added), spare_server=self.gui.spare_server)
        self.gui.tags_view.disable_recounting = True
        self._adder.add_recursive(root, single)

    def add_recursive_single(self, *args):
@@ -201,9 +202,11 @@ class AddAction(InterfaceAction):
        self._adder = Adder(self.gui,
                None if to_device else self.gui.library_view.model().db,
                self.Dispatcher(self.__adder_func), spare_server=self.gui.spare_server)
        self.gui.tags_view.disable_recounting = True
        self._adder.add(paths)

    def _files_added(self, paths=[], names=[], infos=[], on_card=None):
        self.gui.tags_view.disable_recounting = False
        if paths:
            self.gui.upload_books(paths,
                list(map(ascii_filename, names)),
@@ -214,6 +217,7 @@ class AddAction(InterfaceAction):
        self.gui.library_view.model().books_added(self._adder.number_of_books_added)
        if hasattr(self.gui, 'db_images'):
            self.gui.db_images.reset()
        self.gui.tags_view.recount()
        if getattr(self._adder, 'merged_books', False):
            books = u'\n'.join([x if isinstance(x, unicode) else
                x.decode(preferred_encoding, 'replace') for x in
@@ -5,13 +5,67 @@ __license__ = 'GPL v3'
__copyright__ = '2010, Kovid Goyal <kovid@kovidgoyal.net>'
__docformat__ = 'restructuredtext en'

from PyQt4.Qt import QMenu
from functools import partial

from PyQt4.Qt import QMenu, QObject, QTimer

from calibre.gui2 import error_dialog
from calibre.gui2.dialogs.delete_matching_from_device import DeleteMatchingFromDeviceDialog
from calibre.gui2.dialogs.confirm_delete import confirm
from calibre.gui2.actions import InterfaceAction

single_shot = partial(QTimer.singleShot, 10)

class MultiDeleter(QObject):

    def __init__(self, gui, rows, callback):
        from calibre.gui2.dialogs.progress import ProgressDialog
        QObject.__init__(self, gui)
        self.model = gui.library_view.model()
        self.ids = list(map(self.model.id, rows))
        self.gui = gui
        self.failures = []
        self.deleted_ids = []
        self.callback = callback
        single_shot(self.delete_one)
        self.pd = ProgressDialog(_('Deleting...'), parent=gui,
                cancelable=False, min=0, max=len(self.ids))
        self.pd.setModal(True)
        self.pd.show()

    def delete_one(self):
        if not self.ids:
            self.cleanup()
            return
        id_ = self.ids.pop()
        title = 'id:%d'%id_
        try:
            title_ = self.model.db.title(id_, index_is_id=True)
            if title_:
                title = title_
            self.model.db.delete_book(id_, notify=False, commit=False)
            self.deleted_ids.append(id_)
        except:
            import traceback
            self.failures.append((id_, title, traceback.format_exc()))
        single_shot(self.delete_one)
        self.pd.value += 1
        self.pd.set_msg(_('Deleted') + ' ' + title)

    def cleanup(self):
        self.pd.hide()
        self.pd = None
        self.model.db.commit()
        self.model.db.clean()
        self.model.books_deleted()
        self.gui.tags_view.recount()
        self.callback(self.deleted_ids)
        if self.failures:
            msg = ['==> '+x[1]+'\n'+x[2] for x in self.failures]
            error_dialog(self.gui, _('Failed to delete'),
                    _('Failed to delete some books, click the Show Details button'
                    ' for details.'), det_msg='\n\n'.join(msg), show=True)

class DeleteAction(InterfaceAction):

    name = 'Remove Books'
@@ -179,8 +233,13 @@ class DeleteAction(InterfaceAction):
            row = None
            if ci.isValid():
                row = ci.row()
            if len(rows) < 5:
                ids_deleted = view.model().delete_books(rows)
                self.library_ids_deleted(ids_deleted, row)
            else:
                self.__md = MultiDeleter(self.gui, rows,
                        partial(self.library_ids_deleted, current_row=row))

        else:
            if not confirm('<p>'+_('The selected books will be '
                '<b>permanently deleted</b> '
@@ -3,41 +3,55 @@ UI for adding books to the database and saving books to disk
'''
import os, shutil, time
from Queue import Queue, Empty
from threading import Thread
from functools import partial

from PyQt4.Qt import QThread, SIGNAL, QObject, QTimer, Qt, \
        QProgressDialog
from PyQt4.Qt import QThread, QObject, Qt, QProgressDialog, pyqtSignal, QTimer

from calibre.gui2.dialogs.progress import ProgressDialog
from calibre.gui2 import question_dialog, error_dialog, info_dialog
from calibre.ebooks.metadata.opf2 import OPF
from calibre.ebooks.metadata import MetaInformation
from calibre.constants import preferred_encoding, filesystem_encoding
from calibre.constants import preferred_encoding, filesystem_encoding, DEBUG
from calibre.utils.config import prefs
from calibre import prints

single_shot = partial(QTimer.singleShot, 75)

class DuplicatesAdder(QObject): # {{{

    added = pyqtSignal(object)
    adding_done = pyqtSignal()

class DuplicatesAdder(QThread): # {{{
    # Add duplicate books
    def __init__(self, parent, db, duplicates, db_adder):
        QThread.__init__(self, parent)
        QObject.__init__(self, parent)
        self.db, self.db_adder = db, db_adder
        self.duplicates = duplicates
        self.duplicates = list(duplicates)
        self.count = 0
        single_shot(self.add_one)

    def run(self):
        count = 1
        for mi, cover, formats in self.duplicates:
    def add_one(self):
        if not self.duplicates:
            self.adding_done.emit()
            return

        mi, cover, formats = self.duplicates.pop()
        formats = [f for f in formats if not f.lower().endswith('.opf')]
        id = self.db.create_book_entry(mi, cover=cover,
                add_duplicates=True)
        # here we add all the formats for dupe book record created above
        self.db_adder.add_formats(id, formats)
        self.db_adder.number_of_books_added += 1
        self.emit(SIGNAL('added(PyQt_PyObject)'), count)
        count += 1
        self.emit(SIGNAL('adding_done()'))
        self.count += 1
        self.added.emit(self.count)
        single_shot(self.add_one)

# }}}

class RecursiveFind(QThread): # {{{

    update = pyqtSignal(object)
    found = pyqtSignal(object)

    def __init__(self, parent, db, root, single):
        QThread.__init__(self, parent)
        self.db = db
@@ -50,7 +64,7 @@ class RecursiveFind(QThread): # {{{
        for dirpath in os.walk(root):
            if self.canceled:
                return
            self.emit(SIGNAL('update(PyQt_PyObject)'),
            self.update.emit(
                    _('Searching in')+' '+dirpath[0])
            self.books += list(self.db.find_books_in_directory(dirpath[0],
                self.single_book_per_directory))
@@ -71,39 +85,45 @@ class RecursiveFind(QThread): # {{{
                msg = unicode(err)
            except:
                msg = repr(err)
            self.emit(SIGNAL('found(PyQt_PyObject)'), msg)
            self.found.emit(msg)
            return

        self.books = [formats for formats in self.books if formats]

        if not self.canceled:
            self.emit(SIGNAL('found(PyQt_PyObject)'), self.books)
            self.found.emit(self.books)

# }}}

class DBAdder(Thread): # {{{
class DBAdder(QObject): # {{{

    def __init__(self, parent, db, ids, nmap):
        QObject.__init__(self, parent)

    def __init__(self, db, ids, nmap):
        self.db, self.ids, self.nmap = db, dict(**ids), dict(**nmap)
        self.end = False
        self.critical = {}
        self.number_of_books_added = 0
        self.duplicates = []
        self.names, self.paths, self.infos = [], [], []
        Thread.__init__(self)
        self.daemon = True
        self.input_queue = Queue()
        self.output_queue = Queue()
        self.merged_books = set([])

    def run(self):
        while not self.end:
    def end(self):
        self.input_queue.put((None, None, None))

    def start(self):
        try:
            id, opf, cover = self.input_queue.get(True, 0.2)
            id, opf, cover = self.input_queue.get_nowait()
        except Empty:
            continue
            single_shot(self.start)
            return
        if id is None and opf is None and cover is None:
            return
        name = self.nmap.pop(id)
        title = None
        if DEBUG:
            st = time.time()
        try:
            title = self.add(id, opf, cover, name)
        except:
@@ -111,6 +131,9 @@ class DBAdder(Thread): # {{{
            self.critical[name] = traceback.format_exc()
            title = name
        self.output_queue.put(title)
        if DEBUG:
            prints('Added', title, 'to db in:', time.time() - st, 'seconds')
        single_shot(self.start)

    def process_formats(self, opf, formats):
        imp = opf[:-4]+'.import'
@@ -201,10 +224,10 @@ class Adder(QObject): # {{{
        self.pd.setModal(True)
        self.pd.show()
        self._parent = parent
        self.rfind = self.worker = self.timer = None
        self.rfind = self.worker = None
        self.callback = callback
        self.callback_called = False
        self.connect(self.pd, SIGNAL('canceled()'), self.canceled)
        self.pd.canceled_signal.connect(self.canceled)

    def add_recursive(self, root, single=True):
        self.path = root
@@ -213,10 +236,8 @@ class Adder(QObject): # {{{
        self.pd.set_max(0)
        self.pd.value = 0
        self.rfind = RecursiveFind(self, self.db, root, single)
        self.connect(self.rfind, SIGNAL('update(PyQt_PyObject)'),
                self.pd.set_msg, Qt.QueuedConnection)
        self.connect(self.rfind, SIGNAL('found(PyQt_PyObject)'),
                self.add, Qt.QueuedConnection)
        self.rfind.update.connect(self.pd.set_msg, type=Qt.QueuedConnection)
        self.rfind.found.connect(self.add, type=Qt.QueuedConnection)
        self.rfind.start()

    def add(self, books):
@@ -246,12 +267,12 @@ class Adder(QObject): # {{{
        self.pd.set_min(0)
        self.pd.set_max(len(self.ids))
        self.pd.value = 0
        self.db_adder = DBAdder(self.db, self.ids, self.nmap)
        self.db_adder = DBAdder(self, self.db, self.ids, self.nmap)
        self.db_adder.start()
        self.last_added_at = time.time()
        self.entry_count = len(self.ids)
        self.continue_updating = True
        QTimer.singleShot(200, self.update)
        single_shot(self.update)

    def canceled(self):
        self.continue_updating = False
@@ -260,14 +281,14 @@ class Adder(QObject): # {{{
        if self.worker is not None:
            self.worker.canceled = True
        if hasattr(self, 'db_adder'):
            self.db_adder.end = True
            self.db_adder.end()
        self.pd.hide()
        if not self.callback_called:
            self.callback(self.paths, self.names, self.infos)
            self.callback_called = True

    def duplicates_processed(self):
        self.db_adder.end = True
        self.db_adder.end()
        if not self.callback_called:
            self.callback(self.paths, self.names, self.infos)
            self.callback_called = True
@@ -300,7 +321,7 @@ class Adder(QObject): # {{{
        if (time.time() - self.last_added_at) > self.ADD_TIMEOUT:
            self.continue_updating = False
            self.pd.hide()
            self.db_adder.end = True
            self.db_adder.end()
            if not self.callback_called:
                self.callback([], [], [])
                self.callback_called = True
@@ -311,7 +332,7 @@ class Adder(QObject): # {{{
                    'find the problem book.'), show=True)

        if self.continue_updating:
            QTimer.singleShot(200, self.update)
            single_shot(self.update)

    def process_duplicates(self):
@@ -332,11 +353,8 @@ class Adder(QObject): # {{{
            self.__p_d = pd
            self.__d_a = DuplicatesAdder(self._parent, self.db, duplicates,
                    self.db_adder)
            self.connect(self.__d_a, SIGNAL('added(PyQt_PyObject)'),
                    pd.setValue)
            self.connect(self.__d_a, SIGNAL('adding_done()'),
                    self.duplicates_processed)
            self.__d_a.start()
            self.__d_a.added.connect(pd.setValue)
            self.__d_a.adding_done.connect(self.duplicates_processed)
        else:
            return self.duplicates_processed()

@@ -407,14 +425,12 @@ class Saver(QObject): # {{{
        self.worker = SaveWorker(self.rq, db, self.ids, path, self.opts,
                spare_server=self.spare_server)
        self.pd.canceled_signal.connect(self.canceled)
        self.timer = QTimer(self)
        self.connect(self.timer, SIGNAL('timeout()'), self.update)
        self.timer.start(200)
        self.continue_updating = True
        single_shot(self.update)

    def canceled(self):
        if self.timer is not None:
            self.timer.stop()
        self.continue_updating = False
        if self.worker is not None:
            self.worker.canceled = True
        self.pd.hide()
@@ -424,27 +440,35 @@ class Saver(QObject): # {{{

    def update(self):
        if not self.ids or not self.worker.is_alive():
            self.timer.stop()
            self.pd.hide()
        if not self.continue_updating:
            return
        if not self.worker.is_alive():
            # Check that all ids were processed
            while self.ids:
                # Get all queued results since worker is dead
                before = len(self.ids)
                self.get_result()
                if before == len(self.ids):
                    # No results available => worker died unexpectedly
                    for i in list(self.ids):
                        self.failures.add(('id:%d'%i, 'Unknown error'))
                        self.ids.remove(i)
                    break

        if not self.ids:
            self.continue_updating = False
            self.pd.hide()
            if not self.callback_called:
                try:
                    self.worker.join(1.5)
                    # Give the worker time to clean up and set worker.error
                    self.worker.join(2)
                except:
                    pass # The worker was not yet started
                self.callback(self.worker.path, self.failures, self.worker.error)
                self.callback_called = True
            return
                self.callback(self.worker.path, self.failures, self.worker.error)

        if self.continue_updating:
            self.get_result()
            single_shot(self.update)

    def get_result(self):
@@ -223,21 +223,22 @@ class BooksModel(QAbstractTableModel): # {{{
    def by_author(self):
        return self.sorted_on[0] == 'authors'

    def books_deleted(self):
        self.count_changed()
        self.clear_caches()
        self.reset()

    def delete_books(self, indices):
        ids = map(self.id, indices)
        for id in ids:
            self.db.delete_book(id, notify=False)
        self.count_changed()
        self.clear_caches()
        self.reset()
        self.books_deleted()
        return ids

    def delete_books_by_id(self, ids):
        for id in ids:
            self.db.delete_book(id)
        self.count_changed()
        self.clear_caches()
        self.reset()
        self.books_deleted()

    def books_added(self, num):
        if num > 0:
@@ -73,6 +73,7 @@ class TagsView(QTreeView): # {{{
    def __init__(self, parent=None):
        QTreeView.__init__(self, parent=None)
        self.tag_match = None
        self.disable_recounting = False
        self.setUniformRowHeights(True)
        self.setCursor(Qt.PointingHandCursor)
        self.setIconSize(QSize(30, 30))
@@ -299,6 +300,8 @@ class TagsView(QTreeView): # {{{
        return self.isExpanded(idx)

    def recount(self, *args):
        if self.disable_recounting:
            return
        self.refresh_signal_processed = True
        ci = self.currentIndex()
        if not ci.isValid():
@@ -129,6 +129,7 @@ class CoverCache(Thread): # {{{
        self.keep_running = True
        self.cache = {}
        self.lock = RLock()
        self.allowed_ids = frozenset([])
        self.null_image = QImage()

    def stop(self):
@@ -175,6 +176,11 @@ class CoverCache(Thread): # {{{
                break
            for id_ in ids:
                time.sleep(0.050) # Limit 20/second to not overwhelm the GUI
                if not self.keep_running:
                    return
                with self.lock:
                    if id_ not in self.allowed_ids:
                        continue
                try:
                    img = self._image_for_id(id_)
                except:
@@ -193,6 +199,7 @@ class CoverCache(Thread): # {{{

    def set_cache(self, ids):
        with self.lock:
            self.allowed_ids = frozenset(ids)
            already_loaded = set([])
            for id in self.cache.keys():
                if id in ids:
@@ -213,7 +220,8 @@ class CoverCache(Thread): # {{{
    def refresh(self, ids):
        with self.lock:
            for id_ in ids:
                self.cache.pop(id_, None)
                cover = self.cache.pop(id_, None)
                if cover is not None:
                    self.load_queue.put(id_)
# }}}
@@ -953,21 +953,22 @@ class LibraryDatabase2(LibraryDatabase, SchemaUpgrade, CustomColumns):
        self.notify('metadata', [id])
        return True

    def delete_book(self, id, notify=True):
    def delete_book(self, id, notify=True, commit=True):
        '''
        Removes book from the result cache and the underlying database.
        If you set commit to False, you must call clean() manually afterwards
        '''
        try:
            path = os.path.join(self.library_path, self.path(id, index_is_id=True))
        except:
            path = None
        self.data.remove(id)
        if path and os.path.exists(path):
            self.rmtree(path)
            parent = os.path.dirname(path)
            if len(os.listdir(parent)) == 0:
                self.rmtree(parent)
        self.conn.execute('DELETE FROM books WHERE id=?', (id,))
        if commit:
            self.conn.commit()
            self.clean()
        self.data.books_deleted([id])
File diff suppressed because it is too large
@@ -292,12 +292,12 @@ class Server(Thread):
            except:
                pass
        time.sleep(0.2)
        for worker in self.workers:
        for worker in list(self.workers):
            try:
                worker.kill()
            except:
                pass
        for worker in self.pool:
        for worker in list(self.pool):
            try:
                worker.kill()
            except:
@@ -54,12 +54,15 @@ def save_cover_data_to(data, path, bgcolor='#ffffff', resize_to=None,
        changed = True
    if not changed:
        changed = fmt != orig_fmt

    ret = None
    if return_data:
        ret = data
        if changed:
            if hasattr(img, 'set_compression_quality') and fmt == 'jpg':
                img.set_compression_quality(compression_quality)
            return img.export(fmt)
        return data
            ret = img.export(fmt)
    else:
        if changed:
            if hasattr(img, 'set_compression_quality') and fmt == 'jpg':
                img.set_compression_quality(compression_quality)
@@ -67,6 +70,7 @@ def save_cover_data_to(data, path, bgcolor='#ffffff', resize_to=None,
        else:
            with lopen(path, 'wb') as f:
                f.write(data)
    return ret

def thumbnail(data, width=120, height=120, bgcolor='#ffffff', fmt='jpg'):
    img = Image()
@@ -5,6 +5,9 @@

#include "magick_constants.h"

// Ensure that the underlying MagickWand has not been deleted
#define NULL_CHECK(x) if(self->wand == NULL) {PyErr_SetString(PyExc_ValueError, "Underlying ImageMagick Wand has been destroyed"); return x; }

// magick_set_exception {{{
PyObject* magick_set_exception(MagickWand *wand) {
    ExceptionType ext;
@@ -54,6 +57,7 @@ magick_PixelWand_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
static PyObject *
magick_PixelWand_color_getter(magick_PixelWand *self, void *closure) {
    const char *fp;
    NULL_CHECK(NULL);
    fp = PixelGetColorAsNormalizedString(self->wand);
    return Py_BuildValue("s", fp);
}
@@ -62,6 +66,8 @@ static int
magick_PixelWand_color_setter(magick_PixelWand *self, PyObject *val, void *closure) {
    char *fmt;

    NULL_CHECK(-1);

    if (val == NULL) {
        PyErr_SetString(PyExc_TypeError, "Cannot delete PixelWand color");
        return -1;
@@ -80,8 +86,21 @@ magick_PixelWand_color_setter(magick_PixelWand *self, PyObject *val, void *closu

// }}}

// PixelWand.destroy {{{

static PyObject *
magick_PixelWand_destroy(magick_PixelWand *self, PyObject *args, PyObject *kwargs) {
    NULL_CHECK(NULL)
    self->wand = DestroyPixelWand(self->wand);
    Py_RETURN_NONE;
}
// }}}

// PixelWand attr list {{{
static PyMethodDef magick_PixelWand_methods[] = {
    {"destroy", (PyCFunction)magick_PixelWand_destroy, METH_VARARGS,
     "Destroy the underlying ImageMagick Wand. WARNING: After using this method, all methods on this object will raise an exception."},

    {NULL}  /* Sentinel */
};

@@ -175,10 +194,21 @@ magick_DrawingWand_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
    return (PyObject *)self;
}

// DrawingWand.destroy {{{

static PyObject *
magick_DrawingWand_destroy(magick_DrawingWand *self, PyObject *args, PyObject *kwargs) {
    NULL_CHECK(NULL)
    self->wand = DestroyDrawingWand(self->wand);
    Py_RETURN_NONE;
}
// }}}

// DrawingWand.font {{{
static PyObject *
magick_DrawingWand_font_getter(magick_DrawingWand *self, void *closure) {
    const char *fp;
    NULL_CHECK(NULL);
    fp = DrawGetFont(self->wand);
    return Py_BuildValue("s", fp);
}
@@ -186,6 +216,7 @@ magick_DrawingWand_font_getter(magick_DrawingWand *self, void *closure) {
static int
magick_DrawingWand_font_setter(magick_DrawingWand *self, PyObject *val, void *closure) {
    char *fmt;
    NULL_CHECK(-1);

    if (val == NULL) {
        PyErr_SetString(PyExc_TypeError, "Cannot delete DrawingWand font");
@@ -208,11 +239,13 @@ magick_DrawingWand_font_setter(magick_DrawingWand *self, PyObject *val, void *cl
// DrawingWand.font_size {{{
static PyObject *
magick_DrawingWand_fontsize_getter(magick_DrawingWand *self, void *closure) {
    NULL_CHECK(NULL)
    return Py_BuildValue("d", DrawGetFontSize(self->wand));
}

static int
magick_DrawingWand_fontsize_setter(magick_DrawingWand *self, PyObject *val, void *closure) {
    NULL_CHECK(-1)
    if (val == NULL) {
        PyErr_SetString(PyExc_TypeError, "Cannot delete DrawingWand fontsize");
        return -1;
@@ -233,12 +266,14 @@ magick_DrawingWand_fontsize_setter(magick_DrawingWand *self, PyObject *val, void
// DrawingWand.text_antialias {{{
static PyObject *
magick_DrawingWand_textantialias_getter(magick_DrawingWand *self, void *closure) {
    NULL_CHECK(NULL);
    if (DrawGetTextAntialias(self->wand)) Py_RETURN_TRUE;
    Py_RETURN_FALSE;
}

static int
magick_DrawingWand_textantialias_setter(magick_DrawingWand *self, PyObject *val, void *closure) {
    NULL_CHECK(-1);
    if (val == NULL) {
        PyErr_SetString(PyExc_TypeError, "Cannot delete DrawingWand textantialias");
        return -1;
@@ -253,6 +288,7 @@ magick_DrawingWand_textantialias_setter(magick_DrawingWand *self, PyObject *val,
// DrawingWand.gravity {{{
static PyObject *
magick_DrawingWand_gravity_getter(magick_DrawingWand *self, void *closure) {
    NULL_CHECK(NULL);
    return Py_BuildValue("n", DrawGetGravity(self->wand));
}

@@ -260,6 +296,8 @@ static int
magick_DrawingWand_gravity_setter(magick_DrawingWand *self, PyObject *val, void *closure) {
    int grav;

    NULL_CHECK(-1);

    if (val == NULL) {
        PyErr_SetString(PyExc_TypeError, "Cannot delete DrawingWand gravity");
        return -1;
@@ -281,6 +319,9 @@ magick_DrawingWand_gravity_setter(magick_DrawingWand *self, PyObject *val, void

// DrawingWand attr list {{{
static PyMethodDef magick_DrawingWand_methods[] = {
    {"destroy", (PyCFunction)magick_DrawingWand_destroy, METH_VARARGS,
     "Destroy the underlying ImageMagick Wand. WARNING: After using this method, all methods on this object will raise an exception."},

    {NULL}  /* Sentinel */
};

@@ -402,6 +443,7 @@ magick_Image_load(magick_Image *self, PyObject *args, PyObject *kwargs) {
    Py_ssize_t dlen;
    MagickBooleanType res;

    NULL_CHECK(NULL)
    if (!PyArg_ParseTuple(args, "s#", &data, &dlen)) return NULL;

    res = MagickReadImageBlob(self->wand, data, dlen);
@@ -420,6 +462,7 @@ magick_Image_read(magick_Image *self, PyObject *args, PyObject *kwargs) {
    const char *data;
    MagickBooleanType res;

    NULL_CHECK(NULL)
    if (!PyArg_ParseTuple(args, "s", &data)) return NULL;

    res = MagickReadImage(self->wand, data);
@@ -441,6 +484,8 @@ magick_Image_create_canvas(magick_Image *self, PyObject *args, PyObject *kwargs)
    PixelWand *pw;
    MagickBooleanType res = MagickFalse;

    NULL_CHECK(NULL)

    if (!PyArg_ParseTuple(args, "nns", &width, &height, &bgcolor)) return NULL;

    pw = NewPixelWand();
@@ -464,6 +509,8 @@ magick_Image_font_metrics(magick_Image *self, PyObject *args, PyObject *kwargs)
    DrawingWand *dw;
    double *metrics;

    NULL_CHECK(NULL)

    if (!PyArg_ParseTuple(args, "O!s", &magick_DrawingWandType, &dw_, &text)) return NULL;
    dw = ((magick_DrawingWand*)dw_)->wand;
    if (!IsDrawingWand(dw)) { PyErr_SetString(PyExc_TypeError, "Invalid drawing wand"); return NULL; }
@@ -491,6 +538,8 @@ magick_Image_annotate(magick_Image *self, PyObject *args, PyObject *kwargs) {
    DrawingWand *dw;
    double x, y, angle;

    NULL_CHECK(NULL)

    if (!PyArg_ParseTuple(args, "O!ddds", &magick_DrawingWandType, &dw_, &x, &y, &angle, &text)) return NULL;
    dw = ((magick_DrawingWand*)dw_)->wand;
    if (!IsDrawingWand(dw)) { PyErr_SetString(PyExc_TypeError, "Invalid drawing wand"); return NULL; }
@@ -510,6 +559,8 @@ magick_Image_export(magick_Image *self, PyObject *args, PyObject *kwargs) {
    PyObject *ans;
    size_t len = 0;

    NULL_CHECK(NULL)

    if (!PyArg_ParseTuple(args, "s", &fmt)) return NULL;

    if (!MagickSetFormat(self->wand, fmt)) {
@@ -533,6 +584,8 @@ magick_Image_export(magick_Image *self, PyObject *args, PyObject *kwargs) {
static PyObject *
magick_Image_size_getter(magick_Image *self, void *closure) {
    size_t width, height;
    NULL_CHECK(NULL)

    width = MagickGetImageWidth(self->wand);
    height = MagickGetImageHeight(self->wand);
    return Py_BuildValue("nn", width, height);
@@ -545,6 +598,9 @@ magick_Image_size_setter(magick_Image *self, PyObject *val, void *closure) {
    double blur;
    MagickBooleanType res;

    NULL_CHECK(-1)


    if (val == NULL) {
        PyErr_SetString(PyExc_TypeError, "Cannot delete image size");
        return -1;
@@ -592,6 +648,8 @@ magick_Image_size_setter(magick_Image *self, PyObject *val, void *closure) {
static PyObject *
magick_Image_format_getter(magick_Image *self, void *closure) {
    const char *fmt;
    NULL_CHECK(NULL)

    fmt = MagickGetImageFormat(self->wand);
    return Py_BuildValue("s", fmt);
}
@@ -599,6 +657,8 @@ magick_Image_format_getter(magick_Image *self, void *closure) {
static int
magick_Image_format_setter(magick_Image *self, PyObject *val, void *closure) {
    char *fmt;
    NULL_CHECK(-1)


    if (val == NULL) {
        PyErr_SetString(PyExc_TypeError, "Cannot delete image format");
@@ -628,6 +688,8 @@ magick_Image_distort(magick_Image *self, PyObject *args, PyObject *kwargs) {
    MagickBooleanType res;
    double *arguments = NULL;

    NULL_CHECK(NULL)

    if (!PyArg_ParseTuple(args, "iOO", &method, &argv, &bestfit)) return NULL;

    if (!PySequence_Check(argv)) { PyErr_SetString(PyExc_TypeError, "arguments must be a sequence"); return NULL; }
@@ -658,6 +720,8 @@ static PyObject *
magick_Image_trim(magick_Image *self, PyObject *args, PyObject *kwargs) {
    double fuzz;

    NULL_CHECK(NULL)

    if (!PyArg_ParseTuple(args, "d", &fuzz)) return NULL;

    if (!MagickTrimImage(self->wand, fuzz)) return magick_set_exception(self->wand);
@@ -672,6 +736,8 @@ static PyObject *
magick_Image_thumbnail(magick_Image *self, PyObject *args, PyObject *kwargs) {
    Py_ssize_t width, height;

    NULL_CHECK(NULL)

    if (!PyArg_ParseTuple(args, "nn", &width, &height)) return NULL;

    if (!MagickThumbnailImage(self->wand, width, height)) return magick_set_exception(self->wand);
@@ -686,6 +752,8 @@ static PyObject *
magick_Image_crop(magick_Image *self, PyObject *args, PyObject *kwargs) {
    Py_ssize_t width, height, x, y;

    NULL_CHECK(NULL)

    if (!PyArg_ParseTuple(args, "nnnn", &width, &height, &x, &y)) return NULL;

    if (!MagickCropImage(self->wand, width, height, x, y)) return magick_set_exception(self->wand);
@@ -701,6 +769,8 @@ magick_Image_set_border_color(magick_Image *self, PyObject *args, PyObject *kwar
    PyObject *obj;
    magick_PixelWand *pw;

    NULL_CHECK(NULL)

    if (!PyArg_ParseTuple(args, "O!", &magick_PixelWandType, &obj)) return NULL;
    pw = (magick_PixelWand*)obj;
    if (!IsPixelWand(pw->wand)) { PyErr_SetString(PyExc_TypeError, "Invalid PixelWand"); return NULL; }
@@ -719,6 +789,8 @@ magick_Image_rotate(magick_Image *self, PyObject *args, PyObject *kwargs) {
    magick_PixelWand *pw;
    double degrees;

    NULL_CHECK(NULL)

    if (!PyArg_ParseTuple(args, "O!d", &magick_PixelWandType, &obj, &degrees)) return NULL;
    pw = (magick_PixelWand*)obj;
    if (!IsPixelWand(pw->wand)) { PyErr_SetString(PyExc_TypeError, "Invalid PixelWand"); return NULL; }
@@ -735,6 +807,8 @@ static PyObject *
magick_Image_set_page(magick_Image *self, PyObject *args, PyObject *kwargs) {
    Py_ssize_t width, height, x, y;

    NULL_CHECK(NULL)

    if (!PyArg_ParseTuple(args, "nnnn", &width, &height, &x, &y)) return NULL;

    if (!MagickSetImagePage(self->wand, width, height, x, y)) return magick_set_exception(self->wand);
@@ -749,6 +823,8 @@ static PyObject *
magick_Image_set_compression_quality(magick_Image *self, PyObject *args, PyObject *kwargs) {
    Py_ssize_t quality;

    NULL_CHECK(NULL)

    if (!PyArg_ParseTuple(args, "n", &quality)) return NULL;

    if (!MagickSetImageCompressionQuality(self->wand, quality)) return magick_set_exception(self->wand);
@@ -767,6 +843,8 @@ magick_Image_has_transparent_pixels(magick_Image *self, PyObject *args, PyObject
    size_t r, c, width, height;
    double alpha;

    NULL_CHECK(NULL)

    height = MagickGetImageHeight(self->wand);
    pi = NewPixelIterator(self->wand);

@@ -790,6 +868,8 @@ magick_Image_has_transparent_pixels(magick_Image *self, PyObject *args, PyObject

static PyObject *
magick_Image_normalize(magick_Image *self, PyObject *args, PyObject *kwargs) {
    NULL_CHECK(NULL)

    if (!MagickNormalizeImage(self->wand)) return magick_set_exception(self->wand);

    Py_RETURN_NONE;
@@ -804,6 +884,8 @@ magick_Image_add_border(magick_Image *self, PyObject *args, PyObject *kwargs) {
    PyObject *obj;
    magick_PixelWand *pw;

    NULL_CHECK(NULL)

    if (!PyArg_ParseTuple(args, "O!nn", &magick_PixelWandType, &obj, &dx, &dy)) return NULL;
    pw = (magick_PixelWand*)obj;
    if (!IsPixelWand(pw->wand)) { PyErr_SetString(PyExc_TypeError, "Invalid PixelWand"); return NULL; }
@@ -820,6 +902,8 @@ static PyObject *
magick_Image_sharpen(magick_Image *self, PyObject *args, PyObject *kwargs) {
    double radius, sigma;

    NULL_CHECK(NULL)

    if (!PyArg_ParseTuple(args, "dd", &radius, &sigma)) return NULL;

    if (!MagickSharpenImage(self->wand, radius, sigma)) return magick_set_exception(self->wand);
@@ -836,6 +920,9 @@ magick_Image_quantize(magick_Image *self, PyObject *args, PyObject *kwargs) {
    int colorspace;
    PyObject *dither, *measure_error;

    NULL_CHECK(NULL)


    if (!PyArg_ParseTuple(args, "ninOO", &number_colors, &colorspace, &treedepth, &dither, &measure_error)) return NULL;

    if (!MagickQuantizeImage(self->wand, number_colors, colorspace, treedepth, PyObject_IsTrue(dither), PyObject_IsTrue(measure_error))) return magick_set_exception(self->wand);
@@ -848,6 +935,8 @@ magick_Image_quantize(magick_Image *self, PyObject *args, PyObject *kwargs) {

static PyObject *
magick_Image_despeckle(magick_Image *self, PyObject *args, PyObject *kwargs) {
    NULL_CHECK(NULL)

    if (!MagickDespeckleImage(self->wand)) return magick_set_exception(self->wand);

    Py_RETURN_NONE;
@@ -857,6 +946,8 @@ magick_Image_despeckle(magick_Image *self, PyObject *args, PyObject *kwargs) {
// Image.type {{{
static PyObject *
magick_Image_type_getter(magick_Image *self, void *closure) {
    NULL_CHECK(NULL)

    return Py_BuildValue("n", MagickGetImageType(self->wand));
}

@@ -864,6 +955,8 @@ static int
magick_Image_type_setter(magick_Image *self, PyObject *val, void *closure) {
    int type;

    NULL_CHECK(-1)

    if (val == NULL) {
        PyErr_SetString(PyExc_TypeError, "Cannot delete image type");
        return -1;
@@ -885,8 +978,21 @@ magick_Image_type_setter(magick_Image *self, PyObject *val, void *closure) {

// }}}

// Image.destroy {{{

static PyObject *
magick_Image_destroy(magick_Image *self, PyObject *args, PyObject *kwargs) {
    NULL_CHECK(NULL)
    self->wand = DestroyMagickWand(self->wand);
    Py_RETURN_NONE;
}
// }}}

// Image attr list {{{
static PyMethodDef magick_Image_methods[] = {
    {"destroy", (PyCFunction)magick_Image_destroy, METH_VARARGS,
     "Destroy the underlying ImageMagick Wand. WARNING: After using this method, all methods on this object will raise an exception."},

    {"load", (PyCFunction)magick_Image_load, METH_VARARGS,
     "Load an image from a byte buffer (string)"
    },
@@ -1001,6 +1107,7 @@ static PyGetSetDef magick_Image_getsetters[] = {

// }}}


static PyTypeObject magick_ImageType = { // {{{
    PyObject_HEAD_INIT(NULL)
    0, /*ob_size*/
@@ -1053,6 +1160,9 @@ magick_Image_compose(magick_Image *self, PyObject *args, PyObject *kwargs)
    magick_Image *src;
    MagickBooleanType res = MagickFalse;

    NULL_CHECK(NULL)


    if (!PyArg_ParseTuple(args, "O!nnO", &magick_ImageType, &img, &left, &top, &op_)) return NULL;
    src = (magick_Image*)img;
    if (!IsMagickWand(src->wand)) {PyErr_SetString(PyExc_TypeError, "Not a valid ImageMagick wand"); return NULL;}
@@ -1078,6 +1188,8 @@ magick_Image_copy(magick_Image *self, PyObject *args, PyObject *kwargs)
    PyObject *img;
    magick_Image *src;

    NULL_CHECK(NULL)

    if (!PyArg_ParseTuple(args, "O!", &magick_ImageType, &img)) return NULL;
    src = (magick_Image*)img;
    if (!IsMagickWand(src->wand)) {PyErr_SetString(PyExc_TypeError, "Not a valid ImageMagick wand"); return NULL;}
@@ -1153,3 +1265,4 @@ initmagick(void)
    MagickWandGenesis();
}
// }}}
@@ -166,7 +166,7 @@ class Feed(object):
                self.articles.append(article)
            else:
                t = strftime(u'%a, %d %b, %Y %H:%M', article.localtime.timetuple())
                self.logger.debug('Skipping article %s (%s) from feed %s as it is too old.'%
                self.logger.debug(u'Skipping article %s (%s) from feed %s as it is too old.'%
                        (title, t, self.title))
        d = item.get('date', '')
        article.formatted_date = d