Mirror of https://github.com/kovidgoyal/calibre.git (synced 2025-06-23 15:30:45 -04:00)

Commit c01a7c7812: Merge branch 'master' of https://github.com/kovidgoyal/calibre
@@ -20,6 +20,73 @@
 # new recipes:
 #   - title:

- version: 1.7.0
  date: 2013-10-18

  new features:
    - title: "Cover grid: Allow using images as the background for the cover grid. To choose an image, go to Preferences->Look & Feel->Cover Grid."
      tickets: [1239194]

    - title: "An option to mark newly added books with a temporary mark. The option is in Preferences->Adding books."
      tickets: [1238609]

    - title: "Edit metadata dialog: Allow turning off the cover size displayed in the bottom right corner of the cover by right clicking the cover and choosing 'Hide cover size'. It can be restored the same way."

  bug fixes:
    - title: "Conversion: If both the embed font family and the filter CSS option to remove fonts are set, do not remove the font specified by the embed font family option."

    - title: "Fix a few remaining situations that could cause the formats column to show an error message about SHLock"

    - title: "Make deleting books to the recycle bin more robust. Ensure that the temporary directory created while moving to the recycle bin is not left behind in case of errors."

    - title: "Windows: Check if the books' files are in use before deleting"

    - title: "Fix the custom device driver 'swap main and card' option not working. Also fix swapping not happening for a few devices on Linux"
      tickets: [1240504]

    - title: "Edit metadata dialog: The Edit metadata dialog currently limits its maximum size based on the geometry of the smallest attached screen. Change that to use the geometry of the screen on which it will be shown."
      tickets: [1239597]

    - title: "HTMLZ Output: Fix <style> tag placed inside <body> instead of <head>."
      tickets: [1239530]

    - title: "HTMLZ Output: Fix inline styles not escaping quotes properly."
      tickets: [1239527]

    - title: "HTMLZ Output: Fix incorrect handling of some self-closing tags like <br>."
      tickets: [1239555]

    - title: "Content server: Fix single item categories not working with a reverse proxy setup."
      tickets: [1238987]

    - title: "Fix a bug that could cause calibre to crash when switching from a large library to a smaller library with marked books."
      tickets: [1239210]

    - title: "Get Books: Fix downloading of some books in formats that have no metadata yielding nonsense titles"

    - title: "Allow the marked book button to be added to the main toolbar when a device is connected"
      tickets: [1239163]

    - title: "Fix an error if a marked book is deleted/merged."
      tickets: [1239161]

    - title: "Template language: Fix the formatter function days_between to compute the right value when the answer is negative."

    - title: "Windows: Fix a spurious 'file in use by another process' error if the book's folder contained multiple hard links pointing to the same file"
      tickets: [1240788, 1240194]

    - title: "Windows: Fix duplicate files being created in very special circumstances when changing the title and/or author (the title or author had to be between 31 and 35 characters long and the book entry had to have been created by a pre-1.x version of calibre). You can check whether your library has any such duplicates, and remove them, by using the Check Library tool (right click the calibre button on the toolbar and select Library Maintenance->Check Library)."

  improved recipes:
    - Wall Street Journal
    - Newsweek Polska
    - Wired Magazine
    - cracked.com
    - Television Without Pity
    - Carta
    - Diagonales


- version: 1.6.0
  date: 2013-10-11
@@ -12,7 +12,7 @@ class Carta(BasicNewsRecipe):

    title = u'Carta'
    description = 'News about electronic publishing'
    __author__ = 'Oliver Niesner'
    __author__ = 'Oliver Niesner'  # AGe Update 2013-10-13
    use_embedded_content = False
    timefmt = ' [%a %d %b %Y]'
    oldest_article = 7
@@ -25,7 +25,7 @@ class Carta(BasicNewsRecipe):

    remove_tags_after = [dict(name='p', attrs={'class':'tags-blog'})]
    remove_tags_after = [dict(name='div', attrs={'id':'BlogContent'})]  # AGe

    remove_tags = [dict(name='p', attrs={'class':'print'}),
                   dict(name='p', attrs={'class':'tags-blog'}),
@@ -22,12 +22,8 @@ class Cracked(BasicNewsRecipe):
        'comment': description, 'tags': category, 'publisher': publisher, 'language': language
    }

    # remove_tags_before = dict(id='PrimaryContent')

    keep_only_tags = dict(name='article', attrs={
        'class': 'module article dropShadowBottomCurved'})

    # remove_tags_after = dict(name='div', attrs={'class':'shareBar'})
    keep_only_tags = [dict(name='article', attrs={'class': 'module article dropShadowBottomCurved'}),
                      dict(name='article', attrs={'class': 'module blog dropShadowBottomCurved'})]

    remove_tags = [
        dict(name='section', attrs={'class': ['socialTools', 'quickFixModule']})]
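For readers unfamiliar with calibre recipes, keep_only_tags accepts a list of tag descriptions, which is what lets the updated cracked.com recipe above keep either the article or the blog container. A minimal sketch of that pattern (the recipe class is hypothetical; the selectors are copied from the diff):

from calibre.web.feeds.news import BasicNewsRecipe

class CrackedLikeRecipe(BasicNewsRecipe):
    # Hypothetical example recipe: keep whichever of the two article
    # containers is present, and drop the social/share sections.
    title = 'Example'
    keep_only_tags = [
        dict(name='article', attrs={'class': 'module article dropShadowBottomCurved'}),
        dict(name='article', attrs={'class': 'module blog dropShadowBottomCurved'}),
    ]
    remove_tags = [
        dict(name='section', attrs={'class': ['socialTools', 'quickFixModule']}),
    ]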
@@ -1,72 +1,50 @@
#!/usr/bin/env python

__license__ = 'GPL v3'
__copyright__ = '2009, Darko Miletic <darko.miletic at gmail.com>'
__copyright__ = '2009-2013, Darko Miletic <darko.miletic at gmail.com>'
'''
elargentino.com
diagonales.infonews.com
'''

from calibre.web.feeds.news import BasicNewsRecipe
from calibre.ebooks.BeautifulSoup import Tag

class Diagonales(BasicNewsRecipe):
    title = 'Diagonales'
    __author__ = 'Darko Miletic'
    description = 'El nuevo diario de La Plata'
    publisher = 'ElArgentino.com'
    description = 'Para estar bien informado sobre los temas de actualidad. Conoce sobre pais, economia, deportes, mundo, espectaculos, sociedad, entrevistas y tecnologia.'
    publisher = 'INFOFIN S.A.'
    category = 'news, politics, Argentina, La Plata'
    oldest_article = 2
    max_articles_per_feed = 100
    no_stylesheets = True
    use_embedded_content = False
    encoding = 'utf-8'
    language = 'es_AR'

    lang = 'es-AR'
    direction = 'ltr'
    INDEX = 'http://www.elargentino.com/medios/122/Diagonales.html'
    language = 'es_AR'
    publication_type = 'newspaper'
    delay = 1
    remove_empty_feeds = True
    extra_css = ' .titulo{font-size: x-large; font-weight: bold} .volantaImp{font-size: small; font-weight: bold} '

    html2lrf_options = [
        '--comment', description
        , '--category', category
        , '--publisher', publisher
    ]

    html2epub_options = 'publisher="' + publisher + '"\ncomments="' + description + '"\ntags="' + category + '"\noverride_css=" p {text-indent: 0cm; margin-top: 0em; margin-bottom: 0.5em} "'
    conversion_options = {
        'comment'   : description
        , 'tags'      : category
        , 'publisher' : publisher
        , 'language'  : language
    }

    keep_only_tags = [dict(name='div', attrs={'class':'ContainerPop'})]
    remove_tags = [dict(name='link')]

    remove_tags = [dict(name='link')]

    feeds = [(u'Articulos', u'http://www.elargentino.com/Highlights.aspx?ParentType=Section&ParentId=122&Content-Type=text/xml&ChannelDesc=Diagonales')]
    feeds = [
        (u'Pais'         , u'http://diagonales.infonews.com/Highlights.aspx?ParentType=Section&ParentId=112&Content-Type=text/xml&ChannelDesc=Pa%C3%ADs')
        ,(u'Deportes'     , u'http://diagonales.infonews.com/Highlights.aspx?ParentType=Section&ParentId=106&Content-Type=text/xml&ChannelDesc=Deportes')
        ,(u'Economia'     , u'http://diagonales.infonews.com/Highlights.aspx?ParentType=Section&ParentId=107&Content-Type=text/xml&ChannelDesc=Econom%C3%ADa')
        ,(u'Sociedad'     , u'http://diagonales.infonews.com/Highlights.aspx?ParentType=Section&ParentId=109&Content-Type=text/xml&ChannelDesc=Sociedad')
        ,(u'Mundo'        , u'http://diagonales.infonews.com/Highlights.aspx?ParentType=Section&ParentId=113&Content-Type=text/xml&ChannelDesc=Mundo')
        ,(u'Espectaculos' , u'http://diagonales.infonews.com/Highlights.aspx?ParentType=Section&ParentId=114&Content-Type=text/xml&ChannelDesc=Espect%C3%A1culos')
        ,(u'Entrevistas'  , u'http://diagonales.infonews.com/Highlights.aspx?ParentType=Section&ParentId=115&Content-Type=text/xml&ChannelDesc=Entrevistas')
        ,(u'Tecnologia'   , u'http://diagonales.infonews.com/Highlights.aspx?ParentType=Section&ParentId=118&Content-Type=text/xml&ChannelDesc=Tecnolog%C3%ADa')
    ]

    def print_version(self, url):
        main, sep, article_part = url.partition('/nota-')
        article_id, rsep, rrest = article_part.partition('-')
        return u'http://www.elargentino.com/Impresion.aspx?Id=' + article_id

    def preprocess_html(self, soup):
        for item in soup.findAll(style=True):
            del item['style']
        soup.html['lang'] = self.lang
        soup.html['dir'] = self.direction
        mlang = Tag(soup, 'meta', [("http-equiv", "Content-Language"), ("content", self.lang)])
        mcharset = Tag(soup, 'meta', [("http-equiv", "Content-Type"), ("content", "text/html; charset=utf-8")])
        soup.head.insert(0, mlang)
        soup.head.insert(1, mcharset)
        return soup

    def get_cover_url(self):
        cover_url = None
        soup = self.index_to_soup(self.INDEX)
        cover_item = soup.find('div', attrs={'class':'colder'})
        if cover_item:
            clean_url = self.image_url_processor(None, cover_item.div.img['src'])
            cover_url = 'http://www.elargentino.com' + clean_url + '&height=600'
        return cover_url

    def image_url_processor(self, baseurl, url):
        base, sep, rest = url.rpartition('?Id=')
        img, sep2, rrest = rest.partition('&')
        return base + sep + img
        return u'http://diagonales.infonews.com/Impresion.aspx?Id=' + article_id
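The print_version change above rewrites an article URL into the print view by pulling the numeric id out of the path with str.partition and pointing at the new infonews.com host. A standalone sketch of that transformation (the sample URL is made up for illustration):

def print_version(url):
    # 'http://diagonales.infonews.com/nota-12345-titulo.html' -> print URL
    main, sep, article_part = url.partition('/nota-')
    article_id, _, _ = article_part.partition('-')
    return u'http://diagonales.infonews.com/Impresion.aspx?Id=' + article_id

# Hypothetical example, for illustration only:
# print_version('http://diagonales.infonews.com/nota-12345-ejemplo.html')
# -> 'http://diagonales.infonews.com/Impresion.aspx?Id=12345'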
BIN  recipes/icons/diagonales.png  (new file, binary not shown; 4.8 KiB)
@@ -2,173 +2,263 @@
#!/usr/bin/env python

__license__ = 'GPL v3'
__copyright__ = '2010, matek09, matek09@gmail.com; 2012, admroz, a.rozewicki@gmail.com'
__copyright__ = '2010, matek09, matek09@gmail.com; 2012-2013, admroz, a.rozewicki@gmail.com'

import re
from calibre.web.feeds.news import BasicNewsRecipe
from calibre.ptempfile import PersistentTemporaryFile
from string import capwords
import datetime
from calibre.ebooks.BeautifulSoup import BeautifulSoup


class Newsweek(BasicNewsRecipe):

    # how many issues to go back, 0 means get the most current one
    BACK_ISSUES = 2
    BACK_ISSUES = 1

    EDITION = '0'
    DATE = None
    YEAR = datetime.datetime.now().year

    title = u'Newsweek Polska'
    __author__ = 'matek09, admroz'
    description = 'Weekly magazine'
    encoding = 'utf-8'
    language = 'pl'
    remove_javascript = True

    temp_files = []
    articles_are_obfuscated = True

    #
    # Parses each article
    #
    def get_obfuscated_article(self, url):
        br = self.get_browser()
        br.open(url)
        source = br.response().read()
        page = self.index_to_soup(source)
    #
    # Parses article contents from one page
    #
    def get_article_divs(self, css, main_section):
        strs = []

        main_section = page.find(id='mainSection')
        # get all divs with given css class
        article_divs = main_section.findAll('div', attrs={'class' : css})
        for article_div in article_divs:

        title = main_section.find('h1')
        info = main_section.find('ul', attrs={'class' : 'articleInfo'})
        authors = info.find('li').find('h4')
        article = main_section.find('div', attrs={'id' : 'article'})
            # remove sections like 'read more...' etc.
            for p in article_div.findAll('p'):

        # remove related articles box
        related = article.find('div', attrs={'class' : 'relatedBox'})
        if related is not None:
            related.extract()
                if p.find('span', attrs={'style' : 'color: #800000; font-size: medium;'}):
                    p.extract()
                    continue

        # remove div with social networking links and links to
        # other articles in web version
        for div in article.findAll('div'):
            if div.find('span', attrs={'class' : 'google-plus'}):
                div.extract()
                if p.find('span', attrs={'style' : 'font-size: medium; color: #800000;'}):
                    p.extract()
                    continue

            for p in div.findAll('p'):
                if p.find('span', attrs={'style' : 'color: rgb(255, 0, 0);'}):
                    p.extract()
                    continue
                for a in p.findAll('a'):
                    if a.find('span', attrs={'style' : 'font-size: larger;'}):
                        a.extract()
                if p.find('span', attrs={'style' : 'font-size: medium;'}):
                    p.extract()
                    continue

                if p.find('span', attrs={'style' : 'color: #800000;'}):
                    p.extract()
                    continue

                obj = p.find('object')
                if obj:
                    obj.extract()
                    continue

                strong = p.find('strong')
                if strong:
                    newest = re.compile("Tekst pochodzi z najnowszego numeru Tygodnika Newsweek")
                    if newest.search(str(strong)):
                        strong.extract()
                        continue

                itunes = p.find('a')
                if itunes:
                    reurl = re.compile("itunes.apple.com")
                    if reurl.search(str(itunes['href'])):
                        p.extract()
                        continue

                imagedesc = p.find('div', attrs={'class' : 'image-desc'})
                if imagedesc:
                    redesc = re.compile("Okładka numeru")
                    if (redesc.search(str(imagedesc))):
                        p.extract()
                        continue

        html = unicode(title) + unicode(authors) + unicode(article)
        next = main_section.find('li', attrs={'class' : 'next'})

        while next:
            url = next.find('a')['href']
            br.open(url)
            source = br.response().read()
            page = self.index_to_soup(source)
            main_section = page.find(id='mainSection')
            article = main_section.find('div', attrs={'id' : 'article'})
            aside = article.find(id='articleAside')
            if aside is not None:
                aside.extract()
            html = html + unicode(article)
            next = main_section.find('li', attrs={'class' : 'next'})
            # get actual contents
            for content in article_div.contents:
                strs.append("".join(str(content)))

        # return contents as a string
        return unicode("".join(strs))

        self.temp_files.append(PersistentTemporaryFile('_temparse.html'))
        self.temp_files[-1].write(html)
        self.temp_files[-1].close()
        return self.temp_files[-1].name
    #
    # Articles can be divided into several pages, this method parses them recursively
    #
    def get_article_page(self, br, url, page):
        br.open(url)
        source = br.response().read()

        html = ''

        matches = re.search(r'<article>(.*)</article>', source, re.DOTALL)
        if matches is None:
            print "no article tag found, returning..."
            return

        main_section = BeautifulSoup(matches.group(0))

        if page == 0:
            title = main_section.find('h1')
            html = html + unicode(title)

            authors = ''
            authorBox = main_section.find('div', attrs={'class' : 'AuthorBox'})
            if authorBox is not None:
                authorH4 = authorBox.find('h4')
                if authorH4 is not None:
                    authors = self.tag_to_string(authorH4)
            html = html + unicode(authors)

            info = main_section.find('p', attrs={'class' : 'lead'})
            html = html + unicode(info)

        html = html + self.get_article_divs('3917dc34e07c9c7180df2ea9ef103361845c8af42b71f51b960059226090a1ac articleStart', main_section)
        html = html + self.get_article_divs('3917dc34e07c9c7180df2ea9ef103361845c8af42b71f51b960059226090a1ac', main_section)

        nextPage = main_section.find('a', attrs={'class' : 'next'})
        if nextPage:
            html = html + self.get_article_page(br, nextPage['href'], page+1)

        return html

    #
    # Parses each article
    #
    def get_obfuscated_article(self, url):
        br = self.get_browser()
        html = self.get_article_page(br, url, 0)
        self.temp_files.append(PersistentTemporaryFile('_temparse.html'))
        self.temp_files[-1].write(html)
        self.temp_files[-1].close()
        return self.temp_files[-1].name


    #
    # Goes back given number of issues. It also knows how to go back
    # to the previous year if there are not enough issues in the current one
    #
    def find_last_issue(self, archive_url):
        archive_soup = self.index_to_soup(archive_url)
        select = archive_soup.find('select', attrs={'id' : 'paper_issue_select'})
        options = select.findAll(lambda tag: tag.name == 'option' and tag.has_key('value'))
    #
    # Goes back given number of issues. It also knows how to go back
    # to the previous year if there are not enough issues in the current one
    #
    def find_last_issue(self, archive_url):
        archive_soup = self.index_to_soup(archive_url, True)

        # check if need to go back to previous year
        if len(options) > self.BACK_ISSUES:
            option = options[self.BACK_ISSUES];
            self.EDITION = option['value'].replace('http://www.newsweek.pl/wydania/','')
            self.index_to_soup('http://www.newsweek.pl/wydania/' + self.EDITION)
        else:
            self.BACK_ISSUES = self.BACK_ISSUES - len(options)
            self.YEAR = self.YEAR - 1
            self.find_last_issue(archive_url + ',' + str(self.YEAR))
        # workaround because html is so messed up that find() method on soup returns None
        # and therefore we need to extract subhtml that we need
        matches = re.search(r'<ul class="rightIssueList">(.*?)</ul>', archive_soup, re.DOTALL)
        if matches is None:
            return

        subSoup = BeautifulSoup(matches.group(0))
        issueLinks = subSoup.findAll('a')

        # check if need to go back to previous year
        if len(issueLinks) > self.BACK_ISSUES:
            link = issueLinks[self.BACK_ISSUES];
            self.EDITION = link['href'].replace('http://www.newsweek.pl/wydania/','')
            self.index_to_soup('http://www.newsweek.pl/wydania/' + self.EDITION)
        else:
            self.BACK_ISSUES = self.BACK_ISSUES - len(issueLinks)
            self.YEAR = self.YEAR - 1
            self.find_last_issue(archive_url + '/' + str(self.YEAR))


    #
    # Looks for the last issue which we want to download. Then goes on each
    # section and article and stores them (assigning to sections)
    #
    def parse_index(self):
        archive_url = 'http://www.newsweek.pl/wydania/archiwum'
        self.find_last_issue(archive_url)
        soup = self.index_to_soup('http://www.newsweek.pl/wydania/' + self.EDITION)
        self.DATE = self.tag_to_string(soup.find('span', attrs={'class' : 'data'}))
        main_section = soup.find(id='mainSection')
        img = main_section.find(lambda tag: tag.name == 'img' and tag.has_key('alt') and tag.has_key('title'))
        self.cover_url = img['src']
        feeds = []
        articles = {}
        sections = []
    #
    # Looks for the last issue which we want to download. Then goes on each
    # section and article and stores them (assigning to sections)
    #
    def parse_index(self):
        archive_url = 'http://www.newsweek.pl/wydania/archiwum'
        self.find_last_issue(archive_url)
        soup = self.index_to_soup('http://www.newsweek.pl/wydania/' + self.EDITION)

        news_list = main_section.find('ul', attrs={'class' : 'newsList'})
        section = 'Inne'
        matches = re.search(r'<div class="Issue-Entry">(.*)ARTICLE_BOTTOM', soup.prettify(), re.DOTALL)
        if matches is None:
            return

        for li in news_list.findAll('li'):
            h3 = li.find('h3')
            if h3 is not None:
                section = capwords(self.tag_to_string(h3))
                continue
            else:
                h2 = li.find('h2')
                if h2 is not None:
                    article = self.create_article(h2)
                    if article is None:
                        continue
        main_section = BeautifulSoup(matches.group(0))

                    if articles.has_key(section):
                        articles[section].append(article)
                    else:
                        articles[section] = [article]
                        sections.append(section)
        # date
        matches = re.search(r'(\d{2}-\d{2}-\d{4})', self.tag_to_string(main_section.find('h2')))
        if matches:
            self.DATE = matches.group(0)

        # cover
        img = main_section.find(lambda tag: tag.name == 'img' and tag.has_key('alt') and tag.has_key('title'))
        self.cover_url = img['src']
        feeds = []
        articles = {}
        sections = []

        # sections
        for sectionUl in main_section.findAll('ul', attrs={'class' : 'whatsin'}):

            # section header
            header = sectionUl.find('li', attrs={'class' : 'header'})
            if header is None:
                continue

            section = capwords(self.tag_to_string(header))

            # articles in section
            articleUl = sectionUl.find('ul')
            if articleUl is None:
                continue

            for articleLi in articleUl.findAll('li'):
                # check if article is closed which should be skipped
                closed = articleLi.find('span', attrs={'class' : 'closeart'})
                if closed is not None:
                    continue

                article = self.create_article(articleLi)
                if article is None:
                    continue

                if articles.has_key(section):
                    articles[section].append(article)
                else:
                    articles[section] = [article]
                    sections.append(section)

        for section in sections:
            # print("%s -> %d" % (section, len(articles[section])))
            #
            # for article in articles[section]:
            #     print(" - %s" % article)

            feeds.append((section, articles[section]))

        return feeds


        for section in sections:
            feeds.append((section, articles[section]))
        return feeds
    #
    # Creates each article metadata (skips locked ones). The content will
    # be extracted later by other method (get_obfuscated_article).
    #
    def create_article(self, articleLi):
        article = {}

        a = articleLi.find('a')
        if a is None:
            return None

    #
    # Creates each article metadata (skips locked ones). The content will
    # be extracted later by other method (get_obfuscated_article).
    #
    def create_article(self, h2):
        article = {}
        a = h2.find('a')
        if a is None:
            return None
        article['title'] = self.tag_to_string(a)
        article['url'] = a['href']
        article['date'] = self.DATE
        article['description'] = ''

        article['title'] = self.tag_to_string(a)
        article['url'] = a['href']
        article['date'] = self.DATE
        desc = h2.findNext('p')

        if desc is not None:
            article['description'] = self.tag_to_string(desc)
        else:
            article['description'] = ''
        return article
        return article
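The reworked Newsweek Polska recipe above no longer trusts soup.find() on the full page (its own comment notes the HTML is too malformed for that); instead it slices out only the fragment it needs with a regex and re-parses that fragment. A minimal standalone sketch of the same pattern, using the recipe's own regex but a made-up HTML snippet:

import re
from calibre.ebooks.BeautifulSoup import BeautifulSoup

def extract_fragment(source, pattern=r'<ul class="rightIssueList">(.*?)</ul>'):
    # Pull out just the sub-document we care about, then parse only that part.
    matches = re.search(pattern, source, re.DOTALL)
    if matches is None:
        return None
    return BeautifulSoup(matches.group(0))

# Hypothetical, deliberately messy page for illustration:
page = '<div><ul class="rightIssueList"><li><a href="/wydania/123">nr 1</a></li></ul></div>'
sub = extract_fragment(page)
if sub is not None:
    issue_links = sub.findAll('a')  # the same call the recipe makes on the sub-soup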
@@ -1,21 +1,98 @@
from calibre.web.feeds.news import BasicNewsRecipe
import re

class HindustanTimes(BasicNewsRecipe):
class TelevisionWithoutPity(BasicNewsRecipe):
    title = u'Television Without Pity'
    language = 'en'
    __author__ = 'Krittika Goyal'
    oldest_article = 1  # days
    __author__ = 'Snarkastica'
    SHOW = 'http://www.televisionwithoutpity.com/show/SHOW-NAME-HERE/recaps/'  # Used for pulling down an entire show, not just the RSS feed
    oldest_article = 7  # days
    max_articles_per_feed = 25
    # reverse_article_order=True  # Useful for an entire show, to display in episode order
    # encoding = 'cp1252'
    use_embedded_content = False

    preprocess_regexps = [(re.compile(r'<span class="headline_recap_title .*?>', re.DOTALL|re.IGNORECASE), lambda match: '<span class="headline_recap_title">')]
    keep_only_tags = [dict(name='span', attrs={'class':'headline_recap_title'}), dict(
        name='p', attrs={'class':'byline'}), dict(name='div', attrs={'class':'body_recap'}), dict(name='h1')]
    no_stylesheets = True
    auto_cleanup = True
    # auto_cleanup_keep = '//div[@class="float_right"]'

    # Comment this out and configure process_index() to retrieve a single show
    feeds = [
        ('News',
        ('Latest Recaps',
         'http://www.televisionwithoutpity.com/rss.xml'),
    ]

    '''
    This method can be used to grab all recaps for a single show.
    Set the SHOW constant at the beginning of this file to the URL for a show's recap page
    (the page listing all recaps, usually of the form
    http://www.televisionwithoutpity.com/show/SHOW-NAME/recaps/),
    where SHOW-NAME is the hyphenated name of the show.

    To use:
    1. Comment out feeds = [...] earlier in this file
    2. Set the SHOW constant to the show's recap page
    3. Uncomment the following function
    '''

    '''
    def parse_index(self):
        soup = self.index_to_soup(self.SHOW)
        feeds = []
        articles = []
        showTitle = soup.find('h1').string
        recaps = soup.find('table')
        for ep in recaps.findAll('tr'):
            epData = ep.findAll('td')
            epNum = epData[0].find(text=True).strip()
            if not epNum == "Ep.":
                epT = self.tag_to_string(epData[1].find('em')).strip()
                epST = " (or " + self.tag_to_string(epData[1].find('h3')).strip() + ")"
                epTitle = epNum + ": " + epT + epST
                epData[1].find('em').extract()
                epURL = epData[1].find('a', href=True)
                epURL = epURL['href']
                epSum = self.tag_to_string(epData[1].find('p')).strip()
                epDate = epData[2].find(text=True).strip()
                epAuthor = self.tag_to_string(epData[4].find('p')).strip()
                articles.append({'title':epTitle, 'url':epURL, 'description':epSum, 'date':epDate, 'author':epAuthor})
        feeds.append((showTitle, articles))
        #self.abort_recipe_processing("test")
        return feeds
    '''

    # This will add subsequent pages of multipage recaps to a single article page
    def append_page(self, soup, appendtag, position):
        if (soup.find('p', attrs={'class':'pages'})):  # If false, will still grab single-page recaplets
            pager = soup.find('p', attrs={'class':'pages'}).find(text='Next')
            if pager:
                nexturl = pager.parent['href']
                soup2 = self.index_to_soup(nexturl)
                texttag = soup2.find('div', attrs={'class':'body_recap'})
                for it in texttag.findAll(style=True):
                    del it['style']
                newpos = len(texttag.contents)
                self.append_page(soup2, texttag, newpos)
                texttag.extract()
                appendtag.insert(position, texttag)

    def preprocess_html(self, soup):
        self.append_page(soup, soup.body, 3)
        return soup

    # Remove the multi-page links (we had to keep these in for append_page(), but they can go away now).
    # Could have used CSS to hide them, but some readers ignore CSS.
    def postprocess_html(self, soup, first_fetch):
        paginator = soup.findAll('p', attrs={'class':'pages'})
        if paginator:
            for p in paginator:
                p.extract()

        # TODO: Fix this so it converts the headline class into a heading 1
        #titleTag = Tag(soup, "h1")
        #repTag = soup.find('span', attrs={'class':'headline_recap_title'})
        #titleTag.insert(0, repTag.contents[0])
        #repTag.extract()
        #soup.body.insert(1, titleTag)
        return soup
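The postprocess_html above leaves a TODO about promoting the recap headline span to an h1. A hedged sketch of one way that could be done, closely following the commented-out lines in the recipe (not part of the commit; the Tag import is assumed from calibre.ebooks.BeautifulSoup, the same module the Diagonales recipe uses):

from calibre.ebooks.BeautifulSoup import Tag

def promote_headline(soup):
    # Move the headline span's text into a new <h1> near the top of the body.
    repTag = soup.find('span', attrs={'class': 'headline_recap_title'})
    if repTag is None or not repTag.contents:
        return soup
    titleTag = Tag(soup, 'h1')
    titleTag.insert(0, repTag.contents[0])
    repTag.extract()
    soup.body.insert(1, titleTag)
    return soup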
@@ -1,9 +1,8 @@

__license__ = 'GPL v3'
__copyright__ = '2012, mkydgr'
__copyright__ = '2010-2013, Darko Miletic <darko.miletic at gmail.com>'
'''
www.wired.com
based on the (broken) built-in recipe by Darko Miletic <darko.miletic at gmail.com>
'''

import re
@@ -12,12 +11,11 @@ from calibre.web.feeds.news import BasicNewsRecipe

class Wired(BasicNewsRecipe):
    title = 'Wired Magazine'
    __author__ = 'mkydgr'
    description = 'Technology News'
    __author__ = 'Darko Miletic'
    description = 'Gaming news'
    publisher = 'Conde Nast Digital'
    category = ''
    oldest_article = 500
    delay = 1
    category = 'news, games, IT, gadgets'
    oldest_article = 32
    max_articles_per_feed = 100
    no_stylesheets = True
    encoding = 'utf-8'
@@ -25,10 +23,16 @@ class Wired(BasicNewsRecipe):
    masthead_url = 'http://www.wired.com/images/home/wired_logo.gif'
    language = 'en'
    publication_type = 'magazine'
    extra_css = ' body{font-family: Arial,Verdana,sans-serif} .entryDescription li {display: inline; list-style-type: none} '
    index = 'http://www.wired.com/magazine'
    departments = ['features','start','test','play','found', 'reviews']

    extra_css = """
        h1, .entry-header{font-family: brandon-grotesque,anchor-web,Helvetica,Arial,sans-serif}
        .entry-header{display: block;}
        .entry-header ul{ list-style-type:disc;}
        .author, .entryDate, .entryTime, .entryEdit, .entryCategories{display: inline}
        .entry-header li{text-transform: uppercase;}
        div#container{font-family: 'Exchange SSm 4r', Georgia, serif}
    """
    index = 'http://www.wired.com/magazine/'

    preprocess_regexps = [(re.compile(r'<meta name="Title".*<title>', re.DOTALL|re.IGNORECASE), lambda match: '<title>')]
    conversion_options = {
        'comment' : description
@@ -38,56 +42,37 @@ class Wired(BasicNewsRecipe):
    }

    keep_only_tags = [dict(name='div', attrs={'class':'post'})]
    remove_tags_after = dict(name='div', attrs={'class':'tweetmeme_button'})
    remove_tags_after = dict(name='div', attrs={'id':'container'})
    remove_tags = [
        dict(name=['object','embed','iframe','link'])
        ,dict(name='div', attrs={'class':['podcast_storyboard','tweetmeme_button']})
        dict(name=['object','embed','iframe','link','meta','base'])
        ,dict(name='div', attrs={'class':['social-top','podcast_storyboard','tweetmeme_button']})
        ,dict(attrs={'id':'ff_bottom_nav'})
        ,dict(name='a', attrs={'href':'http://www.wired.com/app'})
        ,dict(name='div', attrs={'id':'mag-bug'})
    ]
    remove_attributes = ['height','width']
    remove_attributes = ['height','width','lang','border','clear']


    def parse_index(self):
        totalfeeds = []

        soup = self.index_to_soup(self.index)

        #department feeds
        depts = soup.find('div', attrs={'id':'department-posts'})

        if depts:
            for ditem in self.departments:
                darticles = []
                department = depts.find('h3', attrs={'id':'department-'+ditem})
                if department:
                    #print '\n###### Found department %s ########'%(ditem)

                    el = department.next
                    while el and (el.__class__.__name__ == 'NavigableString' or el.name != 'h3'):
                        if el.__class__.__name__ != 'NavigableString':
                            #print '\t ... element',el.name
                            if el.name == 'ul':
                                for artitem in el.findAll('li'):
                                    #print '\t\t ... article',repr(artitem)
                                    feed_link = artitem.find('a')
                                    #print '\t\t\t ... link',repr(feed_link)
                                    if feed_link and feed_link.has_key('href'):
                                        url = self.makeurl(feed_link['href'])
                                        title = self.tag_to_string(feed_link)
                                        date = strftime(self.timefmt)
                                        #print '\t\t ... found "%s" %s'%(title,url)
                                        darticles.append({
                                            'title'      :title
                                            ,'date'       :date
                                            ,'url'        :url
                                            ,'description':''
                                        })
                                el = None
                        else:
                            el = el.next

                totalfeeds.append((ditem.capitalize(), darticles))
        majorf = soup.find('div', attrs={'class':'entry'})
        if majorf:
            articles = []
            checker = []
            for a in majorf.findAll('a', href=True):
                if a['href'].startswith('http://www.wired.com/') and a['href'].endswith('/'):
                    title = self.tag_to_string(a)
                    url = a['href']
                    if title.lower() != 'read more' and url not in checker:
                        checker.append(url)
                        articles.append({
                            'title'      :title
                            ,'date'       :strftime(self.timefmt)
                            ,'url'        :a['href']
                            ,'description':''
                        })
            totalfeeds.append(('Articles', articles))
        return totalfeeds

    def get_cover_url(self):
@@ -95,7 +80,7 @@ class Wired(BasicNewsRecipe):
        soup = self.index_to_soup(self.index)
        cover_item = soup.find('div', attrs={'class':'spread-image'})
        if cover_item:
            cover_url = self.makeurl(cover_item.a.img['src'])
            cover_url = 'http://www.wired.com' + cover_item.a.img['src']
        return cover_url

    def print_version(self, url):
@@ -104,10 +89,19 @@ class Wired(BasicNewsRecipe):
    def preprocess_html(self, soup):
        for item in soup.findAll(style=True):
            del item['style']
        for item in soup.findAll('a'):
            if item.string is not None:
                tstr = item.string
                item.replaceWith(tstr)
            else:
                item.name = 'span'
                for atrs in ['href','target','alt','title','name','id']:
                    if item.has_key(atrs):
                        del item[atrs]
        for item in soup.findAll('img'):
            if not item.has_key('alt'):
                item['alt'] = 'image'
            if item.has_key('data-lazy-src'):
                item['src'] = item['data-lazy-src']
                del item['data-lazy-src']
        return soup

    def makeurl(self, addr):
        if addr[:4] != 'http' : addr='http://www.wired.com' + addr
        while addr[-2:] == '//' : addr=addr[:-1]
        return addr
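Among other cleanups, the new Wired preprocess_html copies data-lazy-src into src so that lazily loaded images survive conversion. A minimal sketch of just that step, on a hypothetical snippet (BeautifulSoup 3 style has_key calls, as used throughout these recipes):

from calibre.ebooks.BeautifulSoup import BeautifulSoup

html = '<p><img data-lazy-src="http://www.wired.com/images/example.jpg"></p>'  # hypothetical markup
soup = BeautifulSoup(html)
for img in soup.findAll('img'):
    if img.has_key('data-lazy-src'):
        # Promote the lazy-load attribute to a real src so the image is fetched.
        img['src'] = img['data-lazy-src']
        del img['data-lazy-src']
    if not img.has_key('alt'):
        img['alt'] = 'image'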
@@ -8,17 +8,10 @@ import copy

# http://online.wsj.com/page/us_in_todays_paper.html

def filter_classes(x):
    if not x:
        return False
    bad_classes = {'articleInsetPoll', 'trendingNow', 'sTools', 'printSummary', 'mostPopular', 'relatedCollection'}
    classes = frozenset(x.split())
    return len(bad_classes.intersection(classes)) > 0

class WallStreetJournal(BasicNewsRecipe):

    title = 'The Wall Street Journal'
    __author__ = 'Kovid Goyal, Sujata Raman, and Joshua Oster-Morris'
    __author__ = 'Kovid Goyal and Joshua Oster-Morris'
    description = 'News and current affairs'
    needs_subscription = True
    language = 'en'
@@ -26,36 +19,18 @@ class WallStreetJournal(BasicNewsRecipe):
    max_articles_per_feed = 1000
    timefmt = ' [%a, %b %d, %Y]'
    no_stylesheets = True
    ignore_duplicate_articles = {'url'}

    extra_css = '''h1{color:#093D72 ; font-size:large ; font-family:Georgia,"Century Schoolbook","Times New Roman",Times,serif; }
        h2{color:#474537; font-family:Georgia,"Century Schoolbook","Times New Roman",Times,serif; font-size:small; font-style:italic;}
        .subhead{color:gray; font-family:Georgia,"Century Schoolbook","Times New Roman",Times,serif; font-size:small; font-style:italic;}
        .insettipUnit {color:#666666; font-family:Arial,Sans-serif;font-size:xx-small }
        .targetCaption{ font-size:x-small; color:#333333; font-family:Arial,Helvetica,sans-serif}
        .article{font-family :Arial,Helvetica,sans-serif; font-size:x-small}
        .tagline {color:#333333; font-size:xx-small}
        .dateStamp {color:#666666; font-family:Arial,Helvetica,sans-serif}
        h3{color:blue ;font-family:Arial,Helvetica,sans-serif; font-size:xx-small}
        .byline{color:blue;font-family:Arial,Helvetica,sans-serif; font-size:xx-small}
        h6{color:#333333; font-family:Georgia,"Century Schoolbook","Times New Roman",Times,serif; font-size:small;font-style:italic; }
        .paperLocation{color:#666666; font-size:xx-small}'''

    remove_tags_before = dict(name='h1')
    keep_only_tags = [
        dict(name='h1'), dict(name='h2', attrs={'class':['subhead', 'subHed deck']}),
        dict(name='span', itemprop='author', rel='author'),
        dict(name='article', id='articleBody'),
        dict(name='div', id='article_story_body'),
    ]
    remove_tags = [
        dict(id=["articleTabs_tab_article",
            "articleTabs_tab_comments", 'msnLinkback', 'yahooLinkback',
            'articleTabs_panel_comments', 'footer', 'emailThisScrim', 'emailConfScrim', 'emailErrorScrim',
            "articleTabs_tab_interactive", "articleTabs_tab_video",
            "articleTabs_tab_map", "articleTabs_tab_slideshow",
            "articleTabs_tab_quotes", "articleTabs_tab_document",
            "printModeAd", "aFbLikeAuth", "videoModule",
            "mostRecommendations", "topDiscussions"]),
        {'class':['footer_columns','hidden', 'network','insetCol3wide','interactive','video','slideshow','map','insettip',
            'insetClose','more_in', "insetContent", 'articleTools_bottom', 'aTools', "tooltip", "adSummary", "nav-inline"]},
        dict(rel='shortcut icon'),
        {'class':filter_classes},
    ]
    remove_tags_after = [dict(id="article_story_body"), {'class':"article story"},]
        dict(attrs={'class':['insetButton', 'insettipBox']}),
        dict(name='span', attrs={'data-country-code':True, 'data-ticker-code':True}),
    ]

    use_javascript_to_login = True

@@ -72,15 +47,12 @@ class WallStreetJournal(BasicNewsRecipe):
        if picdiv is not None:
            self.add_toc_thumbnail(article, picdiv['src'])

    def postprocess_html(self, soup, first):
        for tag in soup.findAll(name=['table', 'tr', 'td']):
            tag.name = 'div'

        for tag in soup.findAll('div', dict(id=[
                "articleThumbnail_1", "articleThumbnail_2", "articleThumbnail_3",
                "articleThumbnail_4", "articleThumbnail_5", "articleThumbnail_6",
                "articleThumbnail_7"])):
            tag.extract()
    def preprocess_html(self, soup):
        # Remove thumbnail for zoomable images
        for div in soup.findAll('div', attrs={'class':lambda x: x and 'insetZoomTargetBox' in x.split()}):
            img = div.find('img')
            if img is not None:
                img.extract()

        return soup

@@ -19,35 +19,18 @@ class WallStreetJournal(BasicNewsRecipe):
    max_articles_per_feed = 1000
    timefmt = ' [%a, %b %d, %Y]'
    no_stylesheets = True
    ignore_duplicate_articles = {'url'}

    extra_css = '''h1{color:#093D72 ; font-size:large ; font-family:Georgia,"Century Schoolbook","Times New Roman",Times,serif; }
        h2{color:#474537; font-family:Georgia,"Century Schoolbook","Times New Roman",Times,serif; font-size:small; font-style:italic;}
        .subhead{color:gray; font-family:Georgia,"Century Schoolbook","Times New Roman",Times,serif; font-size:small; font-style:italic;}
        .insettipUnit {color:#666666; font-family:Arial,Sans-serif;font-size:xx-small }
        .targetCaption{ font-size:x-small; color:#333333; font-family:Arial,Helvetica,sans-serif}
        .article{font-family :Arial,Helvetica,sans-serif; font-size:x-small}
        .tagline {color:#333333; font-size:xx-small}
        .dateStamp {color:#666666; font-family:Arial,Helvetica,sans-serif}
        h3{color:blue ;font-family:Arial,Helvetica,sans-serif; font-size:xx-small}
        .byline{color:blue;font-family:Arial,Helvetica,sans-serif; font-size:xx-small}
        h6{color:#333333; font-family:Georgia,"Century Schoolbook","Times New Roman",Times,serif; font-size:small;font-style:italic; }
        .paperLocation{color:#666666; font-size:xx-small}'''

    remove_tags_before = dict(name='h1')
    keep_only_tags = [
        dict(name='h1'), dict(name='h2', attrs={'class':['subhead', 'subHed deck']}),
        dict(name='span', itemprop='author', rel='author'),
        dict(name='article', id='articleBody'),
        dict(name='div', id='article_story_body'),
    ]
    remove_tags = [
        dict(id=["articleTabs_tab_article",
            "articleTabs_tab_comments",
            "articleTabs_tab_interactive","articleTabs_tab_video","articleTabs_tab_map","articleTabs_tab_slideshow",
            "articleTabs_tab_quotes"]),
        {'class':['footer_columns','network','insetCol3wide','interactive','video','slideshow','map','insettip','insetClose','more_in', "insetContent", 'articleTools_bottom', 'aTools', "tooltip", "adSummary", "nav-inline"]},
        dict(name='div', attrs={'data-flash-settings':True}),
        {'class':['insetContent embedType-interactive insetCol3wide','insetCol6wide','insettipUnit']},
        dict(rel='shortcut icon'),
        {'class':lambda x: x and 'sTools' in x},
        {'class':lambda x: x and 'printSummary' in x},
        {'class':lambda x: x and 'mostPopular' in x},
    ]
    remove_tags_after = [dict(id="article_story_body"), {'class':"article story"},]
        dict(attrs={'class':['insetButton', 'insettipBox']}),
        dict(name='span', attrs={'data-country-code':True, 'data-ticker-code':True}),
    ]

    def populate_article_metadata(self, article, soup, first):
        if first and hasattr(self, 'add_toc_thumbnail'):
@@ -55,12 +38,12 @@ class WallStreetJournal(BasicNewsRecipe):
            if picdiv is not None:
                self.add_toc_thumbnail(article, picdiv['src'])

    def postprocess_html(self, soup, first):
        for tag in soup.findAll(name=['table', 'tr', 'td']):
            tag.name = 'div'

        for tag in soup.findAll('div', dict(id=["articleThumbnail_1", "articleThumbnail_2", "articleThumbnail_3", "articleThumbnail_4", "articleThumbnail_5", "articleThumbnail_6", "articleThumbnail_7"])):
            tag.extract()
    def preprocess_html(self, soup):
        # Remove thumbnail for zoomable images
        for div in soup.findAll('div', attrs={'class':lambda x: x and 'insetZoomTargetBox' in x.split()}):
            img = div.find('img')
            if img is not None:
                img.extract()

        return soup

@@ -69,7 +52,6 @@ class WallStreetJournal(BasicNewsRecipe):
            href = 'http://online.wsj.com' + href
        return href

    def wsj_get_index(self):
        return self.index_to_soup('http://online.wsj.com/itp')

@@ -83,7 +65,7 @@ class WallStreetJournal(BasicNewsRecipe):
        except:
            articles = []
        if articles:
            feeds.append((title, articles))
        feeds.append((title, articles))
        return feeds

    def parse_index(self):
@@ -99,16 +81,16 @@ class WallStreetJournal(BasicNewsRecipe):
        for a in div.findAll('a', href=lambda x: x and '/itp/' in x):
            pageone = a['href'].endswith('pageone')
            if pageone:
                title = 'Front Section'
                url = self.abs_wsj_url(a['href'])
                feeds = self.wsj_add_feed(feeds, title, url)
                title = 'What''s News'
                url = url.replace('pageone', 'whatsnews')
                feeds = self.wsj_add_feed(feeds, title, url)
            else:
                title = self.tag_to_string(a)
                url = self.abs_wsj_url(a['href'])
                feeds = self.wsj_add_feed(feeds, title, url)
        return feeds

    def wsj_find_wn_articles(self, url):
@@ -117,21 +99,21 @@ class WallStreetJournal(BasicNewsRecipe):

        whats_news = soup.find('div', attrs={'class':lambda x: x and 'whatsNews-simple' in x})
        if whats_news is not None:
            for a in whats_news.findAll('a', href=lambda x: x and '/article/' in x):
                container = a.findParent(['p'])
                meta = a.find(attrs={'class':'meta_sectionName'})
                if meta is not None:
                    meta.extract()
                title = self.tag_to_string(a).strip()
                url = a['href']
                desc = ''
                if container is not None:
                    desc = self.tag_to_string(container)

                articles.append({'title':title, 'url':url,
                    'description':desc, 'date':''})

                self.log('\tFound WN article:', title)

        return articles

@@ -140,18 +122,18 @@ class WallStreetJournal(BasicNewsRecipe):

        whats_news = soup.find('div', attrs={'class':lambda x: x and 'whatsNews-simple' in x})
        if whats_news is not None:
            whats_news.extract()

        articles = []

        flavorarea = soup.find('div', attrs={'class':lambda x: x and 'ahed' in x})
        if flavorarea is not None:
            flavorstory = flavorarea.find('a', href=lambda x: x and x.startswith('/article'))
            if flavorstory is not None:
                flavorstory['class'] = 'mjLinkItem'
                metapage = soup.find('span', attrs={'class':lambda x: x and 'meta_sectionName' in x})
                if metapage is not None:
                    flavorstory.append(copy.copy(metapage))  # metapage should always be A1 because that should be first on the page

        for a in soup.findAll('a', attrs={'class':'mjLinkItem'}, href=True):
            container = a.findParent(['li', 'div'])
@@ -176,5 +158,3 @@ class WallStreetJournal(BasicNewsRecipe):
            self.log('\tFound article:', title)

        return articles
BIN  resources/images/textures/dark_cloth.png      (new file, binary not shown; 19 KiB)
BIN  resources/images/textures/dark_wood.png       (new file, binary not shown; 132 KiB)
BIN  resources/images/textures/grey_wash_wall.png  (new file, binary not shown; 46 KiB)
BIN  resources/images/textures/light_wood.png      (new file, binary not shown; 168 KiB)
BIN  resources/images/textures/subtle_wood.png     (new file, binary not shown; 100 KiB)
@@ -138,6 +138,8 @@ class UploadInstallers(Command):  # {{{
        available = set(glob.glob('dist/*'))
        files = {x:installer_description(x) for x in
                all_possible.intersection(available)}
        for x in files:
            os.chmod(x, stat.S_IRUSR|stat.S_IWUSR|stat.S_IRGRP|stat.S_IROTH)
        sizes = {os.path.basename(x):os.path.getsize(x) for x in files}
        self.record_sizes(sizes)
        tdir = mkdtemp()
@@ -4,7 +4,7 @@ __license__ = 'GPL v3'
__copyright__ = '2008, Kovid Goyal kovid@kovidgoyal.net'
__docformat__ = 'restructuredtext en'
__appname__ = u'calibre'
numeric_version = (1, 6, 0)
numeric_version = (1, 7, 0)
__version__ = u'.'.join(map(unicode, numeric_version))
__author__ = u"Kovid Goyal <kovid@kovidgoyal.net>"
@@ -1008,7 +1008,7 @@ class DB(object):
        callback(_('Restoring database from SQL') + '...')
        with closing(Connection(tmpdb)) as conn:
            shell = Shell(db=conn, encoding='utf-8')
            shell.process_command('.read ' + fname)
            shell.process_command('.read ' + fname.replace(os.sep, '/'))
            conn.execute('PRAGMA user_version=%d;'%uv)

        self.close()
@@ -1406,6 +1406,8 @@ class DB(object):

        source_ok = current_path and os.path.exists(spath)
        wam = WindowsAtomicFolderMove(spath) if iswindows and source_ok else None
        format_map = {}
        original_format_map = {}
        try:
            if not os.path.exists(tpath):
                os.makedirs(tpath)
@@ -1416,22 +1418,34 @@ class DB(object):
                    windows_atomic_move=wam, use_hardlink=True)
            for fmt in formats:
                dest = os.path.join(tpath, fname+'.'+fmt.lower())
                self.copy_format_to(book_id, fmt, formats_field.format_fname(book_id, fmt), current_path,
                format_map[fmt] = dest
                ofmt_fname = formats_field.format_fname(book_id, fmt)
                original_format_map[fmt] = os.path.join(spath, ofmt_fname+'.'+fmt.lower())
                self.copy_format_to(book_id, fmt, ofmt_fname, current_path,
                                    dest, windows_atomic_move=wam, use_hardlink=True)
            # Update db to reflect new file locations
            for fmt in formats:
                formats_field.table.set_fname(book_id, fmt, fname, self)
            path_field.table.set_path(book_id, path, self)

            # Delete not needed directories
            # Delete not needed files and directories
            if source_ok:
                if os.path.exists(spath) and not samefile(spath, tpath):
                    if wam is not None:
                        wam.delete_originals()
                    self.rmtree(spath)
                    parent = os.path.dirname(spath)
                    if len(os.listdir(parent)) == 0:
                        self.rmtree(parent)
                if os.path.exists(spath):
                    if samefile(spath, tpath):
                        # The format filenames may have changed while the folder
                        # name remains the same
                        for fmt, opath in original_format_map.iteritems():
                            npath = format_map.get(fmt, None)
                            if npath and os.path.abspath(npath.lower()) != os.path.abspath(opath.lower()) and samefile(opath, npath):
                                # opath and npath are different hard links to the same file
                                os.unlink(opath)
                    else:
                        if wam is not None:
                            wam.delete_originals()
                        self.rmtree(spath)
                        parent = os.path.dirname(spath)
                        if len(os.listdir(parent)) == 0:
                            self.rmtree(parent)
        finally:
            if wam is not None:
                wam.close_handles()
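The key change above: when the book folder itself is unchanged (samefile(spath, tpath)) but a format's file name changed spelling or case, the old name can be a second hard link to the same data, so it is unlinked instead of being left behind as a duplicate. A small standalone sketch of that check (paths are hypothetical; calibre uses its own samefile helper, the stdlib os.path.samefile stands in for it here):

import os

def remove_stale_hardlink(old_path, new_path):
    # Same directory entry spelled differently (e.g. only case changed)? Nothing to do.
    if os.path.abspath(new_path.lower()) == os.path.abspath(old_path.lower()):
        return
    # Two different names that point at the same underlying file: drop the old name.
    if (os.path.exists(old_path) and os.path.exists(new_path)
            and os.path.samefile(old_path, new_path)):
        os.unlink(old_path)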
@@ -18,7 +18,7 @@ from calibre.constants import iswindows, preferred_encoding
from calibre.customize.ui import run_plugins_on_import, run_plugins_on_postimport
from calibre.db import SPOOL_SIZE, _get_next_series_num_for_list
from calibre.db.categories import get_categories
from calibre.db.locking import create_locks, DowngradeLockError
from calibre.db.locking import create_locks, DowngradeLockError, SafeReadLock
from calibre.db.errors import NoSuchFormat
from calibre.db.fields import create_field, IDENTITY, InvalidLinkTable
from calibre.db.search import Search
@@ -57,11 +57,8 @@ def wrap_simple(lock, func):
            return func(*args, **kwargs)
        except DowngradeLockError:
            # We already have an exclusive lock, no need to acquire a shared
            # lock. This can happen when updating the search cache in the
            # presence of composite columns. Updating the search cache holds an
            # exclusive lock, but searching a composite column involves
            # reading field values via ProxyMetadata which tries to get a
            # shared lock.
            # lock. See the safe_read_lock property's documentation for why
            # this is necessary.
            return func(*args, **kwargs)
    return call_func_with_lock

@@ -118,6 +115,22 @@ class Cache(object):
        self._search_api = Search(self, 'saved_searches', self.field_metadata.get_search_terms())
        self.initialize_dynamic()

    @property
    def safe_read_lock(self):
        ''' A safe read lock is a lock that does nothing if the thread already
        has a write lock, otherwise it acquires a read lock. This is necessary
        to prevent DowngradeLockErrors, which can happen when updating the
        search cache in the presence of composite columns. Updating the search
        cache holds an exclusive lock, but searching a composite column
        involves reading field values via ProxyMetadata which tries to get a
        shared lock. There may be other scenarios that trigger this as well.

        This property returns a new lock object on every access. This lock
        object is not recursive (for performance) and must only be used in a
        with statement as ``with cache.safe_read_lock:`` otherwise bad things
        will happen.'''
        return SafeReadLock(self.read_lock)

    @write_api
    def initialize_dynamic(self):
        # Reconstruct the user categories, putting them into field_metadata
@@ -501,7 +514,7 @@ class Cache(object):
        x = self.format_metadata_cache[book_id].get(fmt, None)
        if x is not None:
            return x
        with self.read_lock:
        with self.safe_read_lock:
            try:
                name = self.fields['formats'].format_fname(book_id, fmt)
                path = self._field_for('path', book_id).replace('/', os.sep)
@@ -545,7 +558,7 @@ class Cache(object):
        cover_as_data is True then as mi.cover_data.
        '''

        with self.read_lock:
        with self.safe_read_lock:
            mi = self._get_metadata(book_id, get_user_categories=get_user_categories)

        if get_cover:
@@ -751,7 +764,7 @@ class Cache(object):
        ext = ('.'+fmt.lower()) if fmt else ''
        if as_path:
            if preserve_filename:
                with self.read_lock:
                with self.safe_read_lock:
                    try:
                        fname = self.fields['formats'].format_fname(book_id, fmt)
                    except:
@@ -777,7 +790,7 @@ class Cache(object):
                return None
            ret = pt.name
        elif as_file:
            with self.read_lock:
            with self.safe_read_lock:
                try:
                    fname = self.fields['formats'].format_fname(book_id, fmt)
                except:
@@ -878,7 +891,7 @@ class Cache(object):
    @api
    def get_categories(self, sort='name', book_ids=None, icon_map=None, already_fixed=None):
        try:
            with self.read_lock:
            with self.safe_read_lock:
                return get_categories(self, sort=sort, book_ids=book_ids, icon_map=icon_map)
        except InvalidLinkTable as err:
            bad_field = err.field_name
@@ -1397,6 +1410,10 @@ class Cache(object):
            except:
                path = None
            path_map[book_id] = path
        if iswindows:
            paths = (x.replace(os.sep, '/') for x in path_map.itervalues() if x)
            self.backend.windows_check_if_files_in_use(paths)

        self.backend.remove_books(path_map, permanent=permanent)
        for field in self.fields.itervalues():
            try:
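The diff above swaps bare self.read_lock for the new safe_read_lock property in the read paths. The real SafeReadLock lives in calibre.db.locking and is not shown in this commit; a rough sketch of the behaviour the docstring describes (do nothing when the thread already holds the write lock, otherwise take the shared lock) might look like this. This is a simplified illustration, not the actual implementation:

from calibre.db.locking import DowngradeLockError

class SafeReadLockSketch(object):
    # Wraps a shared (read) lock; skips acquisition if this thread already
    # holds the exclusive (write) lock, avoiding a downgrade error.
    def __init__(self, read_lock):
        self.read_lock = read_lock
        self.acquired = False

    def __enter__(self):
        try:
            self.read_lock.acquire()
            self.acquired = True
        except DowngradeLockError:
            # This thread already holds the write lock; proceed without the read lock.
            self.acquired = False
        return self

    def __exit__(self, *args):
        if self.acquired:
            self.read_lock.release()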
@@ -6,10 +6,11 @@ from __future__ import (unicode_literals, division, absolute_import,
__license__ = 'GPL v3'
__copyright__ = '2013, Kovid Goyal <kovid at kovidgoyal.net>'

import os, tempfile, shutil, errno, time
import os, tempfile, shutil, errno, time, atexit
from threading import Thread
from Queue import Queue

from calibre.ptempfile import remove_dir
from calibre.utils.recycle_bin import delete_tree, delete_file

class DeleteService(Thread):
@@ -40,39 +41,64 @@ class DeleteService(Thread):
        base_path = os.path.dirname(library_path)
        base = os.path.basename(library_path)
        try:
            return tempfile.mkdtemp(prefix=base+' deleted ', dir=base_path)
            ans = tempfile.mkdtemp(prefix=base+' deleted ', dir=base_path)
        except OSError:
            return tempfile.mkdtemp(prefix=base+' deleted ')
            ans = tempfile.mkdtemp(prefix=base+' deleted ')
        atexit.register(remove_dir, ans)
        return ans

    def remove_dir_if_empty(self, path):
        try:
            os.rmdir(path)
        except OSError as e:
            if e.errno == errno.ENOTEMPTY or len(os.listdir(path)) > 0:
                # Some linux systems appear to raise an EPERM instead of an
                # ENOTEMPTY, see https://bugs.launchpad.net/bugs/1240797
                return
            raise

    def delete_books(self, paths, library_path):
        tdir = self.create_staging(library_path)
        self.queue_paths(tdir, paths, delete_empty_parent=True)

    def queue_paths(self, tdir, paths, delete_empty_parent=True):
        queued = False
        try:
            self._queue_paths(tdir, paths, delete_empty_parent=delete_empty_parent)
        except:
            if os.path.exists(tdir):
                shutil.rmtree(tdir, ignore_errors=True)
            raise

    def _queue_paths(self, tdir, paths, delete_empty_parent=True):
        requests = []
        for path in paths:
            if os.path.exists(path):
                basename = os.path.basename(path)
                c = 0
                while True:
                    dest = os.path.join(tdir, basename)
                    if not os.path.exists(dest):
                        break
                    c += 1
                    basename = '%d - %s' % (c, os.path.basename(path))
                try:
                    shutil.move(path, tdir)
                    shutil.move(path, dest)
                except EnvironmentError:
                    if os.path.isdir(path):
                        # shutil.move may have partially copied the directory,
                        # so the subsequent call to move() will fail as the
                        # destination directory already exists
                        raise
                    # Wait a little in case something has locked a file
                    time.sleep(1)
                    shutil.move(path, tdir)
                    shutil.move(path, dest)
                if delete_empty_parent:
                    parent = os.path.dirname(path)
                    try:
                        os.rmdir(parent)
                    except OSError as e:
                        if e.errno != errno.ENOTEMPTY:
                            raise
                self.requests.put(os.path.join(tdir, os.path.basename(path)))
                queued = True
        if not queued:
            try:
                os.rmdir(tdir)
            except OSError as e:
                if e.errno != errno.ENOTEMPTY:
                    raise
                    self.remove_dir_if_empty(os.path.dirname(path))
                requests.append(dest)
        if not requests:
            self.remove_dir_if_empty(tdir)
        else:
            self.requests.put(tdir)

    def delete_files(self, paths, library_path):
        tdir = self.create_staging(library_path)
@@ -96,16 +122,17 @@ class DeleteService(Thread):
        'Blocks until all pending deletes have completed'
        self.requests.join()

    def do_delete(self, x):
        if os.path.isdir(x):
            delete_tree(x)
        else:
            delete_file(x)
        try:
            os.rmdir(os.path.dirname(x))
        except OSError as e:
            if e.errno != errno.ENOTEMPTY:
                raise
    def do_delete(self, tdir):
        if os.path.exists(tdir):
            try:
                for x in os.listdir(tdir):
                    x = os.path.join(tdir, x)
                    if os.path.isdir(x):
                        delete_tree(x)
                    else:
                        delete_file(x)
            finally:
                shutil.rmtree(tdir)

__ds = None
def delete_service():

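The delete flow above can be summarized with a small standalone sketch (the function names below are illustrative, not calibre APIs; only the stdlib calls are real): books are first moved into a per-library staging directory, the whole directory is handed to a background worker for the actual recycle-bin delete, and an atexit hook removes the staging directory if the process exits before the worker gets to it.

    # Standalone sketch of the staging pattern (stdlib only, illustrative names).
    import atexit, os, shutil, tempfile

    def create_staging(base_dir):
        staging = tempfile.mkdtemp(prefix='deleted ', dir=base_dir)
        # Fallback cleanup in case the background worker never runs.
        atexit.register(shutil.rmtree, staging, True)  # ignore_errors=True
        return staging

    def stage_for_delete(paths, staging, queue):
        moved = []
        for path in paths:
            if os.path.exists(path):
                # The real code also deduplicates clashing basenames.
                dest = os.path.join(staging, os.path.basename(path))
                shutil.move(path, dest)
                moved.append(dest)
        if moved:
            queue.put(staging)   # worker later deletes the whole staging dir
        else:
            os.rmdir(staging)
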
@@ -132,7 +132,7 @@ def adata_getter(field):
            author_ids, adata = cache['adata']
        except KeyError:
            db = dbref()
            with db.read_lock:
            with db.safe_read_lock:
                author_ids = db._field_ids_for('authors', book_id)
                adata = db._author_data(author_ids)
            cache['adata'] = (author_ids, adata)

@@ -154,7 +154,7 @@ class LibraryDatabase(object):
        return tuple(self.new_api.all_book_ids())

    def is_empty(self):
        with self.new_api.read_lock:
        with self.new_api.safe_read_lock:
            return not bool(self.new_api.fields['title'].table.book_col_map)

    def get_usage_count_by_id(self, field):
@@ -363,7 +363,7 @@ class LibraryDatabase(object):

    def authors_with_sort_strings(self, index, index_is_id=False):
        book_id = index if index_is_id else self.id(index)
        with self.new_api.read_lock:
        with self.new_api.safe_read_lock:
            authors = self.new_api._field_ids_for('authors', book_id)
            adata = self.new_api._author_data(authors)
            return [(aid, adata[aid]['name'], adata[aid]['sort'], adata[aid]['link']) for aid in authors]
@@ -379,7 +379,7 @@ class LibraryDatabase(object):
        self.notify('metadata', list(changed_books))

    def book_on_device(self, book_id):
        with self.new_api.read_lock:
        with self.new_api.safe_read_lock:
            return self.new_api.fields['ondevice'].book_on_device(book_id)

    def book_on_device_string(self, book_id):
@@ -393,7 +393,7 @@ class LibraryDatabase(object):
        return self.new_api.fields['ondevice'].book_on_device_func

    def books_in_series(self, series_id):
        with self.new_api.read_lock:
        with self.new_api.safe_read_lock:
            book_ids = self.new_api._books_for_field('series', series_id)
            ff = self.new_api._field_for
            return sorted(book_ids, key=lambda x:ff('series_index', x))

@@ -13,6 +13,8 @@ from calibre.utils.config_base import tweaks

class LockingError(RuntimeError):

    is_locking_error = True

    def __init__(self, msg, extra=None):
        RuntimeError.__init__(self, msg)
        self.locking_debug_msg = extra
@@ -211,16 +213,15 @@ class RWLockWrapper(object):
        self._shlock = shlock
        self._is_shared = is_shared

    def __enter__(self):
    def acquire(self):
        self._shlock.acquire(shared=self._is_shared)
        return self

    def __exit__(self, *args):
        self.release()

    def release(self):
    def release(self, *args):
        self._shlock.release()

    __enter__ = acquire
    __exit__ = release

    def owns_lock(self):
        return self._shlock.owns_lock()

@@ -229,11 +230,11 @@ class DebugRWLockWrapper(RWLockWrapper):
    def __init__(self, *args, **kwargs):
        RWLockWrapper.__init__(self, *args, **kwargs)

    def __enter__(self):
    def acquire(self):
        print ('#' * 120, file=sys.stderr)
        print ('acquire called: thread id:', current_thread(), 'shared:', self._is_shared, file=sys.stderr)
        traceback.print_stack()
        RWLockWrapper.__enter__(self)
        RWLockWrapper.acquire(self)
        print ('acquire done: thread id:', current_thread(), file=sys.stderr)
        print ('_' * 120, file=sys.stderr)

@@ -245,4 +246,28 @@ class DebugRWLockWrapper(RWLockWrapper):
        print ('release done: thread id:', current_thread(), 'is_shared:', self._shlock.is_shared, 'is_exclusive:', self._shlock.is_exclusive, file=sys.stderr)
        print ('_' * 120, file=sys.stderr)

    __enter__ = acquire
    __exit__ = release

class SafeReadLock(object):

    def __init__(self, read_lock):
        self.read_lock = read_lock
        self.acquired = False

    def acquire(self):
        try:
            self.read_lock.acquire()
        except DowngradeLockError:
            pass
        else:
            self.acquired = True
        return self

    def release(self, *args):
        if self.acquired:
            self.read_lock.release()
            self.acquired = False

    __enter__ = acquire
    __exit__ = release

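Because ``__enter__`` and ``__exit__`` are simply bound to ``acquire`` and ``release``, the same object works both as an explicit lock and as a context manager, and ``release`` accepting ``*args`` is what lets it double as ``__exit__``. A short usage sketch (illustrative; ``cache.read_lock`` is the wrapped shared lock as shown above):

    # Sketch: SafeReadLock skips acquisition when acquiring would downgrade an
    # already-held exclusive lock, and only releases what it actually acquired.
    lock = SafeReadLock(cache.read_lock)   # what the safe_read_lock property returns
    with lock:
        pass  # read something; no DowngradeLockError even under the write lock
    # Equivalent explicit form:
    lock.acquire()
    try:
        pass
    finally:
        lock.release()
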
@@ -76,9 +76,30 @@ class FilesystemTest(BaseTest):
            f = open(fpath, 'rb')
            with self.assertRaises(IOError):
                cache.set_field('title', {1:'Moved'})
            with self.assertRaises(IOError):
                cache.remove_books({1})
            f.close()
            self.assertNotEqual(cache.field_for('title', 1), 'Moved', 'Title was changed despite file lock')

        # Test on folder with hardlinks
        from calibre.ptempfile import TemporaryDirectory
        from calibre.utils.filenames import hardlink_file, WindowsAtomicFolderMove
        raw = b'xxx'
        with TemporaryDirectory() as tdir1, TemporaryDirectory() as tdir2:
            a, b = os.path.join(tdir1, 'a'), os.path.join(tdir1, 'b')
            a = os.path.join(tdir1, 'a')
            with open(a, 'wb') as f:
                f.write(raw)
            hardlink_file(a, b)
            wam = WindowsAtomicFolderMove(tdir1)
            wam.copy_path_to(a, os.path.join(tdir2, 'a'))
            wam.copy_path_to(b, os.path.join(tdir2, 'b'))
            wam.delete_originals()
            self.assertEqual([], os.listdir(tdir1))
            self.assertEqual({'a', 'b'}, set(os.listdir(tdir2)))
            self.assertEqual(raw, open(os.path.join(tdir2, 'a'), 'rb').read())
            self.assertEqual(raw, open(os.path.join(tdir2, 'b'), 'rb').read())

    def test_library_move(self):
        ' Test moving of library '
        from calibre.ptempfile import TemporaryDirectory
@@ -106,3 +127,15 @@ class FilesystemTest(BaseTest):
        self.assertLessEqual(len(cache.field_for('path', 1)), cache.backend.PATH_LIMIT * 2)
        fpath = cache.format_abspath(1, cache.formats(1)[0])
        self.assertLessEqual(len(fpath), len(cache.backend.library_path) + cache.backend.PATH_LIMIT * 4)

    def test_fname_change(self):
        ' Test the changing of the filename but not the folder name '
        cache = self.init_cache()
        title = 'a'*30 + 'bbb'
        cache.backend.PATH_LIMIT = 100
        cache.set_field('title', {3:title})
        cache.add_format(3, 'TXT', BytesIO(b'xxx'))
        cache.backend.PATH_LIMIT = 40
        cache.set_field('title', {3:title})
        fpath = cache.format_abspath(3, 'TXT')
        self.assertEqual(sorted([os.path.basename(fpath)]), sorted(os.listdir(os.path.dirname(fpath))))

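For background, the hardlink case being tested is two directory entries pointing at the same underlying file, which is what produced the spurious "file in use" errors. A general stdlib illustration of the situation on POSIX (calibre's own hardlink_file and WindowsAtomicFolderMove helpers, used above, cover the Windows specifics):

    # General illustration only: two names, one underlying file.
    import os, tempfile
    tdir = tempfile.mkdtemp()
    a, b = os.path.join(tdir, 'a'), os.path.join(tdir, 'b')
    open(a, 'wb').write(b'xxx')
    os.link(a, b)                    # hard link: same inode, two names
    assert os.stat(a).st_nlink == 2  # deleting one name must not break handling of the other
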
@@ -205,7 +205,7 @@ class View(object):

    def get_series_sort(self, idx, index_is_id=True, default_value=''):
        book_id = idx if index_is_id else self.index_to_id(idx)
        with self.cache.read_lock:
        with self.cache.safe_read_lock:
            lang_map = self.cache.fields['languages'].book_value_map
            lang = lang_map.get(book_id, None) or None
            if lang:
@@ -223,7 +223,7 @@ class View(object):

    def get_author_data(self, idx, index_is_id=True, default_value=None):
        id_ = idx if index_is_id else self.index_to_id(idx)
        with self.cache.read_lock:
        with self.cache.safe_read_lock:
            ids = self.cache._field_ids_for('authors', id_)
            adata = self.cache._author_data(ids)
            ans = [':::'.join((adata[aid]['name'], adata[aid]['sort'], adata[aid]['link'])) for aid in ids if aid in adata]

@@ -59,8 +59,24 @@ class TOLINO(EB600):
    VENDOR_NAME = ['DEUTSCHE']
    WINDOWS_MAIN_MEM = WINDOWS_CARD_A_MEM = ['_TELEKOMTOLINO']

    EXTRA_CUSTOMIZATION_MESSAGE = [
        _('Swap main and card A') +
        ':::' +
        _('Check this box if the device\'s main memory is being seen as card a and the card '
          'is being seen as main memory. Some Tolino devices may need this option.'),
    ]

    EXTRA_CUSTOMIZATION_DEFAULT = [
        True,
    ]

    OPT_SWAP_MEMORY = 0

    # There are apparently two versions of this device, one with swapped
    # drives and one without, see https://bugs.launchpad.net/bugs/1240504
    def linux_swap_drives(self, drives):
        if len(drives) < 2 or not drives[1] or not drives[2]:
        e = self.settings().extra_customization
        if len(drives) < 2 or not drives[0] or not drives[1] or not e[self.OPT_SWAP_MEMORY]:
            return drives
        drives = list(drives)
        t = drives[0]
@@ -69,7 +85,8 @@ class TOLINO(EB600):
        return tuple(drives)

    def windows_sort_drives(self, drives):
        if len(drives) < 2:
        e = self.settings().extra_customization
        if len(drives) < 2 or not e[self.OPT_SWAP_MEMORY]:
            return drives
        main = drives.get('main', None)
        carda = drives.get('carda', None)

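The swap itself is a plain reordering gated on the new user option; a condensed sketch of the logic the two methods above implement (``drives`` being whatever the detection code produced, ``swap_enabled`` the value of the extra customization option):

    # Sketch of the gated swap used by linux_swap_drives/windows_sort_drives.
    def maybe_swap(drives, swap_enabled):
        if len(drives) < 2 or not drives[0] or not drives[1] or not swap_enabled:
            return drives
        drives = list(drives)
        drives[0], drives[1] = drives[1], drives[0]
        return tuple(drives)
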
@@ -64,7 +64,7 @@ class HANLINV3(USBMS):
        return names

    def linux_swap_drives(self, drives):
        if len(drives) < 2 or not drives[1] or not drives[2]: return drives
        if len(drives) < 2 or not drives[0] or not drives[1]: return drives
        drives = list(drives)
        t = drives[0]
        drives[0] = drives[1]

@@ -461,7 +461,7 @@ class WAYTEQ(USBMS):

    def linux_swap_drives(self, drives):
        # See https://bugs.launchpad.net/bugs/1151901
        if len(drives) < 2 or not drives[1] or not drives[2]:
        if len(drives) < 2 or not drives[0] or not drives[1]:
            return drives
        drives = list(drives)
        t = drives[0]

@@ -120,7 +120,8 @@ class USER_DEFINED(USBMS):
        self.plugin_needs_delayed_initialization = False

    def windows_sort_drives(self, drives):
        if len(drives) < 2: return drives
        if len(drives) < 2:
            return drives
        e = self.settings().extra_customization
        if not e[self.OPT_SWAP_MAIN_AND_CARD]:
            return drives
@@ -132,7 +133,8 @@ class USER_DEFINED(USBMS):
        return drives

    def linux_swap_drives(self, drives):
        if len(drives) < 2 or not drives[1] or not drives[2]: return drives
        if len(drives) < 2 or not drives[0] or not drives[1]:
            return drives
        e = self.settings().extra_customization
        if not e[self.OPT_SWAP_MAIN_AND_CARD]:
            return drives
@@ -143,7 +145,8 @@ class USER_DEFINED(USBMS):
        return tuple(drives)

    def osx_sort_names(self, names):
        if len(names) < 2: return names
        if len(names) < 2:
            return names
        e = self.settings().extra_customization
        if not e[self.OPT_SWAP_MAIN_AND_CARD]:
            return names

@ -17,11 +17,13 @@ from lxml import html
|
||||
from urlparse import urldefrag
|
||||
|
||||
from calibre import prepare_string_for_xml
|
||||
from calibre.ebooks.oeb.base import XHTML, XHTML_NS, barename, namespace,\
|
||||
OEB_IMAGES, XLINK, rewrite_links, urlnormalize
|
||||
from calibre.ebooks.oeb.base import (
|
||||
XHTML, XHTML_NS, barename, namespace, OEB_IMAGES, XLINK, rewrite_links, urlnormalize)
|
||||
from calibre.ebooks.oeb.stylizer import Stylizer
|
||||
from calibre.utils.logging import default_log
|
||||
|
||||
SELF_CLOSING_TAGS = {'area', 'base', 'basefont', 'br', 'hr', 'input', 'img', 'link', 'meta'}
|
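
The HTMLZ fixes below hinge on treating void elements specially when serializing: a tag listed in SELF_CLOSING_TAGS is emitted as, e.g., ``<br />`` and never given a closing tag. A minimal sketch of that rule, mirroring the logic added in the dump_text methods:

    # Sketch: emit an opening tag, self-closing it for void elements.
    def open_tag(tag, attributes=''):
        if tag in SELF_CLOSING_TAGS:
            return '<%s%s />' % (tag, attributes)
        return '<%s%s>' % (tag, attributes)

    def close_tag(tag):
        return '' if tag in SELF_CLOSING_TAGS else '</%s>' % tag
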
||||
|
||||
class OEB2HTML(object):
|
||||
'''
|
||||
Base class. All subclasses should implement dump_text to actually transform
|
||||
@ -49,7 +51,7 @@ class OEB2HTML(object):
|
||||
return self.mlize_spine(oeb_book)
|
||||
|
||||
def mlize_spine(self, oeb_book):
|
||||
output = [u'<html><body><head><meta http-equiv="Content-Type" content="text/html;charset=utf-8" /></head>']
|
||||
output = [u'<html><head><meta http-equiv="Content-Type" content="text/html;charset=utf-8" /></head><body>']
|
||||
for item in oeb_book.spine:
|
||||
self.log.debug('Converting %s to HTML...' % item.href)
|
||||
self.rewrite_ids(item.data, item)
|
||||
@ -183,7 +185,11 @@ class OEB2HTMLNoCSSizer(OEB2HTML):
|
||||
at += ' %s="%s"' % (k, prepare_string_for_xml(v, attribute=True))
|
||||
|
||||
# Write the tag.
|
||||
text.append('<%s%s>' % (tag, at))
|
||||
text.append('<%s%s' % (tag, at))
|
||||
if tag in SELF_CLOSING_TAGS:
|
||||
text.append(' />')
|
||||
else:
|
||||
text.append('>')
|
||||
|
||||
# Turn styles into tags.
|
||||
if style['font-weight'] in ('bold', 'bolder'):
|
||||
@ -210,7 +216,8 @@ class OEB2HTMLNoCSSizer(OEB2HTML):
|
||||
# Close all open tags.
|
||||
tags.reverse()
|
||||
for t in tags:
|
||||
text.append('</%s>' % t)
|
||||
if t not in SELF_CLOSING_TAGS:
|
||||
text.append('</%s>' % t)
|
||||
|
||||
# Add the text that is outside of the tag.
|
||||
if hasattr(elem, 'tail') and elem.tail:
|
||||
@ -267,10 +274,14 @@ class OEB2HTMLInlineCSSizer(OEB2HTML):
|
||||
# Turn style into strings for putting in the tag.
|
||||
style_t = ''
|
||||
if style_a:
|
||||
style_t = ' style="%s"' % style_a
|
||||
style_t = ' style="%s"' % style_a.replace('"', "'")
|
||||
|
||||
# Write the tag.
|
||||
text.append('<%s%s%s>' % (tag, at, style_t))
|
||||
text.append('<%s%s%s' % (tag, at, style_t))
|
||||
if tag in SELF_CLOSING_TAGS:
|
||||
text.append(' />')
|
||||
else:
|
||||
text.append('>')
|
||||
|
||||
# Process tags that contain text.
|
||||
if hasattr(elem, 'text') and elem.text:
|
||||
@ -283,7 +294,8 @@ class OEB2HTMLInlineCSSizer(OEB2HTML):
|
||||
# Close all open tags.
|
||||
tags.reverse()
|
||||
for t in tags:
|
||||
text.append('</%s>' % t)
|
||||
if t not in SELF_CLOSING_TAGS:
|
||||
text.append('</%s>' % t)
|
||||
|
||||
# Add the text that is outside of the tag.
|
||||
if hasattr(elem, 'tail') and elem.tail:
|
||||
@ -312,7 +324,8 @@ class OEB2HTMLClassCSSizer(OEB2HTML):
|
||||
css = u'<link href="style.css" rel="stylesheet" type="text/css" />'
|
||||
else:
|
||||
css = u'<style type="text/css">' + self.get_css(oeb_book) + u'</style>'
|
||||
output = [u'<html><head><meta http-equiv="Content-Type" content="text/html;charset=utf-8" />'] + [css] + [u'</head><body>'] + output + [u'</body></html>']
|
||||
output = [u'<html><head><meta http-equiv="Content-Type" content="text/html;charset=utf-8" />'] + \
|
||||
[css] + [u'</head><body>'] + output + [u'</body></html>']
|
||||
return ''.join(output)
|
||||
|
||||
def dump_text(self, elem, stylizer, page):
|
||||
@ -350,7 +363,11 @@ class OEB2HTMLClassCSSizer(OEB2HTML):
|
||||
at += ' %s="%s"' % (k, prepare_string_for_xml(v, attribute=True))
|
||||
|
||||
# Write the tag.
|
||||
text.append('<%s%s>' % (tag, at))
|
||||
text.append('<%s%s' % (tag, at))
|
||||
if tag in SELF_CLOSING_TAGS:
|
||||
text.append(' />')
|
||||
else:
|
||||
text.append('>')
|
||||
|
||||
# Process tags that contain text.
|
||||
if hasattr(elem, 'text') and elem.text:
|
||||
@ -363,7 +380,8 @@ class OEB2HTMLClassCSSizer(OEB2HTML):
|
||||
# Close all open tags.
|
||||
tags.reverse()
|
||||
for t in tags:
|
||||
text.append('</%s>' % t)
|
||||
if t not in SELF_CLOSING_TAGS:
|
||||
text.append('</%s>' % t)
|
||||
|
||||
# Add the text that is outside of the tag.
|
||||
if hasattr(elem, 'tail') and elem.tail:
|
||||
|
@ -745,6 +745,10 @@ class EpubContainer(Container):
|
||||
f.write(guess_type('a.epub'))
|
||||
zip_rebuilder(self.root, outpath)
|
||||
|
||||
@property
|
||||
def path_to_ebook(self):
|
||||
return self.pathtoepub
|
||||
|
||||
# }}}
|
||||
|
||||
# AZW3 {{{
|
||||
@ -839,6 +843,11 @@ class AZW3Container(Container):
|
||||
oeb = create_oebbook(default_log, opf, plumber.opts)
|
||||
set_cover(oeb)
|
||||
outp.convert(oeb, outpath, inp, plumber.opts, default_log)
|
||||
|
||||
@property
|
||||
def path_to_ebook(self):
|
||||
return self.pathtoepub
|
||||
|
||||
# }}}
|
||||
|
||||
def get_container(path, log=None, tdir=None):
|
||||
|
@ -410,7 +410,10 @@ class CSSFlattener(object):
|
||||
|
||||
if cssdict:
|
||||
for x in self.filter_css:
|
||||
cssdict.pop(x, None)
|
||||
popval = cssdict.pop(x, None)
|
||||
if (self.body_font_family and popval and x == 'font-family' and
|
||||
popval.partition(',')[0][1:-1] == self.body_font_family.partition(',')[0][1:-1]):
|
||||
cssdict[x] = popval
|
||||
|
||||
if cssdict:
|
||||
if self.lineh and self.fbase and tag != 'body':
|
||||
|
@ -118,6 +118,7 @@ defs['cover_grid_color'] = (80, 80, 80)
|
||||
defs['cover_grid_cache_size'] = 100
|
||||
defs['cover_grid_disk_cache_size'] = 2500
|
||||
defs['cover_grid_show_title'] = False
|
||||
defs['cover_grid_texture'] = None
|
||||
defs['show_vl_tabs'] = False
|
||||
del defs
|
||||
# }}}
|
||||
@ -753,11 +754,13 @@ class ResizableDialog(QDialog):
|
||||
def __init__(self, *args, **kwargs):
|
||||
QDialog.__init__(self, *args)
|
||||
self.setupUi(self)
|
||||
nh, nw = min_available_height()-25, available_width()-10
|
||||
desktop = QCoreApplication.instance().desktop()
|
||||
geom = desktop.availableGeometry(self)
|
||||
nh, nw = geom.height()-25, geom.width()-10
|
||||
if nh < 0:
|
||||
nh = 800
|
||||
nh = max(800, self.height())
|
||||
if nw < 0:
|
||||
nw = 600
|
||||
nw = max(600, self.height())
|
||||
nh = min(self.height(), nh)
|
||||
nw = min(self.width(), nw)
|
||||
self.resize(nw, nh)
|
||||
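
The sizing fix above replaces the "smallest attached screen" heuristic with the geometry of the screen the dialog will actually be shown on. A condensed sketch of the intended rule (PyQt4 API as used above; ``dialog`` stands in for the ResizableDialog instance):

    # Sketch: clamp the dialog to the available geometry of its own screen.
    geom = QCoreApplication.instance().desktop().availableGeometry(dialog)
    nh = min(dialog.height(), max(800, geom.height() - 25))
    nw = min(dialog.width(),  max(600, geom.width()  - 10))
    dialog.resize(nw, nh)
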
|
@ -5,6 +5,7 @@ __license__ = 'GPL v3'
|
||||
__copyright__ = '2010, Kovid Goyal <kovid@kovidgoyal.net>'
|
||||
__docformat__ = 'restructuredtext en'
|
||||
|
||||
import errno
|
||||
from functools import partial
|
||||
from collections import Counter
|
||||
|
||||
@ -360,7 +361,17 @@ class DeleteAction(InterfaceAction):
|
||||
return
|
||||
next_id = view.next_id
|
||||
if len(to_delete_ids) < 5:
|
||||
view.model().delete_books_by_id(to_delete_ids)
|
||||
try:
|
||||
view.model().delete_books_by_id(to_delete_ids)
|
||||
except IOError as err:
|
||||
if err.errno == errno.EACCES:
|
||||
import traceback
|
||||
fname = getattr(err, 'filename', 'file') or 'file'
|
||||
return error_dialog(self.gui, _('Permission denied'),
|
||||
_('Could not access %s. Is it being used by another'
|
||||
' program? Click "Show details" for more information.')%fname, det_msg=traceback.format_exc(),
|
||||
show=True)
|
||||
|
||||
self.library_ids_deleted2(to_delete_ids, next_id=next_id)
|
||||
else:
|
||||
self.__md = MultiDeleter(self.gui, to_delete_ids,
|
||||
|
@ -20,7 +20,7 @@ class MarkBooksAction(InterfaceAction):
|
||||
action_type = 'current'
|
||||
action_add_menu = True
|
||||
dont_add_to = frozenset([
|
||||
'toolbar-device', 'context-menu-device', 'menubar-device', 'context-menu-cover-browser'])
|
||||
'context-menu-device', 'menubar-device', 'context-menu-cover-browser'])
|
||||
action_menu_clone_qaction = _('Toggle mark for selected books')
|
||||
|
||||
accepts_drops = True
|
||||
|
@ -15,7 +15,11 @@
|
||||
</property>
|
||||
<layout class="QGridLayout" name="gridLayout">
|
||||
<item row="7" column="1">
|
||||
<widget class="QSpinBox" name="opt_toc_threshold"/>
|
||||
<widget class="QSpinBox" name="opt_toc_threshold">
|
||||
<property name="maximum">
|
||||
<number>10000</number>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
<item row="1" column="0" colspan="2">
|
||||
<widget class="QCheckBox" name="opt_use_auto_toc">
|
||||
|
@ -234,9 +234,12 @@ class Comments(Base):
|
||||
self.widgets = [self._box]
|
||||
|
||||
def setter(self, val):
|
||||
if val is None:
|
||||
if not val or not val.strip():
|
||||
val = ''
|
||||
self._tb.html = comments_to_html(val)
|
||||
else:
|
||||
val = comments_to_html(val)
|
||||
self._tb.html = val
|
||||
self._tb.wyswyg_dirtied()
|
||||
|
||||
def getter(self):
|
||||
val = unicode(self._tb.html).strip()
|
||||
|
@ -18,8 +18,8 @@ class CommentsDialog(QDialog, Ui_CommentsDialog):
|
||||
self.setWindowFlags(self.windowFlags()&(~Qt.WindowContextHelpButtonHint))
|
||||
self.setWindowIcon(icon)
|
||||
|
||||
if text is not None:
|
||||
self.textbox.html = comments_to_html(text)
|
||||
self.textbox.html = comments_to_html(text) if text else ''
|
||||
self.textbox.wyswyg_dirtied()
|
||||
# self.textbox.setTabChangesFocus(True)
|
||||
self.buttonBox.button(QDialogButtonBox.Ok).setText(_('&OK'))
|
||||
self.buttonBox.button(QDialogButtonBox.Cancel).setText(_('&Cancel'))
|
||||
|
@ -15,7 +15,7 @@ from calibre import browser, get_download_filename
|
||||
from calibre.ebooks import BOOK_EXTENSIONS
|
||||
from calibre.gui2 import Dispatcher
|
||||
from calibre.gui2.threaded_jobs import ThreadedJob
|
||||
from calibre.ptempfile import PersistentTemporaryFile
|
||||
from calibre.ptempfile import PersistentTemporaryDirectory
|
||||
from calibre.utils.filenames import ascii_filename
|
||||
|
||||
class EbookDownload(object):
|
||||
@ -56,7 +56,8 @@ class EbookDownload(object):
|
||||
cj.load(cookie_file)
|
||||
br.set_cookiejar(cj)
|
||||
with closing(br.open(url)) as r:
|
||||
tf = PersistentTemporaryFile(suffix=filename)
|
||||
temp_path = os.path.join(PersistentTemporaryDirectory(), filename)
|
||||
tf = open(temp_path, 'w+b')
|
||||
tf.write(r.read())
|
||||
dfilename = tf.name
|
||||
|
||||
|
@ -8,7 +8,6 @@ __copyright__ = '2013, Kovid Goyal <kovid at kovidgoyal.net>'
|
||||
|
||||
import itertools, operator, os
|
||||
from types import MethodType
|
||||
from time import time
|
||||
from threading import Event, Thread
|
||||
from Queue import LifoQueue
|
||||
from functools import wraps, partial
|
||||
@ -19,10 +18,11 @@ from PyQt4.Qt import (
|
||||
QTimer, QPalette, QColor, QItemSelection, QPixmap, QMenu, QApplication,
|
||||
QMimeData, QUrl, QDrag, QPoint, QPainter, QRect, pyqtProperty, QEvent,
|
||||
QPropertyAnimation, QEasingCurve, pyqtSlot, QHelpEvent, QAbstractItemView,
|
||||
QStyleOptionViewItem, QToolTip, QByteArray, QBuffer)
|
||||
QStyleOptionViewItem, QToolTip, QByteArray, QBuffer, QBrush)
|
||||
|
||||
from calibre import fit_image, prints, prepare_string_for_xml
|
||||
from calibre.ebooks.metadata import fmt_sidx
|
||||
from calibre.utils import join_with_timeout
|
||||
from calibre.gui2 import gprefs, config
|
||||
from calibre.gui2.library.caches import CoverCache, ThumbnailCache
|
||||
from calibre.utils.config import prefs, tweaks
|
||||
@ -482,17 +482,6 @@ class CoverDelegate(QStyledItemDelegate):
|
||||
return True
|
||||
return False
|
||||
|
||||
def join_with_timeout(q, timeout=2):
|
||||
q.all_tasks_done.acquire()
|
||||
try:
|
||||
endtime = time() + timeout
|
||||
while q.unfinished_tasks:
|
||||
remaining = endtime - time()
|
||||
if remaining <= 0.0:
|
||||
raise RuntimeError('Waiting for queue to clear timed out')
|
||||
q.all_tasks_done.wait(remaining)
|
||||
finally:
|
||||
q.all_tasks_done.release()
|
||||
# }}}
|
||||
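
join_with_timeout, now imported from calibre.utils above, is a Queue.join() that gives up: it waits on all_tasks_done until either unfinished_tasks drops to zero or the deadline passes, raising RuntimeError on timeout. Usage sketch (illustrative):

    # Sketch: wait briefly for pending tasks instead of blocking forever.
    from Queue import Queue
    q = Queue()
    # ... producer puts work, a worker thread calls q.task_done() per item ...
    try:
        join_with_timeout(q, timeout=2)
    except RuntimeError:
        pass  # timed out; remaining tasks are abandoned or cleaned up elsewhere
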
|
||||
# The View {{{
|
||||
@ -586,6 +575,12 @@ class GridView(QListView):
|
||||
pal = QPalette()
|
||||
col = QColor(r, g, b)
|
||||
pal.setColor(pal.Base, col)
|
||||
tex = gprefs['cover_grid_texture']
|
||||
if tex:
|
||||
from calibre.gui2.preferences.texture_chooser import texture_path
|
||||
path = texture_path(tex)
|
||||
if path:
|
||||
pal.setBrush(pal.Base, QBrush(QPixmap(path)))
|
||||
dark = (r + g + b)/3.0 < 128
|
||||
pal.setColor(pal.Text, QColor(Qt.white if dark else Qt.black))
|
||||
self.setPalette(pal)
|
||||
|
@ -928,7 +928,10 @@ class BooksModel(QAbstractTableModel): # {{{
|
||||
if role == Qt.DisplayRole: # orientation is vertical
|
||||
return QVariant(section+1)
|
||||
if role == Qt.DecorationRole:
|
||||
return self.marked_icon if self.db.data.get_marked(self.db.data.index_to_id(section)) else self.row_decoration
|
||||
try:
|
||||
return self.marked_icon if self.db.data.get_marked(self.db.data.index_to_id(section)) else self.row_decoration
|
||||
except (ValueError, IndexError):
|
||||
pass
|
||||
return NONE
|
||||
|
||||
def flags(self, index):
|
||||
|
@ -66,7 +66,7 @@ class HeaderView(QHeaderView): # {{{
|
||||
try:
|
||||
opt.icon = model.headerData(logical_index, opt.orientation, Qt.DecorationRole)
|
||||
opt.iconAlignment = Qt.AlignVCenter
|
||||
except TypeError:
|
||||
except (IndexError, ValueError, TypeError):
|
||||
pass
|
||||
if sm.isRowSelected(logical_index, QModelIndex()):
|
||||
opt.state |= QStyle.State_Sunken
|
||||
@ -693,7 +693,13 @@ class BooksView(QTableView): # {{{
|
||||
self.alternate_views.marked_changed(old_marked, current_marked)
|
||||
if bool(old_marked) == bool(current_marked):
|
||||
changed = old_marked | current_marked
|
||||
sections = tuple(map(self.model().db.data.id_to_index, changed))
|
||||
i = self.model().db.data.id_to_index
|
||||
def f(x):
|
||||
try:
|
||||
return i(x)
|
||||
except ValueError:
|
||||
pass
|
||||
sections = tuple(x for x in map(f, changed) if x is not None)
|
||||
self.row_header.headerDataChanged(Qt.Vertical, min(sections), max(sections))
|
||||
else:
|
||||
# Marked items have either appeared or all been removed
|
||||
|
@ -8,13 +8,15 @@ __docformat__ = 'restructuredtext en'
|
||||
from threading import Thread
|
||||
from functools import partial
|
||||
|
||||
from PyQt4.Qt import (QApplication, QFont, QFontInfo, QFontDialog, QColorDialog,
|
||||
QAbstractListModel, Qt, QIcon, QKeySequence, QPalette, QColor, pyqtSignal)
|
||||
from PyQt4.Qt import (
|
||||
QApplication, QFont, QFontInfo, QFontDialog, QColorDialog, QPainter,
|
||||
QAbstractListModel, Qt, QIcon, QKeySequence, QColor, pyqtSignal,
|
||||
QWidget, QSizePolicy, QBrush, QPixmap, QSize, QPushButton)
|
||||
|
||||
from calibre import human_readable
|
||||
from calibre.gui2.preferences import ConfigWidgetBase, test_widget, CommaSeparatedList
|
||||
from calibre.gui2.preferences.look_feel_ui import Ui_Form
|
||||
from calibre.gui2 import config, gprefs, qt_app, NONE, open_local_file
|
||||
from calibre.gui2 import config, gprefs, qt_app, NONE, open_local_file, question_dialog
|
||||
from calibre.utils.localization import (available_translations,
|
||||
get_language, get_lang)
|
||||
from calibre.utils.config import prefs
|
||||
@ -98,6 +100,33 @@ class DisplayedFields(QAbstractListModel): # {{{
|
||||
|
||||
# }}}
|
||||
|
||||
class Background(QWidget): # {{{
|
||||
|
||||
def __init__(self, parent):
|
||||
QWidget.__init__(self, parent)
|
||||
self.bcol = QColor(*gprefs['cover_grid_color'])
|
||||
self.btex = gprefs['cover_grid_texture']
|
||||
self.update_brush()
|
||||
self.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Fixed)
|
||||
|
||||
def update_brush(self):
|
||||
self.brush = QBrush(self.bcol)
|
||||
if self.btex:
|
||||
from calibre.gui2.preferences.texture_chooser import texture_path
|
||||
path = texture_path(self.btex)
|
||||
if path:
|
||||
self.brush.setTexture(QPixmap(path))
|
||||
self.update()
|
||||
|
||||
def sizeHint(self):
|
||||
return QSize(200, 120)
|
||||
|
||||
def paintEvent(self, ev):
|
||||
painter = QPainter(self)
|
||||
painter.fillRect(ev.rect(), self.brush)
|
||||
painter.end()
|
||||
# }}}
|
||||
|
||||
class ConfigWidget(ConfigWidgetBase, Ui_Form):
|
||||
|
||||
size_calculated = pyqtSignal(object)
|
||||
@ -209,10 +238,24 @@ class ConfigWidget(ConfigWidgetBase, Ui_Form):
|
||||
keys = [unicode(x.toString(QKeySequence.NativeText)) for x in keys]
|
||||
self.fs_help_msg.setText(unicode(self.fs_help_msg.text())%(
|
||||
_(' or ').join(keys)))
|
||||
self.cover_grid_color_button.clicked.connect(self.change_cover_grid_color)
|
||||
self.cover_grid_default_color_button.clicked.connect(self.restore_cover_grid_color)
|
||||
self.size_calculated.connect(self.update_cg_cache_size, type=Qt.QueuedConnection)
|
||||
self.tabWidget.currentChanged.connect(self.tab_changed)
|
||||
|
||||
l = self.cg_background_box.layout()
|
||||
self.cg_bg_widget = w = Background(self)
|
||||
l.addWidget(w, 0, 0, 3, 1)
|
||||
self.cover_grid_color_button = b = QPushButton(_('Change &color'), self)
|
||||
b.setSizePolicy(QSizePolicy.Fixed, QSizePolicy.Fixed)
|
||||
l.addWidget(b, 0, 1)
|
||||
b.clicked.connect(self.change_cover_grid_color)
|
||||
self.cover_grid_texture_button = b = QPushButton(_('Change &background image'), self)
|
||||
b.setSizePolicy(QSizePolicy.Fixed, QSizePolicy.Fixed)
|
||||
l.addWidget(b, 1, 1)
|
||||
b.clicked.connect(self.change_cover_grid_texture)
|
||||
self.cover_grid_default_appearance_button = b = QPushButton(_('Restore &default appearance'), self)
|
||||
b.setSizePolicy(QSizePolicy.Fixed, QSizePolicy.Fixed)
|
||||
l.addWidget(b, 2, 1)
|
||||
b.clicked.connect(self.restore_cover_grid_appearance)
|
||||
self.cover_grid_empty_cache.clicked.connect(self.empty_cache)
|
||||
self.cover_grid_open_cache.clicked.connect(self.open_cg_cache)
|
||||
self.cover_grid_smaller_cover.clicked.connect(partial(self.resize_cover, True))
|
||||
@ -270,6 +313,7 @@ class ConfigWidget(ConfigWidgetBase, Ui_Form):
|
||||
self.edit_rules.initialize(db.field_metadata, db.prefs, mi, 'column_color_rules')
|
||||
self.icon_rules.initialize(db.field_metadata, db.prefs, mi, 'column_icon_rules')
|
||||
self.set_cg_color(gprefs['cover_grid_color'])
|
||||
self.set_cg_texture(gprefs['cover_grid_texture'])
|
||||
self.update_aspect_ratio()
|
||||
|
||||
def open_cg_cache(self):
|
||||
@ -292,9 +336,12 @@ class ConfigWidget(ConfigWidgetBase, Ui_Form):
|
||||
self.size_calculated.emit(self.gui.grid_view.thumbnail_cache.current_size)
|
||||
|
||||
def set_cg_color(self, val):
|
||||
pal = QPalette()
|
||||
pal.setColor(QPalette.Window, QColor(*val))
|
||||
self.cover_grid_color_label.setPalette(pal)
|
||||
self.cg_bg_widget.bcol = QColor(*val)
|
||||
self.cg_bg_widget.update_brush()
|
||||
|
||||
def set_cg_texture(self, val):
|
||||
self.cg_bg_widget.btex = val
|
||||
self.cg_bg_widget.update_brush()
|
||||
|
||||
def empty_cache(self):
|
||||
self.gui.grid_view.thumbnail_cache.empty()
|
||||
@ -312,17 +359,32 @@ class ConfigWidget(ConfigWidgetBase, Ui_Form):
|
||||
self.icon_rules.clear()
|
||||
self.changed_signal.emit()
|
||||
self.set_cg_color(gprefs.defaults['cover_grid_color'])
|
||||
self.set_cg_texture(gprefs.defaults['cover_grid_texture'])
|
||||
|
||||
def change_cover_grid_color(self):
|
||||
col = QColorDialog.getColor(self.cover_grid_color_label.palette().color(QPalette.Window),
|
||||
col = QColorDialog.getColor(self.cg_bg_widget.bcol,
|
||||
self.gui, _('Choose background color for cover grid'))
|
||||
if col.isValid():
|
||||
col = tuple(col.getRgb())[:3]
|
||||
self.set_cg_color(col)
|
||||
self.changed_signal.emit()
|
||||
if self.cg_bg_widget.btex:
|
||||
if question_dialog(
|
||||
self, _('Remove background image?'),
|
||||
_('There is currently a background image set, so the color'
|
||||
' you have chosen will not be visible. Remove the background image?')):
|
||||
self.set_cg_texture(None)
|
||||
|
||||
def restore_cover_grid_color(self):
|
||||
def change_cover_grid_texture(self):
|
||||
from calibre.gui2.preferences.texture_chooser import TextureChooser
|
||||
d = TextureChooser(parent=self, initial=self.cg_bg_widget.btex)
|
||||
if d.exec_() == d.Accepted:
|
||||
self.set_cg_texture(d.texture)
|
||||
self.changed_signal.emit()
|
||||
|
||||
def restore_cover_grid_appearance(self):
|
||||
self.set_cg_color(gprefs.defaults['cover_grid_color'])
|
||||
self.set_cg_texture(gprefs.defaults['cover_grid_texture'])
|
||||
self.changed_signal.emit()
|
||||
|
||||
def build_font_obj(self):
|
||||
@ -383,7 +445,8 @@ class ConfigWidget(ConfigWidgetBase, Ui_Form):
|
||||
self.display_model.commit()
|
||||
self.edit_rules.commit(self.gui.current_db.prefs)
|
||||
self.icon_rules.commit(self.gui.current_db.prefs)
|
||||
gprefs['cover_grid_color'] = tuple(self.cover_grid_color_label.palette().color(QPalette.Window).getRgb())[:3]
|
||||
gprefs['cover_grid_color'] = tuple(self.cg_bg_widget.bcol.getRgb())[:3]
|
||||
gprefs['cover_grid_texture'] = self.cg_bg_widget.btex
|
||||
return rr
|
||||
|
||||
def refresh_gui(self, gui):
|
||||
|
@ -312,61 +312,12 @@
|
||||
</layout>
|
||||
</item>
|
||||
<item>
|
||||
<layout class="QHBoxLayout" name="horizontalLayout_2">
|
||||
<item>
|
||||
<widget class="QLabel" name="label_14">
|
||||
<property name="text">
|
||||
<string>Background color for the cover grid:</string>
|
||||
</property>
|
||||
<property name="buddy">
|
||||
<cstring>cover_grid_color_button</cstring>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
<item>
|
||||
<widget class="QLabel" name="cover_grid_color_label">
|
||||
<property name="minimumSize">
|
||||
<size>
|
||||
<width>50</width>
|
||||
<height>50</height>
|
||||
</size>
|
||||
</property>
|
||||
<property name="autoFillBackground">
|
||||
<bool>true</bool>
|
||||
</property>
|
||||
<property name="text">
|
||||
<string/>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
<item>
|
||||
<widget class="QPushButton" name="cover_grid_color_button">
|
||||
<property name="text">
|
||||
<string>Change &color</string>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
<item>
|
||||
<widget class="QPushButton" name="cover_grid_default_color_button">
|
||||
<property name="text">
|
||||
<string>Restore &default color</string>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
<item>
|
||||
<spacer name="horizontalSpacer_2">
|
||||
<property name="orientation">
|
||||
<enum>Qt::Horizontal</enum>
|
||||
</property>
|
||||
<property name="sizeHint" stdset="0">
|
||||
<size>
|
||||
<width>40</width>
|
||||
<height>20</height>
|
||||
</size>
|
||||
</property>
|
||||
</spacer>
|
||||
</item>
|
||||
</layout>
|
||||
<widget class="QGroupBox" name="cg_background_box">
|
||||
<property name="title">
|
||||
<string>Background for the cover grid</string>
|
||||
</property>
|
||||
<layout class="QGridLayout" name="gridLayout_5"/>
|
||||
</widget>
|
||||
</item>
|
||||
<item>
|
||||
<widget class="QGroupBox" name="groupBox_4">
|
||||
|
151
src/calibre/gui2/preferences/texture_chooser.py
Normal file
@ -0,0 +1,151 @@
|
||||
#!/usr/bin/env python
|
||||
# vim:fileencoding=utf-8
|
||||
from __future__ import (unicode_literals, division, absolute_import,
|
||||
print_function)
|
||||
|
||||
__license__ = 'GPL v3'
|
||||
__copyright__ = '2013, Kovid Goyal <kovid at kovidgoyal.net>'
|
||||
|
||||
import glob, os, string, shutil
|
||||
from functools import partial
|
||||
from PyQt4.Qt import (
|
||||
QDialog, QVBoxLayout, QListWidget, QListWidgetItem, Qt, QIcon,
|
||||
QApplication, QSize, QPixmap, QDialogButtonBox, QTimer, QLabel)
|
||||
|
||||
from calibre.constants import config_dir
|
||||
from calibre.gui2 import choose_files, error_dialog
|
||||
from calibre.utils.icu import sort_key
|
||||
|
||||
def texture_dir():
|
||||
ans = os.path.join(config_dir, 'textures')
|
||||
if not os.path.exists(ans):
|
||||
os.makedirs(ans)
|
||||
return ans
|
||||
|
||||
def texture_path(fname):
|
||||
if not fname:
|
||||
return
|
||||
if fname.startswith(':'):
|
||||
return I('textures/%s' % fname[1:])
|
||||
return os.path.join(texture_dir(), fname)
|
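
texture_path distinguishes builtin textures (shipped with the application resources, referenced with a leading colon) from user-added ones (copied into the config directory). A usage sketch, with the specific file names made up for illustration:

    # Hypothetical file names, for illustration only.
    builtin = texture_path(':some_builtin.png')   # resource path via I('textures/...')
    custom  = texture_path('my_texture.jpg')      # os.path.join(texture_dir(), 'my_texture.jpg')
    nothing = texture_path(None)                  # returns None
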
||||
|
||||
class TextureChooser(QDialog):
|
||||
|
||||
def __init__(self, parent=None, initial=None):
|
||||
QDialog.__init__(self, parent)
|
||||
self.setWindowTitle(_('Choose a texture'))
|
||||
|
||||
self.l = l = QVBoxLayout()
|
||||
self.setLayout(l)
|
||||
|
||||
self.tdir = texture_dir()
|
||||
|
||||
self.images = il = QListWidget(self)
|
||||
il.itemDoubleClicked.connect(self.accept, type=Qt.QueuedConnection)
|
||||
il.setIconSize(QSize(256, 256))
|
||||
il.setViewMode(il.IconMode)
|
||||
il.setFlow(il.LeftToRight)
|
||||
il.setSpacing(20)
|
||||
il.setSelectionMode(il.SingleSelection)
|
||||
il.itemSelectionChanged.connect(self.update_remove_state)
|
||||
l.addWidget(il)
|
||||
|
||||
self.ad = ad = QLabel(_('The builtin textures come from <a href="http://subtlepatterns.com">subtlepatterns.com</a>.'))
|
||||
ad.setOpenExternalLinks(True)
|
||||
ad.setWordWrap(True)
|
||||
l.addWidget(ad)
|
||||
self.bb = bb = QDialogButtonBox(QDialogButtonBox.Ok|QDialogButtonBox.Cancel)
|
||||
bb.accepted.connect(self.accept)
|
||||
bb.rejected.connect(self.reject)
|
||||
b = self.add_button = bb.addButton(_('Add texture'), bb.ActionRole)
|
||||
b.setIcon(QIcon(I('plus.png')))
|
||||
b.clicked.connect(self.add_texture)
|
||||
b = self.remove_button = bb.addButton(_('Remove texture'), bb.ActionRole)
|
||||
b.setIcon(QIcon(I('minus.png')))
|
||||
b.clicked.connect(self.remove_texture)
|
||||
l.addWidget(bb)
|
||||
|
||||
images = [{
|
||||
'fname': ':'+os.path.basename(x),
|
||||
'path': x,
|
||||
'name': ' '.join(map(string.capitalize, os.path.splitext(os.path.basename(x))[0].split('_')))
|
||||
} for x in glob.glob(I('textures/*.png'))] + [{
|
||||
'fname': os.path.basename(x),
|
||||
'path': x,
|
||||
'name': os.path.splitext(os.path.basename(x))[0],
|
||||
} for x in glob.glob(os.path.join(self.tdir, '*')) if x.rpartition('.')[-1].lower() in {'jpeg', 'png', 'jpg'}]
|
||||
|
||||
images.sort(key=lambda x:sort_key(x['name']))
|
||||
|
||||
map(self.create_item, images)
|
||||
self.update_remove_state()
|
||||
|
||||
if initial:
|
||||
existing = {unicode(i.data(Qt.UserRole).toString()):i for i in (self.images.item(c) for c in xrange(self.images.count()))}
|
||||
item = existing.get(initial, None)
|
||||
if item is not None:
|
||||
item.setSelected(True)
|
||||
QTimer.singleShot(100, partial(il.scrollToItem, item))
|
||||
|
||||
self.resize(QSize(950, 650))
|
||||
|
||||
def create_item(self, data):
|
||||
x = data
|
||||
i = QListWidgetItem(QIcon(QPixmap(x['path']).scaled(256, 256, transformMode=Qt.SmoothTransformation)), x['name'], self.images)
|
||||
i.setData(Qt.UserRole, x['fname'])
|
||||
i.setData(Qt.UserRole+1, x['path'])
|
||||
return i
|
||||
|
||||
def update_remove_state(self):
|
||||
removeable = bool(self.selected_fname and not self.selected_fname.startswith(':'))
|
||||
self.remove_button.setEnabled(removeable)
|
||||
|
||||
@property
|
||||
def texture(self):
|
||||
return self.selected_fname
|
||||
|
||||
def add_texture(self):
|
||||
path = choose_files(self, 'choose-texture-image', _('Choose Image'),
|
||||
filters=[(_('Images'), ['jpeg', 'jpg', 'png'])], all_files=False, select_only_single_file=True)
|
||||
if not path:
|
||||
return
|
||||
path = path[0]
|
||||
fname = os.path.basename(path)
|
||||
name = fname.rpartition('.')[0]
|
||||
existing = {unicode(i.data(Qt.UserRole).toString()):i for i in (self.images.item(c) for c in xrange(self.images.count()))}
|
||||
dest = os.path.join(self.tdir, fname)
|
||||
with open(path, 'rb') as s, open(dest, 'wb') as f:
|
||||
shutil.copyfileobj(s, f)
|
||||
if fname in existing:
|
||||
self.takeItem(existing[fname])
|
||||
data = {'fname': fname, 'path': dest, 'name': name}
|
||||
i = self.create_item(data)
|
||||
i.setSelected(True)
|
||||
self.images.scrollToItem(i)
|
||||
|
||||
@property
|
||||
def selected_item(self):
|
||||
for x in self.images.selectedItems():
|
||||
return x
|
||||
|
||||
@property
|
||||
def selected_fname(self):
|
||||
try:
|
||||
return unicode(self.selected_item.data(Qt.UserRole).toString())
|
||||
except (AttributeError, TypeError):
|
||||
pass
|
||||
|
||||
def remove_texture(self):
|
||||
if not self.selected_fname:
|
||||
return
|
||||
if self.selected_fname.startswith(':'):
|
||||
return error_dialog(self, _('Cannot remove'),
|
||||
_('Cannot remove builtin textures'), show=True)
|
||||
os.remove(unicode(self.selected_item.data(Qt.UserRole+1).toString()))
|
||||
self.images.takeItem(self.images.row(self.selected_item))
|
||||
|
||||
if __name__ == '__main__':
|
||||
app = QApplication([]) # noqa
|
||||
d = TextureChooser()
|
||||
d.exec_()
|
||||
print (d.texture)
|
@ -6,6 +6,8 @@ from __future__ import (unicode_literals, division, absolute_import,
|
||||
__license__ = 'GPL v3'
|
||||
__copyright__ = '2013, Kovid Goyal <kovid at kovidgoyal.net>'
|
||||
|
||||
from calibre.utils.config import JSONConfig
|
||||
tprefs = JSONConfig('tweak_book_gui')
|
||||
|
||||
_current_container = None
|
||||
|
||||
|
@ -8,22 +8,25 @@ __copyright__ = '2013, Kovid Goyal <kovid at kovidgoyal.net>'
|
||||
|
||||
import tempfile, shutil
|
||||
|
||||
from PyQt4.Qt import QObject
|
||||
from PyQt4.Qt import QObject, QApplication
|
||||
|
||||
from calibre.gui2 import error_dialog, choose_files
|
||||
from calibre.gui2 import error_dialog, choose_files, question_dialog, info_dialog
|
||||
from calibre.ptempfile import PersistentTemporaryDirectory
|
||||
from calibre.ebooks.oeb.polish.main import SUPPORTED
|
||||
from calibre.ebooks.oeb.polish.container import get_container, clone_container
|
||||
from calibre.gui2.tweak_book import set_current_container, current_container
|
||||
from calibre.gui2.tweak_book import set_current_container, current_container, tprefs
|
||||
from calibre.gui2.tweak_book.undo import GlobalUndoHistory
|
||||
from calibre.gui2.tweak_book.save import SaveManager
|
||||
|
||||
class Boss(QObject):
|
||||
|
||||
def __init__(self, parent=None):
|
||||
def __init__(self, parent):
|
||||
QObject.__init__(self, parent)
|
||||
self.global_undo = GlobalUndoHistory()
|
||||
self.container_count = 0
|
||||
self.tdir = None
|
||||
self.save_manager = SaveManager(parent)
|
||||
self.save_manager.report_error.connect(self.report_save_error)
|
||||
|
||||
def __call__(self, gui):
|
||||
self.gui = gui
|
||||
@ -40,6 +43,10 @@ class Boss(QObject):
|
||||
def open_book(self, path=None):
|
||||
if not self.check_dirtied():
|
||||
return
|
||||
if self.save_manager.has_tasks:
|
||||
return info_dialog(self.gui, _('Cannot open'),
|
||||
_('The current book is being saved, you cannot open a new book until'
|
||||
' the saving is completed'), show=True)
|
||||
|
||||
if not hasattr(path, 'rpartition'):
|
||||
path = choose_files(self.gui, 'open-book-for-tweaking', _('Choose book'),
|
||||
@ -71,12 +78,41 @@ class Boss(QObject):
|
||||
self.current_metadata = self.gui.current_metadata = container.mi
|
||||
self.global_undo.open_book(container)
|
||||
self.gui.update_window_title()
|
||||
self.gui.file_list.build(container)
|
||||
self.gui.file_list.build(container, preserve_state=False)
|
||||
self.gui.action_save.setEnabled(False)
|
||||
self.update_global_history_actions()
|
||||
|
||||
def update_global_history_actions(self):
|
||||
gu = self.global_undo
|
||||
for x, text in (('undo', _('&Revert to before')), ('redo', '&Revert to after')):
|
||||
ac = getattr(self.gui, 'action_global_%s' % x)
|
||||
ac.setEnabled(getattr(gu, 'can_' + x))
|
||||
ac.setText(text + ' ' + (getattr(gu, x + '_msg') or '...'))
|
||||
|
||||
def add_savepoint(self, msg):
|
||||
nc = clone_container(current_container(), self.mkdtemp())
|
||||
self.global_undo.add_savepoint(nc, msg)
|
||||
set_current_container(nc)
|
||||
self.update_global_history_actions()
|
||||
|
||||
def apply_container_update_to_gui(self):
|
||||
container = current_container()
|
||||
self.gui.file_list.build(container)
|
||||
self.update_global_history_actions()
|
||||
self.gui.action_save.setEnabled(True)
|
||||
# TODO: Apply to other GUI elements
|
||||
|
||||
def do_global_undo(self):
|
||||
container = self.global_undo.undo()
|
||||
if container is not None:
|
||||
set_current_container(container)
|
||||
self.apply_container_update_to_gui()
|
||||
|
||||
def do_global_redo(self):
|
||||
container = self.global_undo.redo()
|
||||
if container is not None:
|
||||
set_current_container(container)
|
||||
self.apply_container_update_to_gui()
|
||||
|
||||
def delete_requested(self, spine_items, other_items):
|
||||
if not self.check_dirtied():
|
||||
@ -86,6 +122,48 @@ class Boss(QObject):
|
||||
c.remove_from_spine(spine_items)
|
||||
for name in other_items:
|
||||
c.remove_item(name)
|
||||
self.gui.action_save.setEnabled(True)
|
||||
self.gui.file_list.delete_done(spine_items, other_items)
|
||||
# TODO: Update other GUI elements
|
||||
|
||||
def save_book(self):
|
||||
self.gui.action_save.setEnabled(False)
|
||||
tdir = tempfile.mkdtemp(prefix='save-%05d-' % self.container_count, dir=self.tdir)
|
||||
container = clone_container(current_container(), tdir)
|
||||
self.save_manager.schedule(tdir, container)
|
||||
|
||||
def report_save_error(self, tb):
|
||||
error_dialog(self.gui, _('Could not save'),
|
||||
_('Saving of the book failed. Click "Show Details"'
|
||||
' for more information.'), det_msg=tb, show=True)
|
||||
|
||||
def quit(self):
|
||||
if not self.confirm_quit():
|
||||
return
|
||||
self.save_state()
|
||||
QApplication.instance().quit()
|
||||
|
||||
def confirm_quit(self):
|
||||
if self.save_manager.has_tasks:
|
||||
if not question_dialog(
|
||||
self.gui, _('Are you sure?'), _(
|
||||
'The current book is being saved in the background, quitting will abort'
|
||||
' the save process, are you sure?'), default_yes=False):
|
||||
return False
|
||||
if self.gui.action_save.isEnabled():
|
||||
if not question_dialog(
|
||||
self.gui, _('Are you sure?'), _(
|
||||
'The current book has unsaved changes, you will lose them if you quit,'
|
||||
' are you sure?'), default_yes=False):
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def shutdown(self):
|
||||
self.save_state()
|
||||
self.save_manager.shutdown()
|
||||
self.save_manager.wait(0.1)
|
||||
|
||||
def save_state(self):
|
||||
with tprefs:
|
||||
self.gui.save_state()
|
||||
|
@ -6,6 +6,7 @@ from __future__ import (unicode_literals, division, absolute_import,
|
||||
__license__ = 'GPL v3'
|
||||
__copyright__ = '2013, Kovid Goyal <kovid at kovidgoyal.net>'
|
||||
|
||||
from binascii import hexlify
|
||||
from PyQt4.Qt import (
|
||||
QWidget, QTreeWidget, QGridLayout, QSize, Qt, QTreeWidgetItem, QIcon,
|
||||
QStyledItemDelegate, QStyle, QPixmap, QPainter, pyqtSignal)
|
||||
@ -16,6 +17,7 @@ from calibre.ebooks.oeb.polish.container import guess_type
|
||||
from calibre.ebooks.oeb.polish.cover import get_cover_page_name, get_raster_cover_name
|
||||
from calibre.gui2 import error_dialog
|
||||
from calibre.gui2.tweak_book import current_container
|
||||
from calibre.utils.icu import sort_key
|
||||
|
||||
TOP_ICON_SIZE = 24
|
||||
NAME_ROLE = Qt.UserRole
|
||||
@ -73,7 +75,6 @@ class FileList(QTreeWidget):
|
||||
self.setAutoExpandDelay(1000)
|
||||
self.setAnimated(True)
|
||||
self.setMouseTracking(True)
|
||||
self.in_drop_event = False
|
||||
self.setContextMenuPolicy(Qt.CustomContextMenu)
|
||||
self.customContextMenuRequested.connect(self.show_context_menu)
|
||||
self.root = self.invisibleRootItem()
|
||||
@ -89,7 +90,25 @@ class FileList(QTreeWidget):
|
||||
'images':'view-image.png',
|
||||
}.iteritems()}
|
||||
|
||||
def build(self, container):
|
||||
def get_state(self):
|
||||
s = {'pos':self.verticalScrollBar().value()}
|
||||
s['expanded'] = {c for c, item in self.categories.iteritems() if item.isExpanded()}
|
||||
s['selected'] = {unicode(i.data(0, NAME_ROLE).toString()) for i in self.selectedItems()}
|
||||
return s
|
||||
|
||||
def set_state(self, state):
|
||||
for category, item in self.categories.iteritems():
|
||||
item.setExpanded(category in state['expanded'])
|
||||
self.verticalScrollBar().setValue(state['pos'])
|
||||
for parent in self.categories.itervalues():
|
||||
for c in (parent.child(i) for i in xrange(parent.childCount())):
|
||||
name = unicode(c.data(0, NAME_ROLE).toString())
|
||||
if name in state['selected']:
|
||||
c.setSelected(True)
|
||||
|
||||
def build(self, container, preserve_state=True):
|
||||
if preserve_state:
|
||||
state = self.get_state()
|
||||
self.clear()
|
||||
self.root = self.invisibleRootItem()
|
||||
self.root.setFlags(Qt.ItemIsDragEnabled)
|
||||
@ -140,6 +159,7 @@ class FileList(QTreeWidget):
|
||||
# We have an exact duplicate (can happen if there are
|
||||
# duplicates in the spine)
|
||||
item.setText(0, processed[name].text(0))
|
||||
item.setText(1, processed[name].text(1))
|
||||
return
|
||||
|
||||
parts = name.split('/')
|
||||
@ -148,6 +168,7 @@ class FileList(QTreeWidget):
|
||||
text = parts.pop() + '/' + text
|
||||
seen[text] = item
|
||||
item.setText(0, text)
|
||||
item.setText(1, hexlify(sort_key(text)))
|
||||
|
||||
def render_emblems(item, emblems):
|
||||
emblems = tuple(emblems)
|
||||
@ -220,11 +241,16 @@ class FileList(QTreeWidget):
|
||||
continue
|
||||
processed[name] = create_item(name)
|
||||
|
||||
for c in self.categories.itervalues():
|
||||
self.expandItem(c)
|
||||
for name, c in self.categories.iteritems():
|
||||
c.setExpanded(True)
|
||||
if name != 'text':
|
||||
c.sortChildren(1, Qt.AscendingOrder)
|
||||
|
||||
if preserve_state:
|
||||
self.set_state(state)
|
||||
|
||||
def show_context_menu(self, point):
|
||||
pass
|
||||
pass # TODO: Implement this
|
||||
|
||||
def keyPressEvent(self, ev):
|
||||
if ev.key() in (Qt.Key_Delete, Qt.Key_Backspace):
|
||||
@ -262,6 +288,14 @@ class FileList(QTreeWidget):
|
||||
for c in removals:
|
||||
c.parent().removeChild(c)
|
||||
|
||||
def dropEvent(self, event):
|
||||
text = self.categories['text']
|
||||
pre_drop_order = {text.child(i):i for i in xrange(text.childCount())}
|
||||
super(FileList, self).dropEvent(event)
|
||||
current_order = {text.child(i):i for i in xrange(text.childCount())}
|
||||
if current_order != pre_drop_order:
|
||||
pass # TODO: Implement this
|
||||
|
||||
class FileListWidget(QWidget):
|
||||
|
||||
delete_requested = pyqtSignal(object, object)
|
||||
@ -277,7 +311,7 @@ class FileListWidget(QWidget):
|
||||
for x in ('delete_done',):
|
||||
setattr(self, x, getattr(self.file_list, x))
|
||||
|
||||
def build(self, container):
|
||||
self.file_list.build(container)
|
||||
def build(self, container, preserve_state=True):
|
||||
self.file_list.build(container, preserve_state=preserve_state)
|
||||
|
||||
|
||||
|
136
src/calibre/gui2/tweak_book/save.py
Normal file
@ -0,0 +1,136 @@
|
||||
#!/usr/bin/env python
|
||||
# vim:fileencoding=utf-8
|
||||
from __future__ import (unicode_literals, division, absolute_import,
|
||||
print_function)
|
||||
|
||||
__license__ = 'GPL v3'
|
||||
__copyright__ = '2013, Kovid Goyal <kovid at kovidgoyal.net>'
|
||||
|
||||
import shutil, os
|
||||
from threading import Thread
|
||||
from Queue import LifoQueue, Empty
|
||||
|
||||
from PyQt4.Qt import (QObject, pyqtSignal, QLabel, QWidget, QHBoxLayout, Qt)
|
||||
|
||||
from calibre.constants import iswindows
|
||||
from calibre.ptempfile import PersistentTemporaryFile
|
||||
from calibre.gui2.progress_indicator import ProgressIndicator
|
||||
from calibre.utils import join_with_timeout
|
||||
from calibre.utils.filenames import atomic_rename
|
||||
|
||||
class SaveWidget(QWidget):
|
||||
|
||||
def __init__(self, parent=None):
|
||||
QWidget.__init__(self, parent)
|
||||
self.l = l = QHBoxLayout(self)
|
||||
self.setLayout(l)
|
||||
self.label = QLabel('')
|
||||
self.pi = ProgressIndicator(self, 24)
|
||||
l.addWidget(self.label)
|
||||
l.addWidget(self.pi)
|
||||
l.setContentsMargins(0, 0, 0, 0)
|
||||
self.pi.setVisible(False)
|
||||
self.stop()
|
||||
|
||||
def start(self):
|
||||
self.pi.setVisible(True)
|
||||
self.pi.startAnimation()
|
||||
self.label.setText(_('Saving...'))
|
||||
|
||||
def stop(self):
|
||||
self.pi.setVisible(False)
|
||||
self.pi.stopAnimation()
|
||||
self.label.setText(_('Saved'))
|
||||
|
||||
class SaveManager(QObject):
|
||||
|
||||
start_save = pyqtSignal()
|
||||
report_error = pyqtSignal(object)
|
||||
save_done = pyqtSignal()
|
||||
|
||||
def __init__(self, parent):
|
||||
QObject.__init__(self, parent)
|
||||
self.count = 0
|
||||
self.last_saved = -1
|
||||
self.requests = LifoQueue()
|
||||
t = Thread(name='save-thread', target=self.run)
|
||||
t.daemon = True
|
||||
t.start()
|
||||
self.status_widget = w = SaveWidget(parent)
|
||||
self.start_save.connect(w.start, type=Qt.QueuedConnection)
|
||||
self.save_done.connect(w.stop, type=Qt.QueuedConnection)
|
||||
|
||||
def schedule(self, tdir, container):
|
||||
self.count += 1
|
||||
self.requests.put((self.count, tdir, container))
|
||||
|
||||
def run(self):
|
||||
while True:
|
||||
x = self.requests.get()
|
||||
if x is None:
|
||||
self.requests.task_done()
|
||||
self.__empty_queue()
|
||||
break
|
||||
try:
|
||||
count, tdir, container = x
|
||||
self.process_save(count, tdir, container)
|
||||
except:
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
finally:
|
||||
self.requests.task_done()
|
||||
|
||||
def __empty_queue(self):
|
||||
' Only to be used during shutdown '
|
||||
while True:
|
||||
try:
|
||||
self.requests.get_nowait()
|
||||
except Empty:
|
||||
break
|
||||
else:
|
||||
self.requests.task_done()
|
||||
|
||||
def process_save(self, count, tdir, container):
|
||||
if count <= self.last_saved:
|
||||
shutil.rmtree(tdir, ignore_errors=True)
|
||||
return
|
||||
self.last_saved = count
|
||||
self.start_save.emit()
|
||||
try:
|
||||
self.do_save(tdir, container)
|
||||
except:
|
||||
import traceback
|
||||
self.report_error.emit(traceback.format_exc())
|
||||
self.save_done.emit()
|
||||
|
||||
def do_save(self, tdir, container):
|
||||
temp = None
|
||||
try:
|
||||
path = container.path_to_ebook
|
||||
temp = PersistentTemporaryFile(
|
||||
prefix=('_' if iswindows else '.'), suffix=os.path.splitext(path)[1], dir=os.path.dirname(path))
|
||||
temp.close()
|
||||
temp = temp.name
|
||||
container.commit(temp)
|
||||
atomic_rename(temp, path)
|
||||
finally:
|
||||
if temp and os.path.exists(temp):
|
||||
os.remove(temp)
|
||||
shutil.rmtree(tdir, ignore_errors=True)
|
||||
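
do_save follows the classic atomic-replace pattern: commit the container to a temporary file in the same directory as the target (so the rename cannot cross filesystems), rename it over the original, and clean up the temp file and the cloned tdir no matter what. A stdlib-only sketch of the same pattern (illustrative; calibre's atomic_rename helper, imported above, also handles the Windows cases):

    # Sketch of atomic replace using only the stdlib (POSIX rename semantics).
    import os, tempfile

    def atomic_write(path, data):
        fd, temp = tempfile.mkstemp(dir=os.path.dirname(path), prefix='.save-')
        try:
            with os.fdopen(fd, 'wb') as f:
                f.write(data)
            os.rename(temp, path)   # atomic on POSIX when on the same filesystem
            temp = None
        finally:
            if temp and os.path.exists(temp):
                os.remove(temp)
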
|
||||
@property
|
||||
def has_tasks(self):
|
||||
return bool(self.requests.unfinished_tasks)
|
||||
|
||||
def wait(self, timeout=30):
|
||||
if timeout is None:
|
||||
self.requests.join()
|
||||
else:
|
||||
try:
|
||||
join_with_timeout(self.requests, timeout)
|
||||
except RuntimeError:
|
||||
return False
|
||||
return True
|
||||
|
||||
def shutdown(self):
|
||||
self.requests.put(None)
@ -6,10 +6,11 @@ from __future__ import (unicode_literals, division, absolute_import,

__license__ = 'GPL v3'
__copyright__ = '2013, Kovid Goyal <kovid at kovidgoyal.net>'

from PyQt4.Qt import QDockWidget, Qt, QLabel, QIcon, QAction
from PyQt4.Qt import QDockWidget, Qt, QLabel, QIcon, QAction, QApplication

from calibre.constants import __appname__, get_version
from calibre.gui2.main_window import MainWindow
from calibre.gui2.tweak_book import current_container
from calibre.gui2.tweak_book import current_container, tprefs
from calibre.gui2.tweak_book.file_list import FileListWidget
from calibre.gui2.tweak_book.job import BlockingJob
from calibre.gui2.tweak_book.boss import Boss
@ -18,6 +19,7 @@ from calibre.gui2.keyboard import Manager as KeyboardManager

class Main(MainWindow):

    APP_NAME = _('Tweak Book')
    STATE_VERSION = 0

    def __init__(self, opts):
        MainWindow.__init__(self, opts, disable_automatic_gc=True)
@ -38,9 +40,17 @@ class Main(MainWindow):

        self.status_bar = self.statusBar()
        self.l = QLabel('Placeholder')
        self.status_bar.addPermanentWidget(self.boss.save_manager.status_widget)
        self.status_bar.addWidget(QLabel(_('{0} {1} created by {2}').format(__appname__, get_version(), 'Kovid Goyal')))
        f = self.status_bar.font()
        f.setBold(True)
        self.status_bar.setFont(f)

        self.setCentralWidget(self.l)
        self.boss(self)
        g = QApplication.instance().desktop().availableGeometry(self)
        self.resize(g.width()-50, g.height()-50)
        self.restore_state()

        self.keyboard.finalize()

@ -58,18 +68,37 @@ class Main(MainWindow):
            return ac

        self.action_open_book = reg('document_open.png', _('Open &book'), self.boss.open_book, 'open-book', 'Ctrl+O', _('Open a new book'))
        self.action_global_undo = reg('back.png', _('&Revert to before'), self.boss.do_global_undo, 'global-undo', 'Ctrl+Left',
                _('Revert book to before the last action (Undo)'))
        self.action_global_redo = reg('forward.png', _('&Revert to after'), self.boss.do_global_redo, 'global-redo', 'Ctrl+Right',
                _('Revert book state to after the next action (Redo)'))
        self.action_save = reg('save.png', _('&Save'), self.boss.save_book, 'save-book', 'Ctrl+S', _('Save book'))
        self.action_save.setEnabled(False)
        self.action_quit = reg('quit.png', _('&Quit'), self.boss.quit, 'quit', 'Ctrl+Q', _('Quit'))

    def create_menubar(self):
        b = self.menuBar()

        f = b.addMenu(_('&File'))
        f.addAction(self.action_open_book)
        f.addAction(self.action_save)
        f.addAction(self.action_quit)

        e = b.addMenu(_('&Edit'))
        e.addAction(self.action_global_undo)
        e.addAction(self.action_global_redo)

    def create_toolbar(self):
        self.global_bar = b = self.addToolBar(_('Global'))
        b.setObjectName('global_bar') # Needed for saveState
        b.addAction(self.action_open_book)
        b.addAction(self.action_global_undo)
        b.addAction(self.action_global_redo)
        b.addAction(self.action_save)

    def create_docks(self):
        self.file_list_dock = d = QDockWidget(_('&Files Browser'), self)
        d.setObjectName('file_list_dock') # Needed for saveState
        d.setAllowedAreas(Qt.LeftDockWidgetArea | Qt.RightDockWidgetArea)
        self.file_list = FileListWidget(d)
        d.setWidget(self.file_list)
@ -81,3 +110,26 @@ class Main(MainWindow):

    def update_window_title(self):
        self.setWindowTitle(self.current_metadata.title + ' [%s] - %s' %(current_container().book_type.upper(), self.APP_NAME))

    def closeEvent(self, e):
        if not self.boss.confirm_quit():
            e.ignore()
            return
        try:
            self.boss.shutdown()
        except:
            import traceback
            traceback.print_exc()
        e.accept()

    def save_state(self):
        tprefs.set('main_window_geometry', bytearray(self.saveGeometry()))
        tprefs.set('main_window_state', bytearray(self.saveState(self.STATE_VERSION)))

    def restore_state(self):
        geom = tprefs.get('main_window_geometry', None)
        if geom is not None:
            self.restoreGeometry(geom)
        state = tprefs.get('main_window_state', None)
        if state is not None:
            self.restoreState(state, self.STATE_VERSION)

@ -53,4 +53,23 @@ class GlobalUndoHistory(object):
        self.pos += 1
        return self.current_container

    @property
    def can_undo(self):
        return self.pos > 0

    @property
    def can_redo(self):
        return self.pos < len(self.states) - 1

    @property
    def undo_msg(self):
        if not self.can_undo:
            return ''
        return self.states[self.pos - 1].message or ''

    @property
    def redo_msg(self):
        if not self.can_redo:
            return ''
        return self.states[self.pos].message or ''
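
# Illustrative sketch (assumed caller; history, undo_action and redo_action are
# placeholder names, not calibre API): a UI layer holding a GlobalUndoHistory
# instance could drive its undo/redo actions from the new properties, e.g.
#
#     undo_action.setEnabled(history.can_undo)
#     redo_action.setEnabled(history.can_redo)
#     undo_action.setToolTip(history.undo_msg or _('Undo'))
#     redo_action.setToolTip(history.redo_msg or _('Redo'))
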
@ -500,7 +500,7 @@ class BrowseServer(object):
                    datatype, self.opts.url_prefix)
            href = re.search(r'<a href="([^"]+)"', html)
            if href is not None:
                raise cherrypy.HTTPRedirect(href.group(1))
                raise cherrypy.InternalRedirect(href.group(1))

        if len(items) <= self.opts.max_opds_ungrouped_items:
            script = 'false'

@ -7,3 +7,19 @@ __docformat__ = 'restructuredtext en'
Miscellaneous utilities.
'''

from time import time

def join_with_timeout(q, timeout=2):
    ''' Join the queue q with a specified timeout. Blocks until all tasks on
    the queue are done or times out with a runtime error. '''
    q.all_tasks_done.acquire()
    try:
        endtime = time() + timeout
        while q.unfinished_tasks:
            remaining = endtime - time()
            if remaining <= 0.0:
                raise RuntimeError('Waiting for queue to clear timed out')
            q.all_tasks_done.wait(remaining)
    finally:
        q.all_tasks_done.release()
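
# Illustrative usage sketch (assumptions: a Python 2 Queue.Queue and a worker
# thread that calls task_done() for every item it takes off the queue):
#
#     from Queue import Queue
#     q = Queue()
#     q.put('some job')        # a worker processes it and calls q.task_done()
#     try:
#         join_with_timeout(q, timeout=5)
#     except RuntimeError:
#         pass                 # the worker did not finish within 5 seconds
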
@ -201,32 +201,31 @@ def case_preserving_open_file(path, mode='wb', mkdir_mode=0o777):
        fpath = os.path.join(cpath, fname)
    return ans, fpath

def samefile_windows(src, dst):
def windows_get_fileid(path):
    ''' The fileid uniquely identifies actual file contents (it is the same for
    all hardlinks to a file). Similar to inode number on linux. '''
    import win32file
    from pywintypes import error
    if isbytestring(path):
        path = path.decode(filesystem_encoding)
    try:
        h = win32file.CreateFile(path, 0, 0, None, win32file.OPEN_EXISTING,
                win32file.FILE_FLAG_BACKUP_SEMANTICS, 0)
        try:
            data = win32file.GetFileInformationByHandle(h)
        finally:
            win32file.CloseHandle(h)
    except (error, EnvironmentError):
        return None
    return data[4], data[8], data[9]
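    # data[4], data[8] and data[9] are dwVolumeSerialNumber, nFileIndexHigh and
    # nFileIndexLow from the BY_HANDLE_FILE_INFORMATION tuple returned by
    # pywin32's GetFileInformationByHandle(); together they identify the file's
    # contents across all of its hardlinks on a volume.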

def samefile_windows(src, dst):
    samestring = (os.path.normcase(os.path.abspath(src)) ==
            os.path.normcase(os.path.abspath(dst)))
    if samestring:
        return True

    handles = []

    def get_fileid(x):
        if isbytestring(x):
            x = x.decode(filesystem_encoding)
        try:
            h = win32file.CreateFile(x, 0, 0, None, win32file.OPEN_EXISTING,
                    win32file.FILE_FLAG_BACKUP_SEMANTICS, 0)
            handles.append(h)
            data = win32file.GetFileInformationByHandle(h)
        except (error, EnvironmentError):
            return None
        return (data[4], data[8], data[9])

    a, b = get_fileid(src), get_fileid(dst)
    for h in handles:
        win32file.CloseHandle(h)
    a, b = windows_get_fileid(src), windows_get_fileid(dst)
    if a is None and b is None:
        return False
    return a == b
@ -319,6 +318,7 @@ class WindowsAtomicFolderMove(object):

        import win32file, winerror
        from pywintypes import error
        from collections import defaultdict

        if isbytestring(path):
            path = path.decode(filesystem_encoding)
@ -326,7 +326,13 @@ class WindowsAtomicFolderMove(object):
        if not os.path.exists(path):
            return

        for x in os.listdir(path):
        names = os.listdir(path)
        name_to_fileid = {x:windows_get_fileid(os.path.join(path, x)) for x in names}
        fileid_to_names = defaultdict(set)
        for name, fileid in name_to_fileid.iteritems():
            fileid_to_names[fileid].add(name)

        for x in names:
            f = os.path.normcase(os.path.abspath(os.path.join(path, x)))
            if not os.path.isfile(f):
                continue
@ -341,6 +347,21 @@ class WindowsAtomicFolderMove(object):
                        win32file.FILE_SHARE_DELETE, None,
                        win32file.OPEN_EXISTING, win32file.FILE_FLAG_SEQUENTIAL_SCAN, 0)
            except error as e:
                if getattr(e, 'winerror', 0) == winerror.ERROR_SHARING_VIOLATION:
                    # The file could be a hardlink to an already opened file,
                    # in which case we use the same handle for both files
                    fileid = name_to_fileid[x]
                    found = False
                    if fileid is not None:
                        for other in fileid_to_names[fileid]:
                            other = os.path.normcase(os.path.abspath(os.path.join(path, other)))
                            if other in self.handle_map:
                                self.handle_map[f] = self.handle_map[other]
                                found = True
                                break
                    if found:
                        continue

                self.close_handles()
                if getattr(e, 'winerror', 0) == winerror.ERROR_SHARING_VIOLATION:
                    err = IOError(errno.EACCES,
@ -371,6 +392,8 @@ class WindowsAtomicFolderMove(object):
            return
        except:
            pass

        win32file.SetFilePointer(handle, 0, win32file.FILE_BEGIN)
        with lopen(dest, 'wb') as f:
            while True:
                hr, raw = win32file.ReadFile(handle, 1024*1024)
@ -381,6 +404,7 @@ class WindowsAtomicFolderMove(object):
                f.write(raw)

    def release_file(self, path):
        ' Release the lock on the file pointed to by path. Will also release the lock on any hardlinks to path '
        key = None
        for p, h in self.handle_map.iteritems():
            if samefile_windows(path, p):
@ -389,7 +413,9 @@ class WindowsAtomicFolderMove(object):
        if key is not None:
            import win32file
            win32file.CloseHandle(key[1])
            self.handle_map.pop(key[0])
            remove = [f for f, h in self.handle_map.iteritems() if h is key[1]]
            for x in remove:
                self.handle_map.pop(x)
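            # Every path that shares this handle (i.e. every hardlink to the
            # same file) is dropped from the map, since the single underlying
            # handle has just been closed.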

    def close_handles(self):
        import win32file

@ -515,8 +515,8 @@ class TemplateFormatter(string.Formatter):
        try:
            ans = self.evaluate(fmt, [], kwargs).strip()
        except Exception as e:
            # if DEBUG:
            # traceback.print_exc()
            if DEBUG and getattr(e, 'is_locking_error', False):
                traceback.print_exc()
            ans = error_value + ' ' + e.message
        return ans

@ -529,7 +529,7 @@ class ValidateFormatter(TemplateFormatter):

    def validate(self, x):
        from calibre.ebooks.metadata.book.base import Metadata
        self.book = Metadata('');
        self.book = Metadata('')
        return self.vformat(x, [], {})

validation_formatter = ValidateFormatter()

@ -1187,7 +1187,7 @@ class BuiltinDaysBetween(BuiltinFormatterFunction):
        except:
            return ''
        i = d1 - d2
        return str('%d.%d'%(i.days, i.seconds/8640))
        return '%.1f'%(i.days + (i.seconds/(24.0*60.0*60.0)))
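        # Worked example (illustrative): for an interval of minus 12 hours,
        # timedelta normalises to days=-1, seconds=43200. The old expression
        # '%d.%d' % (i.days, i.seconds/8640) yields '-1.5', while the corrected
        # one gives -1 + 43200/86400.0 = -0.5, formatted as '-0.5'.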

class BuiltinLanguageStrings(BuiltinFormatterFunction):
    name = 'language_strings'