mirror of
https://github.com/kovidgoyal/calibre.git
synced 2025-07-07 10:14:46 -04:00
Merge from trunk
This commit is contained in:
commit
1a19cd7c19
@ -19,6 +19,56 @@
|
|||||||
# new recipes:
|
# new recipes:
|
||||||
# - title:
|
# - title:
|
||||||
|
|
||||||
|
- version: 0.8.67
|
||||||
|
date: 2012-08-31
|
||||||
|
|
||||||
|
new features:
|
||||||
|
- title: "PDF Output: Generate a PDF Outline based on the Table of Contents of the input document"
|
||||||
|
|
||||||
|
- title: "Conversion: Add an option under Structure Detection to set the 'Start reading at' metadata with an XPath expression."
|
||||||
|
tickets: [1043233]
|
||||||
|
|
||||||
|
- title: "Speed up changing the title and author of files with books larger than 3MB by avoiding an unnecessary extra copy."
|
||||||
|
|
||||||
|
- title: "Wireless device driver: Make detecting and connecting to devices easier on networks where mdns is disabled"
|
||||||
|
|
||||||
|
- title: "PDF Output: Allow choosing the default font family and size when generating PDF files (under PDF Options) in the conversion dialog"
|
||||||
|
|
||||||
|
- title: "Metadata dialog: Comments editor: Allow specifying the name of a link when using the insert link button."
|
||||||
|
tickets: [1042683]
|
||||||
|
|
||||||
|
- title: "Remove the unmaintained pdfmanipulate command line utility. There are many other tools that provide similar functionality, for example, pdftk and podofo"
|
||||||
|
|
||||||
|
bug fixes:
|
||||||
|
- title: "Catalogs: Fix regression that broke sorting of non series titles before series titles"
|
||||||
|
|
||||||
|
- title: "PDF Output: Do not create duplicate embedded fonts in the PDF for every individual HTML file in the input document"
|
||||||
|
|
||||||
|
- title: "Fix regression that broke DnD of files having a # character in their names to the book details panel"
|
||||||
|
|
||||||
|
- title: "PDF Output: Allow generating PDF files with more than 512 pages on windows."
|
||||||
|
tickets: [1041614]
|
||||||
|
|
||||||
|
- title: "Fix minor bug in handling of the completion popups when using the next/previous buttons in the edit metadata dialog"
|
||||||
|
ticket: [1041389]
|
||||||
|
|
||||||
|
improved recipes:
|
||||||
|
- Coding Horror
|
||||||
|
- TIME Magazine
|
||||||
|
|
||||||
|
new recipes:
|
||||||
|
- title: Cumhuriyet Yzarlar
|
||||||
|
author: Sethi Eksi
|
||||||
|
|
||||||
|
- title: Arcadia
|
||||||
|
author: Masahiro Hasegawa
|
||||||
|
|
||||||
|
- title: Business Week Magazine and Chronicle of Higher Education
|
||||||
|
author: Rick Shang
|
||||||
|
|
||||||
|
- title: CIPER Chile
|
||||||
|
author: Darko Miletic
|
||||||
|
|
||||||
- version: 0.8.66
|
- version: 0.8.66
|
||||||
date: 2012-08-24
|
date: 2012-08-24
|
||||||
|
|
||||||
|
@ -33,38 +33,36 @@ class BusinessWeekMagazine(BasicNewsRecipe):
|
|||||||
div0 = soup.find ('div', attrs={'class':'column left'})
|
div0 = soup.find ('div', attrs={'class':'column left'})
|
||||||
section_title = ''
|
section_title = ''
|
||||||
feeds = OrderedDict()
|
feeds = OrderedDict()
|
||||||
articles = []
|
for div in div0.findAll('h4'):
|
||||||
for div in div0.findAll('a'):
|
articles = []
|
||||||
section_title = self.tag_to_string(div.findPrevious('h3')).strip()
|
section_title = self.tag_to_string(div.findPrevious('h3')).strip()
|
||||||
self.log('Processing section:', section_title)
|
title=self.tag_to_string(div.a).strip()
|
||||||
title=self.tag_to_string(div).strip()
|
url=div.a['href']
|
||||||
url=div['href']
|
|
||||||
soup0 = self.index_to_soup(url)
|
soup0 = self.index_to_soup(url)
|
||||||
urlprint=soup0.find('li', attrs={'class':'print'}).a['href']
|
urlprint=soup0.find('li', attrs={'class':'print'}).a['href']
|
||||||
articles.append({'title':title, 'url':urlprint, 'description':'', 'date':''})
|
articles.append({'title':title, 'url':urlprint, 'description':'', 'date':''})
|
||||||
|
|
||||||
|
|
||||||
if articles:
|
if articles:
|
||||||
if section_title not in feeds:
|
if section_title not in feeds:
|
||||||
feeds[section_title] = []
|
feeds[section_title] = []
|
||||||
feeds[section_title] += articles
|
feeds[section_title] += articles
|
||||||
|
|
||||||
div1 = soup.find ('div', attrs={'class':'column center'})
|
div1 = soup.find ('div', attrs={'class':'column center'})
|
||||||
section_title = ''
|
section_title = ''
|
||||||
articles = []
|
for div in div1.findAll('h5'):
|
||||||
for div in div1.findAll('a'):
|
articles = []
|
||||||
desc=self.tag_to_string(div.findNext('p')).strip()
|
desc=self.tag_to_string(div.findNext('p')).strip()
|
||||||
section_title = self.tag_to_string(div.findPrevious('h3')).strip()
|
section_title = self.tag_to_string(div.findPrevious('h3')).strip()
|
||||||
self.log('Processing section:', section_title)
|
title=self.tag_to_string(div.a).strip()
|
||||||
title=self.tag_to_string(div).strip()
|
url=div.a['href']
|
||||||
url=div['href']
|
|
||||||
soup0 = self.index_to_soup(url)
|
soup0 = self.index_to_soup(url)
|
||||||
urlprint=soup0.find('li', attrs={'class':'print'}).a['href']
|
urlprint=soup0.find('li', attrs={'class':'print'}).a['href']
|
||||||
articles.append({'title':title, 'url':urlprint, 'description':desc, 'date':''})
|
articles.append({'title':title, 'url':urlprint, 'description':desc, 'date':''})
|
||||||
|
|
||||||
if articles:
|
if articles:
|
||||||
if section_title not in feeds:
|
if section_title not in feeds:
|
||||||
feeds[section_title] = []
|
feeds[section_title] = []
|
||||||
feeds[section_title] += articles
|
feeds[section_title] += articles
|
||||||
ans = [(key, val) for key, val in feeds.iteritems()]
|
ans = [(key, val) for key, val in feeds.iteritems()]
|
||||||
return ans
|
return ans
|
||||||
|
|
||||||
|
@ -12,7 +12,7 @@ from calibre.web.feeds.news import BasicNewsRecipe
|
|||||||
class AcademiaCatavencu(BasicNewsRecipe):
|
class AcademiaCatavencu(BasicNewsRecipe):
|
||||||
title = u'Academia Ca\u0163avencu'
|
title = u'Academia Ca\u0163avencu'
|
||||||
__author__ = u'Silviu Cotoar\u0103'
|
__author__ = u'Silviu Cotoar\u0103'
|
||||||
description = 'Tagma cum laude'
|
description = 'Academia Catavencu. Pamflete!'
|
||||||
publisher = u'Ca\u0163avencu'
|
publisher = u'Ca\u0163avencu'
|
||||||
oldest_article = 5
|
oldest_article = 5
|
||||||
language = 'ro'
|
language = 'ro'
|
||||||
@ -21,7 +21,7 @@ class AcademiaCatavencu(BasicNewsRecipe):
|
|||||||
use_embedded_content = False
|
use_embedded_content = False
|
||||||
category = 'Ziare'
|
category = 'Ziare'
|
||||||
encoding = 'utf-8'
|
encoding = 'utf-8'
|
||||||
cover_url = 'http://www.academiacatavencu.info/images/logo.png'
|
cover_url = 'http://www.inpolitics.ro/Uploads/Articles/academia_catavencu.jpg'
|
||||||
|
|
||||||
conversion_options = {
|
conversion_options = {
|
||||||
'comments' : description
|
'comments' : description
|
||||||
@ -31,21 +31,21 @@ class AcademiaCatavencu(BasicNewsRecipe):
|
|||||||
}
|
}
|
||||||
|
|
||||||
keep_only_tags = [
|
keep_only_tags = [
|
||||||
dict(name='h1', attrs={'class':'art_title'}),
|
dict(name='h1', attrs={'class':'entry-title'}),
|
||||||
dict(name='div', attrs={'class':'art_text'})
|
dict(name='div', attrs={'class':'entry-content'})
|
||||||
]
|
]
|
||||||
|
|
||||||
remove_tags = [
|
remove_tags = [
|
||||||
dict(name='div', attrs={'class':['desp_m']})
|
dict(name='div', attrs={'class':['mr_social_sharing_wrapper']})
|
||||||
, dict(name='div', attrs={'id':['tags']})
|
, dict(name='div', attrs={'id':['fb_share_1']})
|
||||||
]
|
]
|
||||||
|
|
||||||
remove_tags_after = [
|
remove_tags_after = [
|
||||||
dict(name='div', attrs={'class':['desp_m']})
|
dict(name='div', attrs={'id':['fb_share_1']})
|
||||||
]
|
]
|
||||||
|
|
||||||
feeds = [
|
feeds = [
|
||||||
(u'Feeds', u'http://www.academiacatavencu.info/rss.xml')
|
(u'Feeds', u'http://www.academiacatavencu.info/feed')
|
||||||
]
|
]
|
||||||
|
|
||||||
def preprocess_html(self, soup):
|
def preprocess_html(self, soup):
|
||||||
|
@ -13,13 +13,13 @@ class Chronicle(BasicNewsRecipe):
|
|||||||
keep_only_tags = [
|
keep_only_tags = [
|
||||||
dict(name='div', attrs={'class':'article'}),
|
dict(name='div', attrs={'class':'article'}),
|
||||||
]
|
]
|
||||||
remove_tags = [dict(name='div',attrs={'class':'related module1'})]
|
remove_tags = [dict(name='div',attrs={'class':['related module1','maintitle']}),
|
||||||
|
dict(name='div', attrs={'id':['section-nav','icon-row']})]
|
||||||
no_javascript = True
|
no_javascript = True
|
||||||
no_stylesheets = True
|
no_stylesheets = True
|
||||||
|
|
||||||
|
|
||||||
needs_subscription = True
|
needs_subscription = True
|
||||||
|
|
||||||
def get_browser(self):
|
def get_browser(self):
|
||||||
br = BasicNewsRecipe.get_browser()
|
br = BasicNewsRecipe.get_browser()
|
||||||
if self.username is not None and self.password is not None:
|
if self.username is not None and self.password is not None:
|
||||||
@ -27,7 +27,7 @@ class Chronicle(BasicNewsRecipe):
|
|||||||
br.select_form(nr=1)
|
br.select_form(nr=1)
|
||||||
br['username'] = self.username
|
br['username'] = self.username
|
||||||
br['password'] = self.password
|
br['password'] = self.password
|
||||||
br.submit()
|
br.submit()
|
||||||
return br
|
return br
|
||||||
|
|
||||||
def parse_index(self):
|
def parse_index(self):
|
||||||
@ -47,33 +47,35 @@ class Chronicle(BasicNewsRecipe):
|
|||||||
|
|
||||||
#Go to the main body
|
#Go to the main body
|
||||||
soup = self.index_to_soup(issueurl)
|
soup = self.index_to_soup(issueurl)
|
||||||
div0 = soup.find ('div', attrs={'id':'article-body'})
|
div = soup.find ('div', attrs={'id':'article-body'})
|
||||||
|
|
||||||
feeds = OrderedDict()
|
feeds = OrderedDict()
|
||||||
for div in div0.findAll('div',attrs={'class':'module1'}):
|
section_title = ''
|
||||||
section_title = self.tag_to_string(div.find('h3'))
|
for post in div.findAll('li'):
|
||||||
for post in div.findAll('li',attrs={'class':'sub-promo'}):
|
articles = []
|
||||||
articles = []
|
a=post.find('a', href=True)
|
||||||
a=post.find('a', href=True)
|
if a is not None:
|
||||||
title=self.tag_to_string(a)
|
title=self.tag_to_string(a)
|
||||||
url="http://chronicle.com"+a['href'].strip()
|
url="http://chronicle.com"+a['href'].strip()
|
||||||
|
sectiontitle=post.findPrevious('h3')
|
||||||
|
if sectiontitle is None:
|
||||||
|
sectiontitle=post.findPrevious('h4')
|
||||||
|
section_title=self.tag_to_string(sectiontitle)
|
||||||
desc=self.tag_to_string(post.find('p'))
|
desc=self.tag_to_string(post.find('p'))
|
||||||
articles.append({'title':title, 'url':url, 'description':desc, 'date':''})
|
articles.append({'title':title, 'url':url, 'description':desc, 'date':''})
|
||||||
|
|
||||||
if articles:
|
if articles:
|
||||||
if section_title not in feeds:
|
if section_title not in feeds:
|
||||||
feeds[section_title] = []
|
feeds[section_title] = []
|
||||||
feeds[section_title] += articles
|
feeds[section_title] += articles
|
||||||
ans = [(key, val) for key, val in feeds.iteritems()]
|
ans = [(key, val) for key, val in feeds.iteritems()]
|
||||||
return ans
|
return ans
|
||||||
|
|
||||||
def preprocess_html(self,soup):
|
def preprocess_html(self,soup):
|
||||||
#process all the images
|
#process all the images
|
||||||
for div in soup.findAll('div', attrs={'class':'tableauPlaceholder'}):
|
for div in soup.findAll('div', attrs={'class':'tableauPlaceholder'}):
|
||||||
|
|
||||||
noscripts=div.find('noscript').a
|
noscripts=div.find('noscript').a
|
||||||
div.replaceWith(noscripts)
|
div.replaceWith(noscripts)
|
||||||
for div0 in soup.findAll('div',text='Powered by Tableau'):
|
for div0 in soup.findAll('div',text='Powered by Tableau'):
|
||||||
div0.extract()
|
div0.extract()
|
||||||
return soup
|
return soup
|
||||||
|
|
||||||
|
@ -1,71 +1,51 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
#!/usr/bin/env python
|
||||||
|
|
||||||
|
__license__ = 'GPL v3'
|
||||||
|
__copyright__ = u'2011, Silviu Cotoar\u0103'
|
||||||
|
'''
|
||||||
|
dilemaveche.ro
|
||||||
|
'''
|
||||||
|
|
||||||
from calibre.web.feeds.news import BasicNewsRecipe
|
from calibre.web.feeds.news import BasicNewsRecipe
|
||||||
|
|
||||||
class DilemaVeche(BasicNewsRecipe):
|
class DilemaVeche(BasicNewsRecipe):
|
||||||
title = u'Dilema Veche' # apare vinerea, mai pe dupa-masa,depinde de Luiza cred (care se semneaza ca fiind creatorul fiecarui articol in feed-ul RSS)
|
title = u'Dilema Veche'
|
||||||
__author__ = 'song2' # inspirat din scriptul pentru Le Monde. Inspired from the Le Monde script
|
__author__ = u'Silviu Cotoar\u0103'
|
||||||
description = '"Sint vechi, domnule!" (I.L. Caragiale)'
|
description = 'Sint vechi, domnule! (I.L. Caragiale)'
|
||||||
publisher = 'Adevarul Holding'
|
publisher = u'Adev\u0103rul Holding'
|
||||||
oldest_article = 7
|
oldest_article = 5
|
||||||
max_articles_per_feed = 200
|
language = 'ro'
|
||||||
encoding = 'utf8'
|
max_articles_per_feed = 100
|
||||||
language = 'ro'
|
|
||||||
masthead_url = 'http://www.dilemaveche.ro/sites/all/themes/dilema/theme/dilema_two/layouter/dilema_two_homepage/logo.png'
|
|
||||||
publication_type = 'magazine'
|
|
||||||
feeds = [
|
|
||||||
('Editoriale si opinii - Situatiunea', 'http://www.dilemaveche.ro/taxonomy/term/37/0/feed'),
|
|
||||||
('Editoriale si opinii - Pe ce lume traim', 'http://www.dilemaveche.ro/taxonomy/term/38/0/feed'),
|
|
||||||
('Editoriale si opinii - Bordeie si obiceie', 'http://www.dilemaveche.ro/taxonomy/term/44/0/feed'),
|
|
||||||
('Editoriale si opinii - Talc Show', 'http://www.dilemaveche.ro/taxonomy/term/44/0/feed'),
|
|
||||||
('Tema saptamanii', 'http://www.dilemaveche.ro/taxonomy/term/19/0/feed'),
|
|
||||||
('La zi in cultura - Dilema va recomanda', 'http://www.dilemaveche.ro/taxonomy/term/58/0/feed'),
|
|
||||||
('La zi in cultura - Carte', 'http://www.dilemaveche.ro/taxonomy/term/14/0/feed'),
|
|
||||||
('La zi in cultura - Film', 'http://www.dilemaveche.ro/taxonomy/term/13/0/feed'),
|
|
||||||
('La zi in cultura - Muzica', 'http://www.dilemaveche.ro/taxonomy/term/1341/0/feed'),
|
|
||||||
('La zi in cultura - Arte performative', 'http://www.dilemaveche.ro/taxonomy/term/1342/0/feed'),
|
|
||||||
('La zi in cultura - Arte vizuale', 'http://www.dilemaveche.ro/taxonomy/term/1512/0/feed'),
|
|
||||||
('Societate - Ieri cu vedere spre azi', 'http://www.dilemaveche.ro/taxonomy/term/15/0/feed'),
|
|
||||||
('Societate - Din polul opus', 'http://www.dilemaveche.ro/taxonomy/term/41/0/feed'),
|
|
||||||
('Societate - Mass comedia', 'http://www.dilemaveche.ro/taxonomy/term/43/0/feed'),
|
|
||||||
('Societate - La singular si la plural', 'http://www.dilemaveche.ro/taxonomy/term/42/0/feed'),
|
|
||||||
('Oameni si idei - Educatie', 'http://www.dilemaveche.ro/taxonomy/term/46/0/feed'),
|
|
||||||
('Oameni si idei - Polemici si dezbateri', 'http://www.dilemaveche.ro/taxonomy/term/48/0/feed'),
|
|
||||||
('Oameni si idei - Stiinta si tehnologie', 'http://www.dilemaveche.ro/taxonomy/term/46/0/feed'),
|
|
||||||
('Dileme on-line', 'http://www.dilemaveche.ro/taxonomy/term/005/0/feed')
|
|
||||||
]
|
|
||||||
remove_tags_before = dict(name='div',attrs={'class':'spacer_10'})
|
|
||||||
remove_tags = [
|
|
||||||
dict(name='div', attrs={'class':'art_related_left'}),
|
|
||||||
dict(name='div', attrs={'class':'controale'}),
|
|
||||||
dict(name='div', attrs={'class':'simple_overlay'}),
|
|
||||||
]
|
|
||||||
remove_tags_after = [dict(id='facebookLike')]
|
|
||||||
remove_javascript = True
|
|
||||||
no_stylesheets = True
|
no_stylesheets = True
|
||||||
remove_empty_feeds = True
|
use_embedded_content = False
|
||||||
extra_css = """
|
category = 'Ziare'
|
||||||
body{font-family: Georgia,Times,serif }
|
encoding = 'utf-8'
|
||||||
img{margin-bottom: 0.4em; display:block}
|
cover_url = 'http://dilemaveche.ro/sites/all/themes/dilema/theme/dilema_two/layouter/dilema_two_homepage/logo.png'
|
||||||
"""
|
|
||||||
def get_cover_url(self):
|
|
||||||
cover_url = None
|
|
||||||
soup = self.index_to_soup('http://dilemaveche.ro')
|
|
||||||
link_item = soup.find('div',attrs={'class':'box_dr_pdf_picture'})
|
|
||||||
if link_item and link_item.a:
|
|
||||||
cover_url = link_item.a['href']
|
|
||||||
br = BasicNewsRecipe.get_browser()
|
|
||||||
try:
|
|
||||||
br.open(cover_url)
|
|
||||||
except: #daca nu gaseste pdf-ul
|
|
||||||
self.log("\nPDF indisponibil")
|
|
||||||
link_item = soup.find('div',attrs={'class':'box_dr_pdf_picture'})
|
|
||||||
if link_item and link_item.img:
|
|
||||||
cover_url = link_item.img['src']
|
|
||||||
br = BasicNewsRecipe.get_browser()
|
|
||||||
try:
|
|
||||||
br.open(cover_url)
|
|
||||||
except: #daca nu gaseste nici imaginea mica mica
|
|
||||||
print('Mama lor de nenorociti! nu este nici pdf nici imagine')
|
|
||||||
cover_url ='http://www.dilemaveche.ro/sites/all/themes/dilema/theme/dilema_two/layouter/dilema_two_homepage/logo.png'
|
|
||||||
return cover_url
|
|
||||||
cover_margins = (10, 15, '#ffffff')
|
|
||||||
|
|
||||||
|
conversion_options = {
|
||||||
|
'comments' : description
|
||||||
|
,'tags' : category
|
||||||
|
,'language' : language
|
||||||
|
,'publisher' : publisher
|
||||||
|
}
|
||||||
|
|
||||||
|
keep_only_tags = [
|
||||||
|
dict(name='div', attrs={'class':'c_left_column'})
|
||||||
|
]
|
||||||
|
|
||||||
|
remove_tags = [
|
||||||
|
dict(name='div', attrs={'id':['adshop_widget_428x60']}) ,
|
||||||
|
dict(name='div', attrs={'id':['gallery']})
|
||||||
|
]
|
||||||
|
|
||||||
|
remove_tags_after = [
|
||||||
|
dict(name='div', attrs={'id':['adshop_widget_428x60']})
|
||||||
|
]
|
||||||
|
|
||||||
|
feeds = [
|
||||||
|
(u'Feeds', u'http://dilemaveche.ro/rss.xml')
|
||||||
|
]
|
||||||
|
|
||||||
|
def preprocess_html(self, soup):
|
||||||
|
return self.adeify_images(soup)
|
||||||
|
@ -10,7 +10,7 @@ from calibre import strftime
|
|||||||
from calibre.web.feeds.news import BasicNewsRecipe
|
from calibre.web.feeds.news import BasicNewsRecipe
|
||||||
|
|
||||||
class FinancialTimes(BasicNewsRecipe):
|
class FinancialTimes(BasicNewsRecipe):
|
||||||
title = 'Financial Times - UK printed edition'
|
title = 'Financial Times (UK)'
|
||||||
__author__ = 'Darko Miletic'
|
__author__ = 'Darko Miletic'
|
||||||
description = "The Financial Times (FT) is one of the world's leading business news and information organisations, recognised internationally for its authority, integrity and accuracy."
|
description = "The Financial Times (FT) is one of the world's leading business news and information organisations, recognised internationally for its authority, integrity and accuracy."
|
||||||
publisher = 'The Financial Times Ltd.'
|
publisher = 'The Financial Times Ltd.'
|
||||||
@ -101,17 +101,19 @@ class FinancialTimes(BasicNewsRecipe):
|
|||||||
def parse_index(self):
|
def parse_index(self):
|
||||||
feeds = []
|
feeds = []
|
||||||
soup = self.index_to_soup(self.INDEX)
|
soup = self.index_to_soup(self.INDEX)
|
||||||
|
dates= self.tag_to_string(soup.find('div', attrs={'class':'btm-links'}).find('div'))
|
||||||
|
self.timefmt = ' [%s]'%dates
|
||||||
wide = soup.find('div',attrs={'class':'wide'})
|
wide = soup.find('div',attrs={'class':'wide'})
|
||||||
if not wide:
|
if not wide:
|
||||||
return feeds
|
return feeds
|
||||||
strest = wide.findAll('h3', attrs={'class':'section'})
|
strest = wide.findAll('h3', attrs={'class':'section'})
|
||||||
if not strest:
|
if not strest:
|
||||||
return feeds
|
return feeds
|
||||||
st = wide.find('h4',attrs={'class':'section-no-arrow'})
|
st = wide.findAll('h4',attrs={'class':'section-no-arrow'})
|
||||||
if st:
|
if st:
|
||||||
strest.insert(0,st)
|
st.extend(strest)
|
||||||
count = 0
|
count = 0
|
||||||
for item in strest:
|
for item in st:
|
||||||
count = count + 1
|
count = count + 1
|
||||||
if self.test and count > 2:
|
if self.test and count > 2:
|
||||||
return feeds
|
return feeds
|
||||||
@ -151,7 +153,7 @@ class FinancialTimes(BasicNewsRecipe):
|
|||||||
def get_cover_url(self):
|
def get_cover_url(self):
|
||||||
cdate = datetime.date.today()
|
cdate = datetime.date.today()
|
||||||
if cdate.isoweekday() == 7:
|
if cdate.isoweekday() == 7:
|
||||||
cdate -= datetime.timedelta(days=1)
|
cdate -= datetime.timedelta(days=1)
|
||||||
return cdate.strftime('http://specials.ft.com/vtf_pdf/%d%m%y_FRONT1_LON.pdf')
|
return cdate.strftime('http://specials.ft.com/vtf_pdf/%d%m%y_FRONT1_LON.pdf')
|
||||||
|
|
||||||
def get_obfuscated_article(self, url):
|
def get_obfuscated_article(self, url):
|
||||||
@ -163,9 +165,8 @@ class FinancialTimes(BasicNewsRecipe):
|
|||||||
count = 10
|
count = 10
|
||||||
except:
|
except:
|
||||||
print "Retrying download..."
|
print "Retrying download..."
|
||||||
count += 1
|
count += 1
|
||||||
self.temp_files.append(PersistentTemporaryFile('_fa.html'))
|
self.temp_files.append(PersistentTemporaryFile('_fa.html'))
|
||||||
self.temp_files[-1].write(html)
|
self.temp_files[-1].write(html)
|
||||||
self.temp_files[-1].close()
|
self.temp_files[-1].close()
|
||||||
return self.temp_files[-1].name
|
return self.temp_files[-1].name
|
||||||
|
|
@ -7,18 +7,19 @@ class HoustonChronicle(BasicNewsRecipe):
|
|||||||
|
|
||||||
title = u'The Houston Chronicle'
|
title = u'The Houston Chronicle'
|
||||||
description = 'News from Houston, Texas'
|
description = 'News from Houston, Texas'
|
||||||
__author__ = 'Kovid Goyal'
|
__author__ = 'Kovid Goyal'
|
||||||
language = 'en'
|
language = 'en'
|
||||||
timefmt = ' [%a, %d %b, %Y]'
|
timefmt = ' [%a, %d %b, %Y]'
|
||||||
no_stylesheets = True
|
no_stylesheets = True
|
||||||
use_embedded_content = False
|
use_embedded_content = False
|
||||||
remove_attributes = ['style']
|
remove_attributes = ['style']
|
||||||
|
auto_cleanup = True
|
||||||
|
|
||||||
oldest_article = 2.0
|
oldest_article = 2.0
|
||||||
|
|
||||||
keep_only_tags = {'class':lambda x: x and ('hst-articletitle' in x or
|
#keep_only_tags = {'class':lambda x: x and ('hst-articletitle' in x or
|
||||||
'hst-articletext' in x or 'hst-galleryitem' in x)}
|
#'hst-articletext' in x or 'hst-galleryitem' in x)}
|
||||||
remove_attributes = ['xmlns']
|
#remove_attributes = ['xmlns']
|
||||||
|
|
||||||
feeds = [
|
feeds = [
|
||||||
('News', "http://www.chron.com/rss/feed/News-270.php"),
|
('News', "http://www.chron.com/rss/feed/News-270.php"),
|
||||||
@ -37,3 +38,4 @@ class HoustonChronicle(BasicNewsRecipe):
|
|||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
@ -39,10 +39,10 @@ class SCMP(BasicNewsRecipe):
|
|||||||
#br.set_debug_responses(True)
|
#br.set_debug_responses(True)
|
||||||
#br.set_debug_redirects(True)
|
#br.set_debug_redirects(True)
|
||||||
if self.username is not None and self.password is not None:
|
if self.username is not None and self.password is not None:
|
||||||
br.open('http://www.scmp.com/portal/site/SCMP/')
|
br.open('http://www.scmp.com/')
|
||||||
br.select_form(name='loginForm')
|
br.select_form(nr=1)
|
||||||
br['Login' ] = self.username
|
br['name'] = self.username
|
||||||
br['Password'] = self.password
|
br['pass'] = self.password
|
||||||
br.submit()
|
br.submit()
|
||||||
return br
|
return br
|
||||||
|
|
||||||
|
@ -36,12 +36,14 @@ class TimesNewRoman(BasicNewsRecipe):
|
|||||||
|
|
||||||
remove_tags = [
|
remove_tags = [
|
||||||
dict(name='p', attrs={'class':['articleinfo']})
|
dict(name='p', attrs={'class':['articleinfo']})
|
||||||
, dict(name='div',attrs={'class':['vergefacebooklike']})
|
, dict(name='div', attrs={'class':['shareTools']})
|
||||||
, dict(name='div', attrs={'class':'cleared'})
|
, dict(name='div', attrs={'class':'fb_iframe_widget'})
|
||||||
|
, dict(name='div', attrs={'id':'jc'})
|
||||||
]
|
]
|
||||||
|
|
||||||
remove_tags_after = [
|
remove_tags_after = [
|
||||||
dict(name='div', attrs={'class':'cleared'})
|
dict(name='div', attrs={'class':'fb_iframe_widget'}),
|
||||||
|
dict(name='div', attrs={'id':'jc'})
|
||||||
]
|
]
|
||||||
|
|
||||||
feeds = [
|
feeds = [
|
||||||
|
Binary file not shown.
@ -13,6 +13,8 @@ let g:syntastic_cpp_include_dirs = [
|
|||||||
\]
|
\]
|
||||||
let g:syntastic_c_include_dirs = g:syntastic_cpp_include_dirs
|
let g:syntastic_c_include_dirs = g:syntastic_cpp_include_dirs
|
||||||
|
|
||||||
|
set wildignore+=resources/viewer/mathjax/**
|
||||||
|
|
||||||
fun! CalibreLog()
|
fun! CalibreLog()
|
||||||
" Setup buffers to edit the calibre changelog and version info prior to
|
" Setup buffers to edit the calibre changelog and version info prior to
|
||||||
" making a release.
|
" making a release.
|
||||||
|
@ -139,6 +139,7 @@ extensions = [
|
|||||||
Extension('podofo',
|
Extension('podofo',
|
||||||
[
|
[
|
||||||
'calibre/utils/podofo/utils.cpp',
|
'calibre/utils/podofo/utils.cpp',
|
||||||
|
'calibre/utils/podofo/output.cpp',
|
||||||
'calibre/utils/podofo/doc.cpp',
|
'calibre/utils/podofo/doc.cpp',
|
||||||
'calibre/utils/podofo/outline.cpp',
|
'calibre/utils/podofo/outline.cpp',
|
||||||
'calibre/utils/podofo/podofo.cpp',
|
'calibre/utils/podofo/podofo.cpp',
|
||||||
@ -186,7 +187,7 @@ if iswindows:
|
|||||||
headers=[
|
headers=[
|
||||||
'calibre/devices/mtp/windows/global.h',
|
'calibre/devices/mtp/windows/global.h',
|
||||||
],
|
],
|
||||||
libraries=['ole32', 'portabledeviceguids', 'user32'],
|
libraries=['ole32', 'oleaut32', 'portabledeviceguids', 'user32'],
|
||||||
# needs_ddk=True,
|
# needs_ddk=True,
|
||||||
cflags=['/X']
|
cflags=['/X']
|
||||||
),
|
),
|
||||||
|
@ -15,7 +15,8 @@ from setup import Command, modules, basenames, functions, __version__, \
|
|||||||
SITE_PACKAGES = ['PIL', 'dateutil', 'dns', 'PyQt4', 'mechanize',
|
SITE_PACKAGES = ['PIL', 'dateutil', 'dns', 'PyQt4', 'mechanize',
|
||||||
'sip.so', 'BeautifulSoup.py', 'cssutils', 'encutils', 'lxml',
|
'sip.so', 'BeautifulSoup.py', 'cssutils', 'encutils', 'lxml',
|
||||||
'sipconfig.py', 'xdg', 'dbus', '_dbus_bindings.so', 'dbus_bindings.py',
|
'sipconfig.py', 'xdg', 'dbus', '_dbus_bindings.so', 'dbus_bindings.py',
|
||||||
'_dbus_glib_bindings.so', 'netifaces.so']
|
'_dbus_glib_bindings.so', 'netifaces.so', '_psutil_posix.so',
|
||||||
|
'_psutil_linux.so', 'psutil']
|
||||||
|
|
||||||
QTDIR = '/usr/lib/qt4'
|
QTDIR = '/usr/lib/qt4'
|
||||||
QTDLLS = ('QtCore', 'QtGui', 'QtNetwork', 'QtSvg', 'QtXml', 'QtWebKit', 'QtDBus')
|
QTDLLS = ('QtCore', 'QtGui', 'QtNetwork', 'QtSvg', 'QtXml', 'QtWebKit', 'QtDBus')
|
||||||
|
@ -360,6 +360,15 @@ Run
|
|||||||
python setup.py build
|
python setup.py build
|
||||||
cp build/lib.win32-2.7/netifaces.pyd /cygdrive/c/Python27/Lib/site-packages/
|
cp build/lib.win32-2.7/netifaces.pyd /cygdrive/c/Python27/Lib/site-packages/
|
||||||
|
|
||||||
|
psutil
|
||||||
|
--------
|
||||||
|
|
||||||
|
Download the source tarball
|
||||||
|
|
||||||
|
Run
|
||||||
|
|
||||||
|
Python setup.py build
|
||||||
|
cp -r build/lib.win32-*/* /cygdrive/c/Python27/Lib/site-packages/
|
||||||
|
|
||||||
calibre
|
calibre
|
||||||
---------
|
---------
|
||||||
|
@ -152,7 +152,7 @@ class Translations(POT): # {{{
|
|||||||
subprocess.check_call(['msgfmt', '-o', dest, iso639])
|
subprocess.check_call(['msgfmt', '-o', dest, iso639])
|
||||||
elif locale not in ('en_GB', 'en_CA', 'en_AU', 'si', 'ur', 'sc',
|
elif locale not in ('en_GB', 'en_CA', 'en_AU', 'si', 'ur', 'sc',
|
||||||
'ltg', 'nds', 'te', 'yi', 'fo', 'sq', 'ast', 'ml', 'ku',
|
'ltg', 'nds', 'te', 'yi', 'fo', 'sq', 'ast', 'ml', 'ku',
|
||||||
'fr_CA', 'him'):
|
'fr_CA', 'him', 'jv', 'ka'):
|
||||||
self.warn('No ISO 639 translations for locale:', locale)
|
self.warn('No ISO 639 translations for locale:', locale)
|
||||||
|
|
||||||
self.write_stats()
|
self.write_stats()
|
||||||
|
@ -47,6 +47,21 @@ def installer_description(fname):
|
|||||||
return 'Calibre Portable'
|
return 'Calibre Portable'
|
||||||
return 'Unknown file'
|
return 'Unknown file'
|
||||||
|
|
||||||
|
def upload_signatures():
|
||||||
|
tdir = mkdtemp()
|
||||||
|
for installer in installers():
|
||||||
|
if not os.path.exists(installer):
|
||||||
|
continue
|
||||||
|
with open(installer, 'rb') as f:
|
||||||
|
raw = f.read()
|
||||||
|
fingerprint = hashlib.sha512(raw).hexdigest()
|
||||||
|
fname = os.path.basename(installer+'.sha512')
|
||||||
|
with open(os.path.join(tdir, fname), 'wb') as f:
|
||||||
|
f.write(fingerprint)
|
||||||
|
check_call('scp %s/*.sha512 divok:%s/signatures/' % (tdir, DOWNLOADS),
|
||||||
|
shell=True)
|
||||||
|
shutil.rmtree(tdir)
|
||||||
|
|
||||||
class ReUpload(Command): # {{{
|
class ReUpload(Command): # {{{
|
||||||
|
|
||||||
description = 'Re-uplaod any installers present in dist/'
|
description = 'Re-uplaod any installers present in dist/'
|
||||||
@ -57,6 +72,7 @@ class ReUpload(Command): # {{{
|
|||||||
opts.replace = True
|
opts.replace = True
|
||||||
|
|
||||||
def run(self, opts):
|
def run(self, opts):
|
||||||
|
upload_signatures()
|
||||||
for x in installers():
|
for x in installers():
|
||||||
if os.path.exists(x):
|
if os.path.exists(x):
|
||||||
os.remove(x)
|
os.remove(x)
|
||||||
@ -223,19 +239,7 @@ class UploadToServer(Command): # {{{
|
|||||||
%(__version__, DOWNLOADS), shell=True)
|
%(__version__, DOWNLOADS), shell=True)
|
||||||
check_call('ssh divok /etc/init.d/apache2 graceful',
|
check_call('ssh divok /etc/init.d/apache2 graceful',
|
||||||
shell=True)
|
shell=True)
|
||||||
tdir = mkdtemp()
|
upload_signatures()
|
||||||
for installer in installers():
|
|
||||||
if not os.path.exists(installer):
|
|
||||||
continue
|
|
||||||
with open(installer, 'rb') as f:
|
|
||||||
raw = f.read()
|
|
||||||
fingerprint = hashlib.sha512(raw).hexdigest()
|
|
||||||
fname = os.path.basename(installer+'.sha512')
|
|
||||||
with open(os.path.join(tdir, fname), 'wb') as f:
|
|
||||||
f.write(fingerprint)
|
|
||||||
check_call('scp %s/*.sha512 divok:%s/signatures/' % (tdir, DOWNLOADS),
|
|
||||||
shell=True)
|
|
||||||
shutil.rmtree(tdir)
|
|
||||||
# }}}
|
# }}}
|
||||||
|
|
||||||
# Testing {{{
|
# Testing {{{
|
||||||
|
@ -4,7 +4,7 @@ __license__ = 'GPL v3'
|
|||||||
__copyright__ = '2008, Kovid Goyal kovid@kovidgoyal.net'
|
__copyright__ = '2008, Kovid Goyal kovid@kovidgoyal.net'
|
||||||
__docformat__ = 'restructuredtext en'
|
__docformat__ = 'restructuredtext en'
|
||||||
__appname__ = u'calibre'
|
__appname__ = u'calibre'
|
||||||
numeric_version = (0, 8, 66)
|
numeric_version = (0, 8, 67)
|
||||||
__version__ = u'.'.join(map(unicode, numeric_version))
|
__version__ = u'.'.join(map(unicode, numeric_version))
|
||||||
__author__ = u"Kovid Goyal <kovid@kovidgoyal.net>"
|
__author__ = u"Kovid Goyal <kovid@kovidgoyal.net>"
|
||||||
|
|
||||||
|
@ -675,7 +675,6 @@ from calibre.devices.bambook.driver import BAMBOOK
|
|||||||
from calibre.devices.boeye.driver import BOEYE_BEX, BOEYE_BDX
|
from calibre.devices.boeye.driver import BOEYE_BEX, BOEYE_BDX
|
||||||
from calibre.devices.smart_device_app.driver import SMART_DEVICE_APP
|
from calibre.devices.smart_device_app.driver import SMART_DEVICE_APP
|
||||||
|
|
||||||
|
|
||||||
# Order here matters. The first matched device is the one used.
|
# Order here matters. The first matched device is the one used.
|
||||||
plugins += [
|
plugins += [
|
||||||
HANLINV3,
|
HANLINV3,
|
||||||
@ -749,6 +748,12 @@ plugins += [
|
|||||||
SMART_DEVICE_APP,
|
SMART_DEVICE_APP,
|
||||||
USER_DEFINED,
|
USER_DEFINED,
|
||||||
]
|
]
|
||||||
|
|
||||||
|
from calibre.utils.config_base import tweaks
|
||||||
|
if tweaks.get('test_mtp_driver', False):
|
||||||
|
from calibre.devices.mtp.driver import MTP_DEVICE
|
||||||
|
plugins.append(MTP_DEVICE)
|
||||||
|
|
||||||
# }}}
|
# }}}
|
||||||
|
|
||||||
# New metadata download plugins {{{
|
# New metadata download plugins {{{
|
||||||
|
@ -115,54 +115,65 @@ def debug(ioreg_to_tmp=False, buf=None, plugins=None):
|
|||||||
out('Available plugins:', textwrap.fill(' '.join([x.__class__.__name__ for x in
|
out('Available plugins:', textwrap.fill(' '.join([x.__class__.__name__ for x in
|
||||||
devplugins])))
|
devplugins])))
|
||||||
out(' ')
|
out(' ')
|
||||||
out('Looking for devices...')
|
found_dev = False
|
||||||
for dev in devplugins:
|
for dev in devplugins:
|
||||||
connected, det = s.is_device_connected(dev, debug=True)
|
if not dev.MANAGES_DEVICE_PRESENCE: continue
|
||||||
if connected:
|
out('Looking for devices of type:', dev.__class__.__name__)
|
||||||
out('\t\tDetected possible device', dev.__class__.__name__)
|
if dev.debug_managed_device_detection(s.devices, buf):
|
||||||
connected_devices.append((dev, det))
|
found_dev = True
|
||||||
|
break
|
||||||
out(' ')
|
|
||||||
errors = {}
|
|
||||||
success = False
|
|
||||||
out('Devices possibly connected:', end=' ')
|
|
||||||
for dev, det in connected_devices:
|
|
||||||
out(dev.name, end=', ')
|
|
||||||
if not connected_devices:
|
|
||||||
out('None', end='')
|
|
||||||
out(' ')
|
|
||||||
for dev, det in connected_devices:
|
|
||||||
out('Trying to open', dev.name, '...', end=' ')
|
|
||||||
try:
|
|
||||||
dev.reset(detected_device=det)
|
|
||||||
dev.open(det, None)
|
|
||||||
out('OK')
|
|
||||||
except:
|
|
||||||
import traceback
|
|
||||||
errors[dev] = traceback.format_exc()
|
|
||||||
out('failed')
|
|
||||||
continue
|
|
||||||
success = True
|
|
||||||
if hasattr(dev, '_main_prefix'):
|
|
||||||
out('Main memory:', repr(dev._main_prefix))
|
|
||||||
out('Total space:', dev.total_space())
|
|
||||||
break
|
|
||||||
if not success and errors:
|
|
||||||
out('Opening of the following devices failed')
|
|
||||||
for dev,msg in errors.items():
|
|
||||||
out(dev)
|
|
||||||
out(msg)
|
|
||||||
out(' ')
|
|
||||||
|
|
||||||
if ioreg is not None:
|
|
||||||
ioreg = 'IOREG Output\n'+ioreg
|
|
||||||
out(' ')
|
out(' ')
|
||||||
if ioreg_to_tmp:
|
|
||||||
open('/tmp/ioreg.txt', 'wb').write(ioreg)
|
if not found_dev:
|
||||||
out('Dont forget to send the contents of /tmp/ioreg.txt')
|
out('Looking for devices...')
|
||||||
out('You can open it with the command: open /tmp/ioreg.txt')
|
for dev in devplugins:
|
||||||
else:
|
if dev.MANAGES_DEVICE_PRESENCE: continue
|
||||||
out(ioreg)
|
connected, det = s.is_device_connected(dev, debug=True)
|
||||||
|
if connected:
|
||||||
|
out('\t\tDetected possible device', dev.__class__.__name__)
|
||||||
|
connected_devices.append((dev, det))
|
||||||
|
|
||||||
|
out(' ')
|
||||||
|
errors = {}
|
||||||
|
success = False
|
||||||
|
out('Devices possibly connected:', end=' ')
|
||||||
|
for dev, det in connected_devices:
|
||||||
|
out(dev.name, end=', ')
|
||||||
|
if not connected_devices:
|
||||||
|
out('None', end='')
|
||||||
|
out(' ')
|
||||||
|
for dev, det in connected_devices:
|
||||||
|
out('Trying to open', dev.name, '...', end=' ')
|
||||||
|
try:
|
||||||
|
dev.reset(detected_device=det)
|
||||||
|
dev.open(det, None)
|
||||||
|
out('OK')
|
||||||
|
except:
|
||||||
|
import traceback
|
||||||
|
errors[dev] = traceback.format_exc()
|
||||||
|
out('failed')
|
||||||
|
continue
|
||||||
|
success = True
|
||||||
|
if hasattr(dev, '_main_prefix'):
|
||||||
|
out('Main memory:', repr(dev._main_prefix))
|
||||||
|
out('Total space:', dev.total_space())
|
||||||
|
break
|
||||||
|
if not success and errors:
|
||||||
|
out('Opening of the following devices failed')
|
||||||
|
for dev,msg in errors.items():
|
||||||
|
out(dev)
|
||||||
|
out(msg)
|
||||||
|
out(' ')
|
||||||
|
|
||||||
|
if ioreg is not None:
|
||||||
|
ioreg = 'IOREG Output\n'+ioreg
|
||||||
|
out(' ')
|
||||||
|
if ioreg_to_tmp:
|
||||||
|
open('/tmp/ioreg.txt', 'wb').write(ioreg)
|
||||||
|
out('Dont forget to send the contents of /tmp/ioreg.txt')
|
||||||
|
out('You can open it with the command: open /tmp/ioreg.txt')
|
||||||
|
else:
|
||||||
|
out(ioreg)
|
||||||
|
|
||||||
if hasattr(buf, 'getvalue'):
|
if hasattr(buf, 'getvalue'):
|
||||||
return buf.getvalue().decode('utf-8')
|
return buf.getvalue().decode('utf-8')
|
||||||
|
@ -186,10 +186,15 @@ class ANDROID(USBMS):
|
|||||||
}
|
}
|
||||||
EBOOK_DIR_MAIN = ['eBooks/import', 'wordplayer/calibretransfer', 'Books',
|
EBOOK_DIR_MAIN = ['eBooks/import', 'wordplayer/calibretransfer', 'Books',
|
||||||
'sdcard/ebooks']
|
'sdcard/ebooks']
|
||||||
EXTRA_CUSTOMIZATION_MESSAGE = _('Comma separated list of directories to '
|
EXTRA_CUSTOMIZATION_MESSAGE = [_('Comma separated list of directories to '
|
||||||
'send e-books to on the device. The first one that exists will '
|
'send e-books to on the device\'s <b>main memory</b>. The first one that exists will '
|
||||||
|
'be used'),
|
||||||
|
_('Comma separated list of directories to '
|
||||||
|
'send e-books to on the device\'s <b>storage cards</b>. The first one that exists will '
|
||||||
'be used')
|
'be used')
|
||||||
EXTRA_CUSTOMIZATION_DEFAULT = ', '.join(EBOOK_DIR_MAIN)
|
]
|
||||||
|
|
||||||
|
EXTRA_CUSTOMIZATION_DEFAULT = [', '.join(EBOOK_DIR_MAIN), '']
|
||||||
|
|
||||||
VENDOR_NAME = ['HTC', 'MOTOROLA', 'GOOGLE_', 'ANDROID', 'ACER',
|
VENDOR_NAME = ['HTC', 'MOTOROLA', 'GOOGLE_', 'ANDROID', 'ACER',
|
||||||
'GT-I5700', 'SAMSUNG', 'DELL', 'LINUX', 'GOOGLE', 'ARCHOS',
|
'GT-I5700', 'SAMSUNG', 'DELL', 'LINUX', 'GOOGLE', 'ARCHOS',
|
||||||
@ -197,7 +202,8 @@ class ANDROID(USBMS):
|
|||||||
'GENERIC-', 'ZTE', 'MID', 'QUALCOMM', 'PANDIGIT', 'HYSTON',
|
'GENERIC-', 'ZTE', 'MID', 'QUALCOMM', 'PANDIGIT', 'HYSTON',
|
||||||
'VIZIO', 'GOOGLE', 'FREESCAL', 'KOBO_INC', 'LENOVO', 'ROCKCHIP',
|
'VIZIO', 'GOOGLE', 'FREESCAL', 'KOBO_INC', 'LENOVO', 'ROCKCHIP',
|
||||||
'POCKET', 'ONDA_MID', 'ZENITHIN', 'INGENIC', 'PMID701C', 'PD',
|
'POCKET', 'ONDA_MID', 'ZENITHIN', 'INGENIC', 'PMID701C', 'PD',
|
||||||
'PMP5097C', 'MASS', 'NOVO7', 'ZEKI', 'COBY', 'SXZ', 'USB_2.0']
|
'PMP5097C', 'MASS', 'NOVO7', 'ZEKI', 'COBY', 'SXZ', 'USB_2.0',
|
||||||
|
'COBY_MID']
|
||||||
WINDOWS_MAIN_MEM = ['ANDROID_PHONE', 'A855', 'A853', 'INC.NEXUS_ONE',
|
WINDOWS_MAIN_MEM = ['ANDROID_PHONE', 'A855', 'A853', 'INC.NEXUS_ONE',
|
||||||
'__UMS_COMPOSITE', '_MB200', 'MASS_STORAGE', '_-_CARD', 'SGH-I897',
|
'__UMS_COMPOSITE', '_MB200', 'MASS_STORAGE', '_-_CARD', 'SGH-I897',
|
||||||
'GT-I9000', 'FILE-STOR_GADGET', 'SGH-T959_CARD', 'SGH-T959', 'SAMSUNG_ANDROID',
|
'GT-I9000', 'FILE-STOR_GADGET', 'SGH-T959_CARD', 'SGH-T959', 'SAMSUNG_ANDROID',
|
||||||
@ -216,7 +222,7 @@ class ANDROID(USBMS):
|
|||||||
'GT-S5830L_CARD', 'UNIVERSE', 'XT875', 'PRO', '.KOBO_VOX',
|
'GT-S5830L_CARD', 'UNIVERSE', 'XT875', 'PRO', '.KOBO_VOX',
|
||||||
'THINKPAD_TABLET', 'SGH-T989', 'YP-G70', 'STORAGE_DEVICE',
|
'THINKPAD_TABLET', 'SGH-T989', 'YP-G70', 'STORAGE_DEVICE',
|
||||||
'ADVANCED', 'SGH-I727', 'USB_FLASH_DRIVER', 'ANDROID',
|
'ADVANCED', 'SGH-I727', 'USB_FLASH_DRIVER', 'ANDROID',
|
||||||
'S5830I_CARD', 'MID7042', 'LINK-CREATE']
|
'S5830I_CARD', 'MID7042', 'LINK-CREATE', '7035']
|
||||||
WINDOWS_CARD_A_MEM = ['ANDROID_PHONE', 'GT-I9000_CARD', 'SGH-I897',
|
WINDOWS_CARD_A_MEM = ['ANDROID_PHONE', 'GT-I9000_CARD', 'SGH-I897',
|
||||||
'FILE-STOR_GADGET', 'SGH-T959_CARD', 'SGH-T959', 'SAMSUNG_ANDROID', 'GT-P1000_CARD',
|
'FILE-STOR_GADGET', 'SGH-T959_CARD', 'SGH-T959', 'SAMSUNG_ANDROID', 'GT-P1000_CARD',
|
||||||
'A70S', 'A101IT', '7', 'INCREDIBLE', 'A7EB', 'SGH-T849_CARD',
|
'A70S', 'A101IT', '7', 'INCREDIBLE', 'A7EB', 'SGH-T849_CARD',
|
||||||
@ -226,7 +232,7 @@ class ANDROID(USBMS):
|
|||||||
'USB_2.0_DRIVER', 'I9100T', 'P999DW_SD_CARD', 'KTABLET_PC',
|
'USB_2.0_DRIVER', 'I9100T', 'P999DW_SD_CARD', 'KTABLET_PC',
|
||||||
'FILE-CD_GADGET', 'GT-I9001_CARD', 'USB_2.0', 'XT875',
|
'FILE-CD_GADGET', 'GT-I9001_CARD', 'USB_2.0', 'XT875',
|
||||||
'UMS_COMPOSITE', 'PRO', '.KOBO_VOX', 'SGH-T989_CARD', 'SGH-I727',
|
'UMS_COMPOSITE', 'PRO', '.KOBO_VOX', 'SGH-T989_CARD', 'SGH-I727',
|
||||||
'USB_FLASH_DRIVER', 'ANDROID', 'MID7042']
|
'USB_FLASH_DRIVER', 'ANDROID', 'MID7042', '7035']
|
||||||
|
|
||||||
OSX_MAIN_MEM = 'Android Device Main Memory'
|
OSX_MAIN_MEM = 'Android Device Main Memory'
|
||||||
|
|
||||||
@ -236,23 +242,35 @@ class ANDROID(USBMS):
|
|||||||
|
|
||||||
def post_open_callback(self):
|
def post_open_callback(self):
|
||||||
opts = self.settings()
|
opts = self.settings()
|
||||||
dirs = opts.extra_customization
|
opts = opts.extra_customization
|
||||||
if not dirs:
|
if not opts:
|
||||||
dirs = self.EBOOK_DIR_MAIN
|
opts = [self.EBOOK_DIR_MAIN, '']
|
||||||
else:
|
|
||||||
dirs = [x.strip() for x in dirs.split(',')]
|
def strtolist(x):
|
||||||
self.EBOOK_DIR_MAIN = dirs
|
if isinstance(x, basestring):
|
||||||
|
x = [y.strip() for y in x.split(',')]
|
||||||
|
return x or []
|
||||||
|
|
||||||
|
opts = [strtolist(x) for x in opts]
|
||||||
|
self._android_main_ebook_dir = opts[0]
|
||||||
|
self._android_card_ebook_dir = opts[1]
|
||||||
|
|
||||||
def get_main_ebook_dir(self, for_upload=False):
|
def get_main_ebook_dir(self, for_upload=False):
|
||||||
dirs = self.EBOOK_DIR_MAIN
|
dirs = self._android_main_ebook_dir
|
||||||
if not for_upload:
|
if not for_upload:
|
||||||
def aldiko_tweak(x):
|
def aldiko_tweak(x):
|
||||||
return 'eBooks' if x == 'eBooks/import' else x
|
return 'eBooks' if x == 'eBooks/import' else x
|
||||||
if isinstance(dirs, basestring):
|
|
||||||
dirs = [dirs]
|
|
||||||
dirs = list(map(aldiko_tweak, dirs))
|
dirs = list(map(aldiko_tweak, dirs))
|
||||||
return dirs
|
return dirs
|
||||||
|
|
||||||
|
def get_carda_ebook_dir(self, for_upload=False):
|
||||||
|
if not for_upload:
|
||||||
|
return ''
|
||||||
|
return self._android_card_ebook_dir
|
||||||
|
|
||||||
|
def get_cardb_ebook_dir(self, for_upload=False):
|
||||||
|
return self.get_carda_ebook_dir()
|
||||||
|
|
||||||
def windows_sort_drives(self, drives):
|
def windows_sort_drives(self, drives):
|
||||||
try:
|
try:
|
||||||
vid, pid, bcd = self.device_being_opened[:3]
|
vid, pid, bcd = self.device_being_opened[:3]
|
||||||
@ -270,7 +288,8 @@ class ANDROID(USBMS):
|
|||||||
proxy = cls._configProxy()
|
proxy = cls._configProxy()
|
||||||
proxy['format_map'] = ['mobi', 'azw', 'azw1', 'azw4', 'pdf']
|
proxy['format_map'] = ['mobi', 'azw', 'azw1', 'azw4', 'pdf']
|
||||||
proxy['use_subdirs'] = False
|
proxy['use_subdirs'] = False
|
||||||
proxy['extra_customization'] = ','.join(['kindle']+cls.EBOOK_DIR_MAIN)
|
proxy['extra_customization'] = [
|
||||||
|
','.join(['kindle']+cls.EBOOK_DIR_MAIN), '']
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def configure_for_generic_epub_app(cls):
|
def configure_for_generic_epub_app(cls):
|
||||||
|
@ -9,7 +9,7 @@ For usage information run the script.
|
|||||||
import StringIO, sys, time, os
|
import StringIO, sys, time, os
|
||||||
from optparse import OptionParser
|
from optparse import OptionParser
|
||||||
|
|
||||||
from calibre import __version__, __appname__
|
from calibre import __version__, __appname__, human_readable
|
||||||
from calibre.devices.errors import PathError
|
from calibre.devices.errors import PathError
|
||||||
from calibre.utils.terminfo import TerminalController
|
from calibre.utils.terminfo import TerminalController
|
||||||
from calibre.devices.errors import ArgumentError, DeviceError, DeviceLocked
|
from calibre.devices.errors import ArgumentError, DeviceError, DeviceLocked
|
||||||
@ -18,16 +18,6 @@ from calibre.devices.scanner import DeviceScanner
|
|||||||
|
|
||||||
MINIMUM_COL_WIDTH = 12 #: Minimum width of columns in ls output
|
MINIMUM_COL_WIDTH = 12 #: Minimum width of columns in ls output
|
||||||
|
|
||||||
def human_readable(size):
|
|
||||||
""" Convert a size in bytes into a human readle form """
|
|
||||||
if size < 1024: divisor, suffix = 1, ""
|
|
||||||
elif size < 1024*1024: divisor, suffix = 1024., "K"
|
|
||||||
elif size < 1024*1024*1024: divisor, suffix = 1024*1024, "M"
|
|
||||||
elif size < 1024*1024*1024*1024: divisor, suffix = 1024*1024, "G"
|
|
||||||
size = str(size/divisor)
|
|
||||||
if size.find(".") > -1: size = size[:size.find(".")+2]
|
|
||||||
return size + suffix
|
|
||||||
|
|
||||||
class FileFormatter(object):
|
class FileFormatter(object):
|
||||||
def __init__(self, file, term):
|
def __init__(self, file, term):
|
||||||
self.term = term
|
self.term = term
|
||||||
@ -207,11 +197,19 @@ def main():
|
|||||||
scanner = DeviceScanner()
|
scanner = DeviceScanner()
|
||||||
scanner.scan()
|
scanner.scan()
|
||||||
connected_devices = []
|
connected_devices = []
|
||||||
|
|
||||||
for d in device_plugins():
|
for d in device_plugins():
|
||||||
try:
|
try:
|
||||||
d.startup()
|
d.startup()
|
||||||
except:
|
except:
|
||||||
print ('Startup failed for device plugin: %s'%d)
|
print ('Startup failed for device plugin: %s'%d)
|
||||||
|
if d.MANAGES_DEVICE_PRESENCE:
|
||||||
|
cd = d.detect_managed_devices(scanner.devices)
|
||||||
|
if cd is not None:
|
||||||
|
connected_devices.append((cd, d))
|
||||||
|
dev = d
|
||||||
|
break
|
||||||
|
continue
|
||||||
ok, det = scanner.is_device_connected(d)
|
ok, det = scanner.is_device_connected(d)
|
||||||
if ok:
|
if ok:
|
||||||
dev = d
|
dev = d
|
||||||
|
@ -81,6 +81,19 @@ class DevicePlugin(Plugin):
|
|||||||
#: by.
|
#: by.
|
||||||
NUKE_COMMENTS = None
|
NUKE_COMMENTS = None
|
||||||
|
|
||||||
|
#: If True indicates that this driver completely manages device detection,
|
||||||
|
#: ejecting and so forth. If you set this to True, you *must* implement the
|
||||||
|
#: detect_managed_devices and debug_managed_device_detection methods.
|
||||||
|
#: A driver with this set to true is responsible for detection of devices,
|
||||||
|
#: managing a blacklist of devices, a list of ejected devices and so forth.
|
||||||
|
#: calibre will periodically call the detect_managed_devices() method and
|
||||||
|
#: is it returns a detected device, calibre will call open(). open() will
|
||||||
|
#: be called every time a device is returned even is previous calls to open()
|
||||||
|
#: failed, therefore the driver must maintain its own blacklist of failed
|
||||||
|
#: devices. Similarly, when ejecting, calibre will call eject() and then
|
||||||
|
#: assuming the next call to detect_managed_devices() returns None, it will
|
||||||
|
#: call post_yank_cleanup().
|
||||||
|
MANAGES_DEVICE_PRESENCE = False
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def get_gui_name(cls):
|
def get_gui_name(cls):
|
||||||
@ -196,6 +209,37 @@ class DevicePlugin(Plugin):
|
|||||||
return True, dev
|
return True, dev
|
||||||
return False, None
|
return False, None
|
||||||
|
|
||||||
|
def detect_managed_devices(self, devices_on_system, force_refresh=False):
|
||||||
|
'''
|
||||||
|
Called only if MANAGES_DEVICE_PRESENCE is True.
|
||||||
|
|
||||||
|
Scan for devices that this driver can handle. Should return a device
|
||||||
|
object if a device is found. This object will be passed to the open()
|
||||||
|
method as the connected_device. If no device is found, return None.
|
||||||
|
|
||||||
|
This method is called periodically by the GUI, so make sure it is not
|
||||||
|
too resource intensive. Use a cache to avoid repeatedly scanning the
|
||||||
|
system.
|
||||||
|
|
||||||
|
:param devices_on_system: Set of USB devices found on the system.
|
||||||
|
|
||||||
|
:param force_refresh: If True and the driver uses a cache to prevent
|
||||||
|
repeated scanning, the cache must be flushed.
|
||||||
|
'''
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
def debug_managed_device_detection(self, devices_on_system, output):
|
||||||
|
'''
|
||||||
|
Called only if MANAGES_DEVICE_PRESENCE is True.
|
||||||
|
|
||||||
|
Should write information about the devices detected on the system to
|
||||||
|
output, which is a file like object.
|
||||||
|
|
||||||
|
Should return True if a device was detected and successfully opened,
|
||||||
|
otherwise False.
|
||||||
|
'''
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
# }}}
|
# }}}
|
||||||
|
|
||||||
def reset(self, key='-1', log_packets=False, report_progress=None,
|
def reset(self, key='-1', log_packets=False, report_progress=None,
|
||||||
@ -270,6 +314,9 @@ class DevicePlugin(Plugin):
|
|||||||
'''
|
'''
|
||||||
Un-mount / eject the device from the OS. This does not check if there
|
Un-mount / eject the device from the OS. This does not check if there
|
||||||
are pending GUI jobs that need to communicate with the device.
|
are pending GUI jobs that need to communicate with the device.
|
||||||
|
|
||||||
|
NOTE: That this method may not be called on the same thread as the rest
|
||||||
|
of the device methods.
|
||||||
'''
|
'''
|
||||||
raise NotImplementedError()
|
raise NotImplementedError()
|
||||||
|
|
||||||
@ -496,6 +543,10 @@ class DevicePlugin(Plugin):
|
|||||||
'''
|
'''
|
||||||
Given a list of paths, returns another list of paths. These paths
|
Given a list of paths, returns another list of paths. These paths
|
||||||
point to addable versions of the books.
|
point to addable versions of the books.
|
||||||
|
|
||||||
|
If there is an error preparing a book, then instead of a path, the
|
||||||
|
position in the returned list for that book should be a three tuple:
|
||||||
|
(original_path, the exception instance, traceback)
|
||||||
'''
|
'''
|
||||||
return paths
|
return paths
|
||||||
|
|
||||||
|
@ -9,8 +9,14 @@ __docformat__ = 'restructuredtext en'
|
|||||||
|
|
||||||
from functools import wraps
|
from functools import wraps
|
||||||
|
|
||||||
|
from calibre import prints
|
||||||
|
from calibre.constants import DEBUG
|
||||||
from calibre.devices.interface import DevicePlugin
|
from calibre.devices.interface import DevicePlugin
|
||||||
|
|
||||||
|
def debug(*args, **kwargs):
|
||||||
|
if DEBUG:
|
||||||
|
prints('MTP:', *args, **kwargs)
|
||||||
|
|
||||||
def synchronous(func):
|
def synchronous(func):
|
||||||
@wraps(func)
|
@wraps(func)
|
||||||
def synchronizer(self, *args, **kwargs):
|
def synchronizer(self, *args, **kwargs):
|
||||||
@ -19,32 +25,29 @@ def synchronous(func):
|
|||||||
return synchronizer
|
return synchronizer
|
||||||
|
|
||||||
class MTPDeviceBase(DevicePlugin):
|
class MTPDeviceBase(DevicePlugin):
|
||||||
name = 'SmartDevice App Interface'
|
name = 'MTP Device Interface'
|
||||||
gui_name = _('MTP Device')
|
gui_name = _('MTP Device')
|
||||||
icon = I('devices/galaxy_s3.png')
|
icon = I('devices/galaxy_s3.png')
|
||||||
description = _('Communicate with MTP devices')
|
description = _('Communicate with MTP devices')
|
||||||
author = 'Kovid Goyal'
|
author = 'Kovid Goyal'
|
||||||
version = (1, 0, 0)
|
version = (1, 0, 0)
|
||||||
|
|
||||||
THUMBNAIL_HEIGHT = 128
|
|
||||||
CAN_SET_METADATA = []
|
|
||||||
|
|
||||||
BACKLOADING_ERROR_MESSAGE = None
|
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
def __init__(self, *args, **kwargs):
|
||||||
DevicePlugin.__init__(self, *args, **kwargs)
|
DevicePlugin.__init__(self, *args, **kwargs)
|
||||||
self.progress_reporter = None
|
self.progress_reporter = None
|
||||||
self.current_friendly_name = None
|
self.current_friendly_name = None
|
||||||
|
self.report_progress = lambda x, y: None
|
||||||
|
|
||||||
def reset(self, key='-1', log_packets=False, report_progress=None,
|
def reset(self, key='-1', log_packets=False, report_progress=None,
|
||||||
detected_device=None):
|
detected_device=None):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def set_progress_reporter(self, report_progress):
|
def set_progress_reporter(self, report_progress):
|
||||||
self.progress_reporter = report_progress
|
self.report_progress = report_progress
|
||||||
|
|
||||||
def get_gui_name(self):
|
@classmethod
|
||||||
return self.current_friendly_name or self.name
|
def get_gui_name(cls):
|
||||||
|
return getattr(cls, 'current_friendly_name', cls.gui_name)
|
||||||
|
|
||||||
def is_usb_connected(self, devices_on_system, debug=False,
|
def is_usb_connected(self, devices_on_system, debug=False,
|
||||||
only_presence=False):
|
only_presence=False):
|
||||||
@ -52,4 +55,17 @@ class MTPDeviceBase(DevicePlugin):
|
|||||||
# return False
|
# return False
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
def build_template_regexp(self):
|
||||||
|
from calibre.devices.utils import build_template_regexp
|
||||||
|
return build_template_regexp(self.save_template)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def default_save_template(cls):
|
||||||
|
from calibre.library.save_to_disk import config
|
||||||
|
return config().parse().send_template
|
||||||
|
|
||||||
|
@property
|
||||||
|
def save_template(self):
|
||||||
|
# TODO: Use the device specific template here
|
||||||
|
return self.default_save_template
|
||||||
|
|
||||||
|
68
src/calibre/devices/mtp/books.py
Normal file
68
src/calibre/devices/mtp/books.py
Normal file
@ -0,0 +1,68 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:fdm=marker:ai
|
||||||
|
from __future__ import (unicode_literals, division, absolute_import,
|
||||||
|
print_function)
|
||||||
|
|
||||||
|
__license__ = 'GPL v3'
|
||||||
|
__copyright__ = '2012, Kovid Goyal <kovid at kovidgoyal.net>'
|
||||||
|
__docformat__ = 'restructuredtext en'
|
||||||
|
|
||||||
|
import os
|
||||||
|
|
||||||
|
from calibre.devices.interface import BookList as BL
|
||||||
|
from calibre.ebooks.metadata.book.base import Metadata
|
||||||
|
from calibre.ebooks.metadata.book.json_codec import JsonCodec
|
||||||
|
from calibre.utils.date import utcnow
|
||||||
|
|
||||||
|
class BookList(BL):
|
||||||
|
|
||||||
|
def __init__(self, storage_id):
|
||||||
|
self.storage_id = storage_id
|
||||||
|
|
||||||
|
def supports_collections(self):
|
||||||
|
return False
|
||||||
|
|
||||||
|
def add_book(self, book, replace_metadata=True):
|
||||||
|
try:
|
||||||
|
b = self.index(book)
|
||||||
|
except (ValueError, IndexError):
|
||||||
|
b = None
|
||||||
|
if b is None:
|
||||||
|
self.append(book)
|
||||||
|
return book
|
||||||
|
if replace_metadata:
|
||||||
|
self[b].smart_update(book, replace_metadata=True)
|
||||||
|
return self[b]
|
||||||
|
return None
|
||||||
|
|
||||||
|
def remove_book(self, book):
|
||||||
|
self.remove(book)
|
||||||
|
|
||||||
|
class Book(Metadata):
|
||||||
|
|
||||||
|
def __init__(self, storage_id, lpath, other=None):
|
||||||
|
Metadata.__init__(self, _('Unknown'), other=other)
|
||||||
|
self.storage_id, self.lpath = storage_id, lpath
|
||||||
|
self.lpath = self.path = self.lpath.replace(os.sep, '/')
|
||||||
|
self.mtp_relpath = tuple([icu_lower(x) for x in self.lpath.split('/')])
|
||||||
|
self.datetime = utcnow().timetuple()
|
||||||
|
self.thumbail = None
|
||||||
|
|
||||||
|
def matches_file(self, mtp_file):
|
||||||
|
return (self.storage_id == mtp_file.storage_id and
|
||||||
|
self.mtp_relpath == mtp_file.mtp_relpath)
|
||||||
|
|
||||||
|
def __eq__(self, other):
|
||||||
|
return (isinstance(other, self.__class__) and (self.storage_id ==
|
||||||
|
other.storage_id and self.mtp_relpath == other.mtp_relpath))
|
||||||
|
|
||||||
|
def __ne__(self, other):
|
||||||
|
return not self.__eq__(other)
|
||||||
|
|
||||||
|
def __hash__(self):
|
||||||
|
return hash((self.storage_id, self.mtp_relpath))
|
||||||
|
|
||||||
|
|
||||||
|
class JSONCodec(JsonCodec):
|
||||||
|
pass
|
||||||
|
|
@@ -7,14 +7,390 @@ __license__ = 'GPL v3'
 __copyright__ = '2012, Kovid Goyal <kovid at kovidgoyal.net>'
 __docformat__ = 'restructuredtext en'

-from calibre.constants import iswindows
+import json, traceback, posixpath, importlib, os
+from io import BytesIO
+from itertools import izip

-if iswindows:
-    from calibre.devices.mtp.windows.driver import MTP_DEVICE as BASE
-    BASE
-else:
-    from calibre.devices.mtp.unix.driver import MTP_DEVICE as BASE
+from calibre import prints
+from calibre.constants import iswindows, numeric_version
+from calibre.devices.mtp.base import debug
+from calibre.ptempfile import SpooledTemporaryFile, PersistentTemporaryDirectory
+from calibre.utils.config import from_json, to_json
+from calibre.utils.date import now, isoformat

+BASE = importlib.import_module('calibre.devices.mtp.%s.driver'%(
+    'windows' if iswindows else 'unix')).MTP_DEVICE
+
 class MTP_DEVICE(BASE):
-    pass

+    METADATA_CACHE = 'metadata.calibre'
+    DRIVEINFO = 'driveinfo.calibre'
+    CAN_SET_METADATA = []
+    NEWS_IN_FOLDER = True
+    MAX_PATH_LEN = 230
+    THUMBNAIL_HEIGHT = 160
+    THUMBNAIL_WIDTH = 120
+    CAN_SET_METADATA = []
+    BACKLOADING_ERROR_MESSAGE = None
+    MANAGES_DEVICE_PRESENCE = True
+    FORMATS = ['epub', 'azw3', 'mobi', 'pdf']
+    DEVICE_PLUGBOARD_NAME = 'MTP_DEVICE'
+
+    def __init__(self, *args, **kwargs):
+        BASE.__init__(self, *args, **kwargs)
+        self.plugboards = self.plugboard_func = None
+
+    def open(self, devices, library_uuid):
+        self.current_library_uuid = library_uuid
+        BASE.open(self, devices, library_uuid)
+
+    # Device information {{{
+    def _update_drive_info(self, storage, location_code, name=None):
+        import uuid
+        f = storage.find_path((self.DRIVEINFO,))
+        dinfo = {}
+        if f is not None:
+            stream = self.get_mtp_file(f)
+            try:
+                dinfo = json.load(stream, object_hook=from_json)
+            except:
+                dinfo = None
+        if dinfo.get('device_store_uuid', None) is None:
+            dinfo['device_store_uuid'] = unicode(uuid.uuid4())
+        if dinfo.get('device_name', None) is None:
+            dinfo['device_name'] = self.current_friendly_name
+        if name is not None:
+            dinfo['device_name'] = name
+        dinfo['location_code'] = location_code
+        dinfo['last_library_uuid'] = getattr(self, 'current_library_uuid', None)
+        dinfo['calibre_version'] = '.'.join([unicode(i) for i in numeric_version])
+        dinfo['date_last_connected'] = isoformat(now())
+        dinfo['mtp_prefix'] = storage.storage_prefix
+        raw = json.dumps(dinfo, default=to_json)
+        self.put_file(storage, self.DRIVEINFO, BytesIO(raw), len(raw))
+        self.driveinfo = dinfo
+
+    def get_device_information(self, end_session=True):
+        self.report_progress(1.0, _('Get device information...'))
+        self.driveinfo = {}
+        for sid, location_code in ( (self._main_id, 'main'), (self._carda_id,
+            'A'), (self._cardb_id, 'B')):
+            if sid is None: continue
+            self._update_drive_info(self.filesystem_cache.storage(sid), location_code)
+        dinfo = self.get_basic_device_information()
+        return tuple( list(dinfo) + [self.driveinfo] )
+
+    def card_prefix(self, end_session=True):
+        return (self._carda_id, self._cardb_id)
+
+    def set_driveinfo_name(self, location_code, name):
+        sid = {'main':self._main_id, 'A':self._carda_id,
+                'B':self._cardb_id}.get(location_code, None)
+        if sid is None:
+            return
+        self._update_drive_info(self.filesystem_cache.storage(sid),
+                location_code, name=name)
+    # }}}
+
+    # Get list of books from device, with metadata {{{
+    def books(self, oncard=None, end_session=True):
+        from calibre.devices.mtp.books import JSONCodec
+        from calibre.devices.mtp.books import BookList, Book
+        sid = {'carda':self._carda_id, 'cardb':self._cardb_id}.get(oncard,
+                self._main_id)
+        if sid is None:
+            return BookList(None)
+
+        bl = BookList(sid)
+        # If True then there is a mismatch between the ebooks on the device and
+        # the metadata cache
+        need_sync = False
+        all_books = list(self.filesystem_cache.iterebooks(sid))
+        steps = len(all_books) + 2
+        count = 0
+
+        self.report_progress(0, _('Reading metadata from device'))
+        # Read the cache if it exists
+        storage = self.filesystem_cache.storage(sid)
+        cache = storage.find_path((self.METADATA_CACHE,))
+        if cache is not None:
+            json_codec = JSONCodec()
+            try:
+                stream = self.get_mtp_file(cache)
+                json_codec.decode_from_file(stream, bl, Book, sid)
+            except:
+                need_sync = True
+
+        relpath_cache = {b.mtp_relpath:i for i, b in enumerate(bl)}
+
+        for mtp_file in all_books:
+            count += 1
+            relpath = mtp_file.mtp_relpath
+            idx = relpath_cache.get(relpath, None)
+            if idx is not None:
+                cached_metadata = bl[idx]
+                del relpath_cache[relpath]
+                if cached_metadata.size == mtp_file.size:
+                    cached_metadata.datetime = mtp_file.last_modified.timetuple()
+                    cached_metadata.path = mtp_file.mtp_id_path
+                    debug('Using cached metadata for',
+                            '/'.join(mtp_file.full_path))
+                    continue # No need to update metadata
+                book = cached_metadata
+            else:
+                book = Book(sid, '/'.join(relpath))
+                bl.append(book)
+
+            need_sync = True
+            self.report_progress(count/steps, _('Reading metadata from %s')%
+                    ('/'.join(relpath)))
+            try:
+                book.smart_update(self.read_file_metadata(mtp_file))
+                debug('Read metadata for', '/'.join(mtp_file.full_path))
+            except:
+                prints('Failed to read metadata from',
+                        '/'.join(mtp_file.full_path))
+                traceback.print_exc()
+            book.size = mtp_file.size
+            book.datetime = mtp_file.last_modified.timetuple()
+            book.path = mtp_file.mtp_id_path
+
+        # Remove books in the cache that no longer exist
+        for idx in sorted(relpath_cache.itervalues(), reverse=True):
+            del bl[idx]
+            need_sync = True
+
+        if need_sync:
+            self.report_progress(count/steps, _('Updating metadata cache on device'))
+            self.write_metadata_cache(storage, bl)
+
+        self.report_progress(1, _('Finished reading metadata from device'))
+        return bl
+
+    def read_file_metadata(self, mtp_file):
+        from calibre.ebooks.metadata.meta import get_metadata
+        from calibre.customize.ui import quick_metadata
+        ext = mtp_file.name.rpartition('.')[-1].lower()
+        stream = self.get_mtp_file(mtp_file)
+        with quick_metadata:
+            return get_metadata(stream, stream_type=ext,
+                    force_read_metadata=True,
+                    pattern=self.build_template_regexp())
+
+    def write_metadata_cache(self, storage, bl):
+        from calibre.devices.mtp.books import JSONCodec
+
+        if bl.storage_id != storage.storage_id:
+            # Just a sanity check, should never happen
+            return
+
+        json_codec = JSONCodec()
+        stream = SpooledTemporaryFile(10*(1024**2))
+        json_codec.encode_to_file(stream, bl)
+        size = stream.tell()
+        stream.seek(0)
+        self.put_file(storage, self.METADATA_CACHE, stream, size)
+
+    def sync_booklists(self, booklists, end_session=True):
+        debug('sync_booklists() called')
+        for bl in booklists:
+            if getattr(bl, 'storage_id', None) is None:
+                continue
+            storage = self.filesystem_cache.storage(bl.storage_id)
+            if storage is None:
+                continue
+            self.write_metadata_cache(storage, bl)
+        debug('sync_booklists() ended')
+
+    # }}}
+
+    # Get files from the device {{{
+    def get_file(self, path, outfile, end_session=True):
+        f = self.filesystem_cache.resolve_mtp_id_path(path)
+        self.get_mtp_file(f, outfile)
+
+    def prepare_addable_books(self, paths):
+        tdir = PersistentTemporaryDirectory('_prepare_mtp')
+        ans = []
+        for path in paths:
+            try:
+                f = self.filesystem_cache.resolve_mtp_id_path(path)
+            except Exception as e:
+                ans.append((path, e, traceback.format_exc()))
+                continue
+            base = os.path.join(tdir, '%s'%f.object_id)
+            os.mkdir(base)
+            with open(os.path.join(base, f.name), 'wb') as out:
+                try:
+                    self.get_mtp_file(f, out)
+                except Exception as e:
+                    ans.append((path, e, traceback.format_exc()))
+                else:
+                    ans.append(out.name)
+        return ans
+    # }}}
+
+    # Sending files to the device {{{
+
+    def set_plugboards(self, plugboards, pb_func):
+        self.plugboards = plugboards
+        self.plugboard_func = pb_func
+
+    def create_upload_path(self, path, mdata, fname):
+        from calibre.devices.utils import create_upload_path
+        from calibre.utils.filenames import ascii_filename as sanitize
+        filepath = create_upload_path(mdata, fname, self.save_template, sanitize,
+                prefix_path=path,
+                path_type=posixpath,
+                maxlen=self.MAX_PATH_LEN,
+                use_subdirs = True,
+                news_in_folder = self.NEWS_IN_FOLDER,
+                )
+        return tuple(x for x in filepath.split('/'))
+
+    def prefix_for_location(self, on_card):
+        # TODO: Implement this
+        return 'calibre'
+
+    def ensure_parent(self, storage, path):
+        parent = storage
+        pos = list(path)[:-1]
+        while pos:
+            name = pos[0]
+            pos = pos[1:]
+            parent = self.create_folder(parent, name)
+        return parent
+
+    def upload_books(self, files, names, on_card=None, end_session=True,
+            metadata=None):
+        debug('upload_books() called')
+        from calibre.devices.utils import sanity_check
+        sanity_check(on_card, files, self.card_prefix(), self.free_space())
+        prefix = self.prefix_for_location(on_card)
+        sid = {'carda':self._carda_id, 'cardb':self._cardb_id}.get(on_card,
+                self._main_id)
+        bl_idx = {'carda':1, 'cardb':2}.get(on_card, 0)
+        storage = self.filesystem_cache.storage(sid)
+
+        ans = []
+        self.report_progress(0, _('Transferring books to device...'))
+        i, total = 0, len(files)
+
+        for infile, fname, mi in izip(files, names, metadata):
+            path = self.create_upload_path(prefix, mi, fname)
+            parent = self.ensure_parent(storage, path)
+            if hasattr(infile, 'read'):
+                pos = infile.tell()
+                infile.seek(0, 2)
+                sz = infile.tell()
+                infile.seek(pos)
+                stream = infile
+                close = False
+            else:
+                sz = os.path.getsize(infile)
+                stream = lopen(infile, 'rb')
+                close = True
+            try:
+                mtp_file = self.put_file(parent, path[-1], stream, sz)
+            finally:
+                if close:
+                    stream.close()
+            ans.append((mtp_file, bl_idx))
+            i += 1
+            self.report_progress(i/total, _('Transferred %s to device')%mi.title)
+
+        self.report_progress(1, _('Transfer to device finished...'))
+        debug('upload_books() ended')
+        return ans
+
+    def add_books_to_metadata(self, mtp_files, metadata, booklists):
+        debug('add_books_to_metadata() called')
+        from calibre.devices.mtp.books import Book
+
+        i, total = 0, len(mtp_files)
+        self.report_progress(0, _('Adding books to device metadata listing...'))
+        for x, mi in izip(mtp_files, metadata):
+            mtp_file, bl_idx = x
+            bl = booklists[bl_idx]
+            book = Book(mtp_file.storage_id, '/'.join(mtp_file.mtp_relpath),
+                    other=mi)
+            book = bl.add_book(book, replace_metadata=True)
+            if book is not None:
+                book.size = mtp_file.size
+                book.datetime = mtp_file.last_modified.timetuple()
+                book.path = mtp_file.mtp_id_path
+            i += 1
+            self.report_progress(i/total, _('Added %s')%mi.title)
+
+        self.report_progress(1, _('Adding complete'))
+        debug('add_books_to_metadata() ended')
+
+    # }}}
+
+    # Removing books from the device {{{
+    def recursive_delete(self, obj):
+        parent = self.delete_file_or_folder(obj)
+        if parent.empty and parent.can_delete and not parent.is_system:
+            try:
+                self.recursive_delete(parent)
+            except:
+                prints('Failed to delete parent: %s, ignoring'%(
+                    '/'.join(parent.full_path)))
+
+    def delete_books(self, paths, end_session=True):
+        self.report_progress(0, _('Deleting books from device...'))
+
+        for i, path in enumerate(paths):
+            f = self.filesystem_cache.resolve_mtp_id_path(path)
+            self.recursive_delete(f)
+            self.report_progress((i+1) / float(len(paths)),
+                    _('Deleted %s')%path)
+        self.report_progress(1, _('All books deleted'))
+
+    def remove_books_from_metadata(self, paths, booklists):
+        self.report_progress(0, _('Removing books from metadata'))
+        class NextPath(Exception): pass
+
+        for i, path in enumerate(paths):
+            try:
+                for bl in booklists:
+                    for book in bl:
+                        if book.path == path:
+                            bl.remove_book(book)
+                            raise NextPath('')
+            except NextPath:
+                pass
+            self.report_progress((i+1)/len(paths), _('Removed %s')%path)
+
+        self.report_progress(1, _('All books removed'))
+
+    # }}}
+
+    # Settings {{{
+    @classmethod
+    def settings(self):
+        # TODO: Implement this
+        class Opts(object):
+            def __init__(s):
+                s.format_map = self.FORMATS
+        return Opts()
+
+    # }}}
+
+if __name__ == '__main__':
+    dev = MTP_DEVICE(None)
+    dev.startup()
+    try:
+        from calibre.devices.scanner import DeviceScanner
+        scanner = DeviceScanner()
+        scanner.scan()
+        devs = scanner.devices
+        cd = dev.detect_managed_devices(devs)
+        if cd is None:
+            raise ValueError('Failed to detect MTP device')
+        dev.set_progress_reporter(prints)
+        dev.open(cd, None)
+        dev.filesystem_cache.dump()
+    finally:
+        dev.shutdown()
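Note: `_update_drive_info` above serializes a small JSON record to `driveinfo.calibre` on each storage volume. A sketch of the shape of that record, for reference (field names are taken from the code above; the values shown are made-up examples, not output from a real device):

```python
import json, uuid
from datetime import datetime

# Illustrative record only: the real driver fills these from the connected
# device and the current calibre session.
driveinfo = {
    'device_store_uuid': str(uuid.uuid4()),
    'device_name': 'Example MTP device',      # friendly name or user override
    'location_code': 'main',                  # 'main', 'A' or 'B'
    'last_library_uuid': None,
    'calibre_version': '0.8.67',
    'date_last_connected': datetime.now().isoformat(),
    'mtp_prefix': 'mtp:::12345:::',           # storage.storage_prefix
}
print(json.dumps(driveinfo, indent=2))
```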
@@ -7,17 +7,24 @@ __license__ = 'GPL v3'
 __copyright__ = '2012, Kovid Goyal <kovid at kovidgoyal.net>'
 __docformat__ = 'restructuredtext en'

-import weakref, sys
+import weakref, sys, json
 from collections import deque
 from operator import attrgetter
 from future_builtins import map
+from datetime import datetime

 from calibre import human_readable, prints, force_unicode
+from calibre.utils.date import local_tz, as_utc
 from calibre.utils.icu import sort_key, lower
+from calibre.ebooks import BOOK_EXTENSIONS
+
+bexts = frozenset(BOOK_EXTENSIONS)

 class FileOrFolder(object):

     def __init__(self, entry, fs_cache):
+        self.all_storage_ids = fs_cache.all_storage_ids
+
         self.object_id = entry['id']
         self.is_folder = entry['is_folder']
         self.storage_id = entry['storage_id']
@@ -28,7 +35,16 @@ class FileOrFolder(object):
         self.name = force_unicode(n, 'utf-8')
         self.persistent_id = entry.get('persistent_id', self.object_id)
         self.size = entry.get('size', 0)
-        self.all_storage_ids = fs_cache.all_storage_ids
+        md = entry.get('modified', 0)
+        try:
+            if isinstance(md, tuple):
+                self.last_modified = datetime(*(list(md)+[local_tz]))
+            else:
+                self.last_modified = datetime.fromtimestamp(md, local_tz)
+        except:
+            self.last_modified = datetime.fromtimestamp(0, local_tz)
+        self.last_mod_string = self.last_modified.strftime('%Y/%m/%d %H:%M')
+        self.last_modified = as_utc(self.last_modified)

         if self.storage_id not in self.all_storage_ids:
             raise ValueError('Storage id %s not valid for %s, valid values: %s'%(self.storage_id,
@@ -47,6 +63,12 @@ class FileOrFolder(object):
         self.fs_cache = weakref.ref(fs_cache)
         self.deleted = False

+        if self.storage_id == self.object_id:
+            self.storage_prefix = 'mtp:::%s:::'%self.persistent_id
+
+        self.is_ebook = (not self.is_folder and
+                self.name.rpartition('.')[-1].lower() in bexts)
+
     def __repr__(self):
         name = 'Folder' if self.is_folder else 'File'
         try:
@@ -56,12 +78,16 @@ class FileOrFolder(object):
         datum = 'size=%s'%(self.size)
         if self.is_folder:
             datum = 'children=%s'%(len(self.files) + len(self.folders))
-        return '%s(id=%s, storage_id=%s, %s, path=%s)'%(name, self.object_id,
-                self.storage_id, datum, path)
+        return '%s(id=%s, storage_id=%s, %s, path=%s, modified=%s)'%(name, self.object_id,
+                self.storage_id, datum, path, self.last_mod_string)

     __str__ = __repr__
     __unicode__ = __repr__

+    @property
+    def empty(self):
+        return not self.files and not self.folders
+
     @property
     def id_map(self):
         return self.fs_cache().id_map
@@ -105,6 +131,7 @@ class FileOrFolder(object):
         c = '+' if self.is_folder else '-'
         data = ('%s children'%(sum(map(len, (self.files, self.folders))))
                 if self.is_folder else human_readable(self.size))
+        data += ' modified=%s'%self.last_mod_string
         line = '%s%s %s [id:%s %s]'%(prefix, c, self.name, self.object_id, data)
         prints(line, file=out)
         for c in (self.folders, self.files):
@@ -125,6 +152,33 @@ class FileOrFolder(object):
                 return e
         return None

+    def find_path(self, path):
+        '''
+        Find a path in this folder, where path is a
+        tuple of folder and file names like ('eBooks', 'newest',
+        'calibre.epub'). Finding is case-insensitive.
+        '''
+        parent = self
+        components = list(path)
+        while components:
+            child = components[0]
+            components = components[1:]
+            c = parent.folder_named(child)
+            if c is None:
+                c = parent.file_named(child)
+            if c is None:
+                return None
+            parent = c
+        return parent
+
+    @property
+    def mtp_relpath(self):
+        return tuple(x.lower() for x in self.full_path[1:])
+
+    @property
+    def mtp_id_path(self):
+        return 'mtp:::' + json.dumps(self.object_id) + ':::' + '/'.join(self.full_path)
+
 class FilesystemCache(object):

     def __init__(self, all_storage, entries):
@@ -164,4 +218,31 @@ class FilesystemCache(object):
         for e in self.entries:
             e.dump(out=out)

+    def storage(self, storage_id):
+        for e in self.entries:
+            if e.storage_id == storage_id:
+                return e
+
+    def iterebooks(self, storage_id):
+        for x in self.id_map.itervalues():
+            if x.storage_id == storage_id and x.is_ebook:
+                if x.parent_id == storage_id and x.name.lower().endswith('.txt'):
+                    continue # Ignore .txt files in the root
+                yield x
+
+    def resolve_mtp_id_path(self, path):
+        if not path.startswith('mtp:::'):
+            raise ValueError('%s is not a valid MTP path'%path)
+        parts = path.split(':::')
+        if len(parts) < 3:
+            raise ValueError('%s is not a valid MTP path'%path)
+        try:
+            object_id = json.loads(parts[1])
+        except:
+            raise ValueError('%s is not a valid MTP path'%path)
+        try:
+            return self.id_map[object_id]
+        except KeyError:
+            raise ValueError('No object found with MTP path: %s'%path)
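Note: `mtp_id_path` and `resolve_mtp_id_path` above define the path scheme the rest of the driver passes around: the literal prefix `mtp:::`, the JSON-encoded object id, another `:::`, then the human-readable path. A standalone sketch of the round trip (no calibre imports; the final `id_map` lookup is omitted here):

```python
import json

def make_mtp_id_path(object_id, full_path):
    # Mirrors FileOrFolder.mtp_id_path above
    return 'mtp:::' + json.dumps(object_id) + ':::' + '/'.join(full_path)

def parse_mtp_id_path(path):
    # Mirrors FilesystemCache.resolve_mtp_id_path, minus the id_map lookup
    if not path.startswith('mtp:::'):
        raise ValueError('%s is not a valid MTP path' % path)
    parts = path.split(':::')
    if len(parts) < 3:
        raise ValueError('%s is not a valid MTP path' % path)
    return json.loads(parts[1])

p = make_mtp_id_path(42, ('Internal storage', 'eBooks', 'book.epub'))
print(p)                     # mtp:::42:::Internal storage/eBooks/book.epub
print(parse_mtp_id_path(p))  # 42
```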
@@ -128,7 +128,7 @@ class TestDeviceInteraction(unittest.TestCase):

         raw2 = io.BytesIO()
         pc = ProgressCallback()
-        self.dev.get_file(f, raw2, callback=pc)
+        self.dev.get_mtp_file(f, raw2, callback=pc)
         self.assertEqual(raw.getvalue(), raw2.getvalue())
         self.assertTrue(pc.end_called,
                 msg='Progress callback not called with equal values (get_file)')
@@ -162,7 +162,7 @@ class TestDeviceInteraction(unittest.TestCase):
         self.assertEqual(f.storage_id, self.storage.storage_id)

         raw2 = io.BytesIO()
-        self.dev.get_file(f, raw2)
+        self.dev.get_mtp_file(f, raw2)
         self.assertEqual(raw.getvalue(), raw2.getvalue())

     def measure_memory_usage(self, repetitions, func, *args, **kwargs):
@@ -226,7 +226,7 @@ class TestDeviceInteraction(unittest.TestCase):
         def get_file(f):
             raw = io.BytesIO()
             pc = ProgressCallback()
-            self.dev.get_file(f, raw, callback=pc)
+            self.dev.get_mtp_file(f, raw, callback=pc)
             raw.truncate(0)
             del raw
             del pc
@@ -17,7 +17,6 @@ from calibre.constants import plugins
 from calibre.ptempfile import SpooledTemporaryFile
 from calibre.devices.errors import OpenFailed, DeviceError
 from calibre.devices.mtp.base import MTPDeviceBase, synchronous
-from calibre.devices.mtp.filesystem_cache import FilesystemCache

 MTPDevice = namedtuple('MTPDevice', 'busnum devnum vendor_id product_id '
         'bcd serial manufacturer product')
@@ -28,7 +27,8 @@ def fingerprint(d):

 class MTP_DEVICE(MTPDeviceBase):

-    supported_platforms = ['linux', 'osx']
+    # libusb(x) does not work on OS X. So no MTP support for OS X
+    supported_platforms = ['linux']

     def __init__(self, *args, **kwargs):
         MTPDeviceBase.__init__(self, *args, **kwargs)
@@ -46,14 +46,6 @@ class MTP_DEVICE(MTPDeviceBase):
     def set_debug_level(self, lvl):
         self.libmtp.set_debug_level(lvl)

-    def report_progress(self, sent, total):
-        try:
-            p = int(sent/total * 100)
-        except ZeroDivisionError:
-            p = 100
-        if self.progress_reporter is not None:
-            self.progress_reporter(p)
-
     @synchronous
     def detect_managed_devices(self, devices_on_system, force_refresh=False):
         if self.libmtp is None: return None
@@ -91,6 +83,8 @@ class MTP_DEVICE(MTPDeviceBase):

     @synchronous
     def debug_managed_device_detection(self, devices_on_system, output):
+        if self.currently_connected_dev is not None:
+            return True
         p = partial(prints, file=output)
         if self.libmtp is None:
             err = plugins['libmtp'][1]
@@ -183,6 +177,7 @@ class MTP_DEVICE(MTPDeviceBase):
     @property
     def filesystem_cache(self):
         if self._filesystem_cache is None:
+            from calibre.devices.mtp.filesystem_cache import FilesystemCache
             with self.lock:
                 storage, all_items, all_errs = [], [], []
                 for sid, capacity in zip([self._main_id, self._carda_id,
@@ -212,19 +207,10 @@ class MTP_DEVICE(MTPDeviceBase):
         return self._filesystem_cache

     @synchronous
-    def get_device_information(self, end_session=True):
+    def get_basic_device_information(self):
         d = self.dev
         return (self.current_friendly_name, d.device_version, d.device_version, '')

-    @synchronous
-    def card_prefix(self, end_session=True):
-        ans = [None, None]
-        if self._carda_id is not None:
-            ans[0] = 'mtp:::%d:::'%self._carda_id
-        if self._cardb_id is not None:
-            ans[1] = 'mtp:::%d:::'%self._cardb_id
-        return tuple(ans)
-
     @synchronous
     def total_space(self, end_session=True):
         ans = [0, 0, 0]
@@ -288,7 +274,7 @@ class MTP_DEVICE(MTPDeviceBase):
         return parent.add_child(ans)

     @synchronous
-    def get_file(self, f, stream=None, callback=None):
+    def get_mtp_file(self, f, stream=None, callback=None):
         if f.is_folder:
             raise ValueError('%s if a folder'%(f.full_path,))
         if stream is None:
@@ -298,6 +284,7 @@ class MTP_DEVICE(MTPDeviceBase):
         if not ok:
             raise DeviceError('Failed to get file: %s with errors: %s'%(
                 f.full_path, self.format_errorstack(errs)))
+        stream.seek(0)
         return stream

     @synchronous
@@ -319,6 +306,7 @@ class MTP_DEVICE(MTPDeviceBase):
             raise DeviceError('Failed to delete %s with error: %s'%
                 (obj.full_path, self.format_errorstack(errs)))
         parent.remove_child(obj)
+        return parent

 def develop():
     from calibre.devices.scanner import DeviceScanner
@@ -121,12 +121,13 @@ static uint16_t data_from_python(void *params, void *priv, uint32_t wantlen, uns
 static PyObject* build_file_metadata(LIBMTP_file_t *nf, uint32_t storage_id) {
     PyObject *ans = NULL;

-    ans = Py_BuildValue("{s:s, s:k, s:k, s:k, s:K, s:O}",
+    ans = Py_BuildValue("{s:s, s:k, s:k, s:k, s:K, s:L, s:O}",
             "name", (unsigned long)nf->filename,
             "id", (unsigned long)nf->item_id,
             "parent_id", (unsigned long)nf->parent_id,
             "storage_id", (unsigned long)storage_id,
             "size", nf->filesize,
+            "modified", (PY_LONG_LONG)nf->modificationdate,
             "is_folder", (nf->filetype == LIBMTP_FILETYPE_FOLDER) ? Py_True : Py_False
     );

@@ -34,6 +34,7 @@ static IPortableDeviceKeyCollection* create_filesystem_properties_collection() {
     ADDPROP(WPD_OBJECT_ISHIDDEN);
     ADDPROP(WPD_OBJECT_CAN_DELETE);
     ADDPROP(WPD_OBJECT_SIZE);
+    ADDPROP(WPD_OBJECT_DATE_MODIFIED);

     return properties;

@@ -81,6 +82,24 @@ static void set_size_property(PyObject *dict, REFPROPERTYKEY key, const char *pykey, IPortableDeviceValues *properties) {
     }
 }

+static void set_date_property(PyObject *dict, REFPROPERTYKEY key, const char *pykey, IPortableDeviceValues *properties) {
+    FLOAT val = 0;
+    SYSTEMTIME st;
+    unsigned int microseconds;
+    PyObject *t;
+
+    if (SUCCEEDED(properties->GetFloatValue(key, &val))) {
+        if (VariantTimeToSystemTime(val, &st)) {
+            microseconds = 1000 * st.wMilliseconds;
+            t = Py_BuildValue("H H H H H H I", (unsigned short)st.wYear,
+                    (unsigned short)st.wMonth, (unsigned short)st.wDay,
+                    (unsigned short)st.wHour, (unsigned short)st.wMinute,
+                    (unsigned short)st.wSecond, microseconds);
+            if (t != NULL) { PyDict_SetItemString(dict, pykey, t); Py_DECREF(t); }
+        }
+    }
+}
+
 static void set_content_type_property(PyObject *dict, IPortableDeviceValues *properties) {
     GUID guid = GUID_NULL;
     BOOL is_folder = 0;
@@ -103,6 +122,8 @@ static void set_properties(PyObject *obj, IPortableDeviceValues *values) {
     set_bool_property(obj, WPD_OBJECT_ISSYSTEM, "is_system", values);

     set_size_property(obj, WPD_OBJECT_SIZE, "size", values);
+    set_date_property(obj, WPD_OBJECT_DATE_MODIFIED, "modified", values);

 }

 // }}}
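Note: the two native backends report the new `modified` field differently: libmtp passes a POSIX timestamp, while the WPD code above builds a `(year, month, day, hour, minute, second, microseconds)` tuple, and `FileOrFolder.__init__` accepts either form. A small standalone sketch of that normalization (naive datetimes only; the real code attaches calibre's `local_tz` and converts to UTC with `as_utc`):

```python
from datetime import datetime

def to_datetime(md):
    # Tuple from the Windows WPD backend, epoch seconds from libmtp;
    # fall back to the epoch if the value is unusable.
    try:
        if isinstance(md, tuple):
            return datetime(*md)
        return datetime.fromtimestamp(md)
    except Exception:
        return datetime.fromtimestamp(0)

print(to_datetime(1346400000))                   # libmtp-style epoch seconds
print(to_datetime((2012, 8, 31, 10, 30, 0, 0)))  # WPD-style tuple
```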
@@ -7,8 +7,8 @@ __license__ = 'GPL v3'
 __copyright__ = '2012, Kovid Goyal <kovid at kovidgoyal.net>'
 __docformat__ = 'restructuredtext en'

-import time, threading
-from functools import wraps
+import time, threading, traceback
+from functools import wraps, partial
 from future_builtins import zip
 from itertools import chain

@@ -17,12 +17,12 @@ from calibre.constants import plugins, __appname__, numeric_version
 from calibre.ptempfile import SpooledTemporaryFile
 from calibre.devices.errors import OpenFailed, DeviceError
 from calibre.devices.mtp.base import MTPDeviceBase
-from calibre.devices.mtp.filesystem_cache import FilesystemCache

 class ThreadingViolation(Exception):

     def __init__(self):
-        Exception.__init__('You cannot use the MTP driver from a thread other than the '
+        Exception.__init__(self,
+                'You cannot use the MTP driver from a thread other than the '
                 ' thread in which startup() was called')

 def same_thread(func):
@@ -51,6 +51,7 @@ class MTP_DEVICE(MTPDeviceBase):
         self._main_id = self._carda_id = self._cardb_id = None
         self.start_thread = None
         self._filesystem_cache = None
+        self.eject_dev_on_next_scan = False

     def startup(self):
         self.start_thread = threading.current_thread()
@@ -75,6 +76,10 @@ class MTP_DEVICE(MTPDeviceBase):
     @same_thread
     def detect_managed_devices(self, devices_on_system, force_refresh=False):
         if self.wpd is None: return None
+        if self.eject_dev_on_next_scan:
+            self.eject_dev_on_next_scan = False
+            if self.currently_connected_pnp_id is not None:
+                self.do_eject()
+
         devices_on_system = frozenset(devices_on_system)
         if (force_refresh or
@@ -124,6 +129,54 @@ class MTP_DEVICE(MTPDeviceBase):

         return None

+    @same_thread
+    def debug_managed_device_detection(self, devices_on_system, output):
+        import pprint
+        p = partial(prints, file=output)
+        if self.currently_connected_pnp_id is not None:
+            return True
+        if self.wpd_error:
+            p('Cannot detect MTP devices')
+            p(self.wpd_error)
+            return False
+        try:
+            pnp_ids = frozenset(self.wpd.enumerate_devices())
+        except:
+            p("Failed to get list of PNP ids on system")
+            p(traceback.format_exc())
+            return False
+
+        for pnp_id in pnp_ids:
+            try:
+                data = self.wpd.device_info(pnp_id)
+            except:
+                p('Failed to get data for device:', pnp_id)
+                p(traceback.format_exc())
+                continue
+            protocol = data.get('protocol', '').lower()
+            if not protocol.startswith('mtp:'): continue
+            p('MTP device:', pnp_id)
+            p(pprint.pformat(data))
+            if not self.is_suitable_wpd_device(data):
+                p('Not a suitable MTP device, ignoring\n')
+                continue
+            p('\nTrying to open:', pnp_id)
+            try:
+                self.open(pnp_id, 'debug-detection')
+            except:
+                p('Open failed:')
+                p(traceback.format_exc())
+                continue
+            break
+        if self.currently_connected_pnp_id:
+            p('Opened', self.current_friendly_name, 'successfully')
+            p('Device info:')
+            p(pprint.pformat(self.dev.data))
+            self.eject()
+            return True
+        p('No suitable MTP devices found')
+        return False
+
     def is_suitable_wpd_device(self, devdata):
         # Check that protocol is MTP
         protocol = devdata.get('protocol', '').lower()
@@ -143,6 +196,7 @@ class MTP_DEVICE(MTPDeviceBase):
     @property
     def filesystem_cache(self):
         if self._filesystem_cache is None:
+            from calibre.devices.mtp.filesystem_cache import FilesystemCache
             ts = self.total_space()
             all_storage = []
             items = []
@@ -164,19 +218,24 @@ class MTP_DEVICE(MTPDeviceBase):
         return self._filesystem_cache

     @same_thread
-    def post_yank_cleanup(self):
-        self.currently_connected_pnp_id = self.current_friendly_name = None
-        self._main_id = self._carda_id = self._cardb_id = None
-        self.dev = self._filesystem_cache = None
-
-    @same_thread
-    def eject(self):
+    def do_eject(self):
         if self.currently_connected_pnp_id is None: return
         self.ejected_devices.add(self.currently_connected_pnp_id)
         self.currently_connected_pnp_id = self.current_friendly_name = None
         self._main_id = self._carda_id = self._cardb_id = None
         self.dev = self._filesystem_cache = None

+    @same_thread
+    def post_yank_cleanup(self):
+        self.currently_connected_pnp_id = self.current_friendly_name = None
+        self._main_id = self._carda_id = self._cardb_id = None
+        self.dev = self._filesystem_cache = None
+
+    def eject(self):
+        if self.currently_connected_pnp_id is None: return
+        self.eject_dev_on_next_scan = True
+
     @same_thread
     def open(self, connected_device, library_uuid):
         self.dev = self._filesystem_cache = None
@@ -200,27 +259,16 @@ class MTP_DEVICE(MTPDeviceBase):
             self._carda_id = storage[1]['id']
         if len(storage) > 2:
             self._cardb_id = storage[2]['id']
-        self.current_friendly_name = devdata.get('friendly_name', None)
+        self.current_friendly_name = devdata.get('friendly_name',
+                _('Unknown MTP device'))
+        self.currently_connected_pnp_id = connected_device

     @same_thread
-    def get_device_information(self, end_session=True):
+    def get_basic_device_information(self):
         d = self.dev.data
         dv = d.get('device_version', '')
-        for sid, location_code in ( (self._main_id, 'main'), (self._carda_id,
-            'A'), (self._cardb_id, 'B')):
-            if sid is None: continue
-            # TODO: Implement the drive info dict
         return (self.current_friendly_name, dv, dv, '')

-    @synchronous
-    def card_prefix(self, end_session=True):
-        ans = [None, None]
-        if self._carda_id is not None:
-            ans[0] = 'mtp:::%s:::'%self._carda_id
-        if self._cardb_id is not None:
-            ans[1] = 'mtp:::%s:::'%self._cardb_id
-        return tuple(ans)
-
     @same_thread
     def total_space(self, end_session=True):
         ans = [0, 0, 0]
@@ -245,7 +293,7 @@ class MTP_DEVICE(MTPDeviceBase):
         return tuple(ans)

     @same_thread
-    def get_file(self, f, stream=None, callback=None):
+    def get_mtp_file(self, f, stream=None, callback=None):
         if f.is_folder:
             raise ValueError('%s if a folder'%(f.full_path,))
         if stream is None:
@@ -260,6 +308,7 @@ class MTP_DEVICE(MTPDeviceBase):
         except Exception as e:
             raise DeviceError('Failed to fetch the file %s with error: %s'%
                     f.full_path, as_unicode(e))
+        stream.seek(0)
         return stream

     @same_thread
@@ -289,6 +338,7 @@ class MTP_DEVICE(MTPDeviceBase):
         parent = obj.parent
         self.dev.delete_object(obj.object_id)
         parent.remove_child(obj)
+        return parent

     @same_thread
     def put_file(self, parent, name, stream, size, callback=None, replace=True):
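Note: the Windows driver above cannot safely eject from an arbitrary thread, so `eject()` only raises the `eject_dev_on_next_scan` flag and the real `do_eject()` runs on the device thread during the next `detect_managed_devices()` call. A minimal sketch of that deferred-flag handshake (illustrative class, not the real driver):

```python
class DeferredEject(object):
    # Illustrative only: mirrors the flag handshake used by MTP_DEVICE above.
    def __init__(self):
        self.connected = 'SOME-PNP-ID'
        self.eject_on_next_scan = False

    def eject(self):                 # may be called from the GUI thread
        if self.connected is None:
            return
        self.eject_on_next_scan = True

    def scan(self):                  # runs on the device thread
        if self.eject_on_next_scan:
            self.eject_on_next_scan = False
            if self.connected is not None:
                self.do_eject()

    def do_eject(self):
        print('ejecting', self.connected)
        self.connected = None

d = DeferredEject()
d.eject()   # only sets the flag
d.scan()    # performs the actual eject on the next scan
```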
@@ -54,9 +54,9 @@ def main():
     plugins._plugins['wpd'] = (wpd, '')
     sys.path.pop(0)

-    from calibre.devices.mtp.test import run
-    run()
-    return
+    # from calibre.devices.mtp.test import run
+    # run()
+    # return

     from calibre.devices.scanner import win_scanner
     from calibre.devices.mtp.windows.driver import MTP_DEVICE
@@ -81,13 +81,13 @@ def main():
         # print ('Fetching file: oFF (198214 bytes)')
         # stream = dev.get_file('oFF')
         # print ("Fetched size: ", stream.tell())
-        size = 4
-        stream = io.BytesIO(b'a'*size)
-        name = 'zzz-test-file.txt'
-        stream.seek(0)
-        f = dev.put_file(dev.filesystem_cache.entries[0], name, stream, size)
-        print ('Put file:', f)
-        # dev.filesystem_cache.dump()
+        # size = 4
+        # stream = io.BytesIO(b'a'*size)
+        # name = 'zzz-test-file.txt'
+        # stream.seek(0)
+        # f = dev.put_file(dev.filesystem_cache.entries[0], name, stream, size)
+        # print ('Put file:', f)
+        dev.filesystem_cache.dump()
     finally:
         dev.shutdown()
@@ -120,14 +120,14 @@ wpd_enumerate_devices(PyObject *self, PyObject *args) {
         hresult_set_exc("Failed to get list of portable devices", hr);
     }

+    Py_BEGIN_ALLOW_THREADS;
     for (i = 0; i < num_of_devices; i++) {
-        Py_BEGIN_ALLOW_THREADS;
         CoTaskMemFree(pnp_device_ids[i]);
-        Py_END_ALLOW_THREADS;
         pnp_device_ids[i] = NULL;
     }
     free(pnp_device_ids);
     pnp_device_ids = NULL;
+    Py_END_ALLOW_THREADS;

     return Py_BuildValue("N", ans);
 } // }}}
@@ -292,7 +292,15 @@ if islinux:

     libusb_scanner = LibUSBScanner()
 if isosx:
-    osx_scanner = libusb_scanner
+    # Apparently libusb causes mem leaks on some Macs and hangs on others and
+    # works on a few. OS X users will just have to live without MTP support.
+    # See https://bugs.launchpad.net/calibre/+bug/1044706
+    # See https://bugs.launchpad.net/calibre/+bug/1044758
+    # osx_scanner = libusb_scanner
+    usbobserver, usbobserver_err = plugins['usbobserver']
+    if usbobserver is None:
+        raise RuntimeError('Failed to load usbobserver: %s'%usbobserver_err)
+    osx_scanner = usbobserver.get_usb_devices

 if isfreebsd:
     freebsd_scanner = FreeBSDScanner()
@@ -15,11 +15,10 @@ import os, subprocess, time, re, sys, glob
 from itertools import repeat

 from calibre.devices.interface import DevicePlugin
-from calibre.devices.errors import (DeviceError, FreeSpaceError,
-        WrongDestinationError)
+from calibre.devices.errors import DeviceError
 from calibre.devices.usbms.deviceconfig import DeviceConfig
 from calibre.constants import iswindows, islinux, isosx, isfreebsd, plugins
-from calibre.utils.filenames import ascii_filename as sanitize, shorten_components_to
+from calibre.utils.filenames import ascii_filename as sanitize

 if isosx:
     usbobserver, usbobserver_err = plugins['usbobserver']
@@ -976,53 +975,32 @@ class Device(DeviceConfig, DevicePlugin):
         return self.EBOOK_DIR_CARD_A

     def _sanity_check(self, on_card, files):
-        if on_card == 'carda' and not self._card_a_prefix:
-            raise WrongDestinationError(_(
-                'The reader has no storage card %s. You may have changed '
-                'the default send to device action. Right click on the send '
-                'to device button and reset the default action to be '
-                '"Send to main memory".')%'A')
-        elif on_card == 'cardb' and not self._card_b_prefix:
-            raise WrongDestinationError(_(
-                'The reader has no storage card %s. You may have changed '
-                'the default send to device action. Right click on the send '
-                'to device button and reset the default action to be '
-                '"Send to main memory".')%'B')
-        elif on_card and on_card not in ('carda', 'cardb'):
-            raise DeviceError(_('Selected slot: %s is not supported.') % on_card)
+        from calibre.devices.utils import sanity_check
+        sanity_check(on_card, files, self.card_prefix(), self.free_space())

-        if on_card == 'carda':
-            path = os.path.join(self._card_a_prefix,
-                    *(self.get_carda_ebook_dir(for_upload=True).split('/')))
-        elif on_card == 'cardb':
-            path = os.path.join(self._card_b_prefix,
-                    *(self.EBOOK_DIR_CARD_B.split('/')))
-        else:
-            candidates = self.get_main_ebook_dir(for_upload=True)
+        def get_dest_dir(prefix, candidates):
             if isinstance(candidates, basestring):
                 candidates = [candidates]
+            if not candidates:
+                candidates = ['']
             candidates = [
-                ((os.path.join(self._main_prefix, *(x.split('/')))) if x else
-                    self._main_prefix) for x
-                in candidates]
+                ((os.path.join(prefix, *(x.split('/')))) if x else prefix)
+                for x in candidates]
             existing = [x for x in candidates if os.path.exists(x)]
             if not existing:
-                existing = candidates[:1]
-            path = existing[0]
+                existing = candidates
+            return existing[0]

-        def get_size(obj):
-            path = getattr(obj, 'name', obj)
-            return os.path.getsize(path)
+        if on_card == 'carda':
+            candidates = self.get_carda_ebook_dir(for_upload=True)
+            path = get_dest_dir(self._carda_prefix, candidates)
+        elif on_card == 'cardb':
+            candidates = self.get_cardb_ebook_dir(for_upload=True)
+            path = get_dest_dir(self._cardb_prefix, candidates)
+        else:
+            candidates = self.get_main_ebook_dir(for_upload=True)
+            path = get_dest_dir(self._main_prefix, candidates)

-        sizes = [get_size(f) for f in files]
-        size = sum(sizes)
-
-        if not on_card and size > self.free_space()[0] - 2*1024*1024:
-            raise FreeSpaceError(_("There is insufficient free space in main memory"))
-        if on_card == 'carda' and size > self.free_space()[1] - 1024*1024:
-            raise FreeSpaceError(_("There is insufficient free space on the storage card"))
-        if on_card == 'cardb' and size > self.free_space()[2] - 1024*1024:
-            raise FreeSpaceError(_("There is insufficient free space on the storage card"))
         return path

     def filename_callback(self, default, mi):
@@ -1052,78 +1030,16 @@ class Device(DeviceConfig, DevicePlugin):
             pass

     def create_upload_path(self, path, mdata, fname, create_dirs=True):
-        path = os.path.abspath(path)
-        maxlen = self.MAX_PATH_LEN
-
-        special_tag = None
-        if mdata.tags:
-            for t in mdata.tags:
-                if t.startswith(_('News')) or t.startswith('/'):
-                    special_tag = t
-                    break
-
+        from calibre.devices.utils import create_upload_path
         settings = self.settings()
-        template = self.save_template()
-        if mdata.tags and _('News') in mdata.tags:
-            try:
-                p = mdata.pubdate
-                date = (p.year, p.month, p.day)
-            except:
-                today = time.localtime()
-                date = (today[0], today[1], today[2])
-            template = "{title}_%d-%d-%d" % date
-        use_subdirs = self.SUPPORTS_SUB_DIRS and settings.use_subdirs
-
-        fname = sanitize(fname)
-        ext = os.path.splitext(fname)[1]
-
-        from calibre.library.save_to_disk import get_components
-        from calibre.library.save_to_disk import config
-        opts = config().parse()
-        if not isinstance(template, unicode):
-            template = template.decode('utf-8')
-        app_id = str(getattr(mdata, 'application_id', ''))
-        id_ = mdata.get('id', fname)
-        extra_components = get_components(template, mdata, id_,
-            timefmt=opts.send_timefmt, length=maxlen-len(app_id)-1)
-        if not extra_components:
-            extra_components.append(sanitize(self.filename_callback(fname,
-                mdata)))
-        else:
-            extra_components[-1] = sanitize(self.filename_callback(extra_components[-1]+ext, mdata))
-
-        if extra_components[-1] and extra_components[-1][0] in ('.', '_'):
-            extra_components[-1] = 'x' + extra_components[-1][1:]
-
-        if special_tag is not None:
-            name = extra_components[-1]
-            extra_components = []
-            tag = special_tag
-            if tag.startswith(_('News')):
-                if self.NEWS_IN_FOLDER:
-                    extra_components.append('News')
-            else:
-                for c in tag.split('/'):
-                    c = sanitize(c)
-                    if not c: continue
-                    extra_components.append(c)
-            extra_components.append(name)
-
-        if not use_subdirs:
-            extra_components = extra_components[-1:]
-
-        def remove_trailing_periods(x):
-            ans = x
-            while ans.endswith('.'):
-                ans = ans[:-1].strip()
-            if not ans:
-                ans = 'x'
-            return ans
-
-        extra_components = list(map(remove_trailing_periods, extra_components))
-        components = shorten_components_to(maxlen - len(path), extra_components)
-        components = self.sanitize_path_components(components)
-        filepath = os.path.join(path, *components)
+        filepath = create_upload_path(mdata, fname, self.save_template(), sanitize,
+                prefix_path=os.path.abspath(path),
+                maxlen=self.MAX_PATH_LEN,
+                use_subdirs = self.SUPPORTS_SUB_DIRS and settings.use_subdirs,
+                news_in_folder = self.NEWS_IN_FOLDER,
+                filename_callback=self.filename_callback,
+                sanitize_path_components=self.sanitize_path_components
+                )
         filedir = os.path.dirname(filepath)

         if create_dirs and not os.path.exists(filedir):
@@ -10,7 +10,7 @@ driver. It is intended to be subclassed with the relevant parts implemented
 for a particular device.
 '''

-import os, re, time, json, functools, shutil
+import os, time, json, shutil
 from itertools import cycle

 from calibre.constants import numeric_version
@@ -63,7 +63,7 @@ class USBMS(CLI, Device):
             dinfo = {}
         if dinfo.get('device_store_uuid', None) is None:
             dinfo['device_store_uuid'] = unicode(uuid.uuid4())
-        if dinfo.get('device_name') is None:
+        if dinfo.get('device_name', None) is None:
             dinfo['device_name'] = self.get_gui_name()
         if name is not None:
             dinfo['device_name'] = name
@@ -166,7 +166,7 @@ class USBMS(CLI, Device):

         # make a dict cache of paths so the lookup in the loop below is faster.
         bl_cache = {}
-        for idx,b in enumerate(bl):
+        for idx, b in enumerate(bl):
             bl_cache[b.lpath] = idx

         all_formats = self.formats_to_scan_for()
@@ -404,25 +404,8 @@ class USBMS(CLI, Device):

     @classmethod
     def build_template_regexp(cls):
-        def replfunc(match, seen=None):
-            v = match.group(1)
-            if v in ['authors', 'author_sort']:
-                v = 'author'
-            if v in ('title', 'series', 'series_index', 'isbn', 'author'):
-                if v not in seen:
-                    seen.add(v)
-                    return '(?P<' + v + '>.+?)'
-            return '(.+?)'
-        s = set()
-        f = functools.partial(replfunc, seen=s)
-        template = None
-        try:
-            template = cls.save_template().rpartition('/')[2]
-            return re.compile(re.sub('{([^}]*)}', f, template) + '([_\d]*$)')
-        except:
-            prints(u'Failed to parse template: %r'%template)
-            template = u'{title} - {authors}'
-            return re.compile(re.sub('{([^}]*)}', f, template) + '([_\d]*$)')
+        from calibre.devices.utils import build_template_regexp
+        return build_template_regexp(cls.save_template())

     @classmethod
     def path_to_unicode(cls, path):
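Note: `USBMS.build_template_regexp` now just delegates to the shared helper in the new `src/calibre/devices/utils.py` below. As a rough illustration of what the generated regular expression does, here is the same substitution applied by hand to one example template (standalone sketch, independent of calibre; the template string is made up):

```python
import re
from functools import partial

# Standalone copy of the substitution used by build_template_regexp
# (see src/calibre/devices/utils.py below), applied to one template.
def replfunc(match, seen=None):
    v = match.group(1)
    if v in ['authors', 'author_sort']:
        v = 'author'
    if v in ('title', 'series', 'series_index', 'isbn', 'author'):
        if v not in seen:
            seen.add(v)
            return '(?P<' + v + '>.+?)'
    return '(.+?)'

template = '{author}/{title} - {authors}'.rpartition('/')[2]
regexp = re.compile(re.sub('{([^}]*)}', partial(replfunc, seen=set()),
                           template) + r'([_\d]*$)')
m = regexp.match('Meditations - Marcus Aurelius')
print(m.groupdict())  # {'title': 'Meditations', 'author': 'Marcus Aurelius'}
```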
src/calibre/devices/utils.py (new file, 148 lines)
@@ -0,0 +1,148 @@
+#!/usr/bin/env python
+# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:fdm=marker:ai
+from __future__ import (unicode_literals, division, absolute_import,
+        print_function)
+
+__license__ = 'GPL v3'
+__copyright__ = '2012, Kovid Goyal <kovid at kovidgoyal.net>'
+__docformat__ = 'restructuredtext en'
+
+import os, time, re
+from functools import partial
+
+from calibre.devices.errors import DeviceError, WrongDestinationError, FreeSpaceError
+
+def sanity_check(on_card, files, card_prefixes, free_space):
+    if on_card == 'carda' and not card_prefixes[0]:
+        raise WrongDestinationError(_(
+            'The reader has no storage card %s. You may have changed '
+            'the default send to device action. Right click on the send '
+            'to device button and reset the default action to be '
+            '"Send to main memory".')%'A')
+    elif on_card == 'cardb' and not card_prefixes[1]:
+        raise WrongDestinationError(_(
+            'The reader has no storage card %s. You may have changed '
+            'the default send to device action. Right click on the send '
+            'to device button and reset the default action to be '
+            '"Send to main memory".')%'B')
+    elif on_card and on_card not in ('carda', 'cardb'):
+        raise DeviceError(_('Selected slot: %s is not supported.') % on_card)
+
+    size = 0
+    for f in files:
+        size += os.path.getsize(getattr(f, 'name', f))
+
+    if not on_card and size > free_space[0] - 2*1024*1024:
+        raise FreeSpaceError(_("There is insufficient free space in main memory"))
+    if on_card == 'carda' and size > free_space[1] - 1024*1024:
+        raise FreeSpaceError(_("There is insufficient free space on the storage card"))
+    if on_card == 'cardb' and size > free_space[2] - 1024*1024:
+        raise FreeSpaceError(_("There is insufficient free space on the storage card"))
+
+def build_template_regexp(template):
+    from calibre import prints
+
+    def replfunc(match, seen=None):
+        v = match.group(1)
+        if v in ['authors', 'author_sort']:
+            v = 'author'
+        if v in ('title', 'series', 'series_index', 'isbn', 'author'):
+            if v not in seen:
+                seen.add(v)
+                return '(?P<' + v + '>.+?)'
+        return '(.+?)'
+    s = set()
+    f = partial(replfunc, seen=s)
+
+    try:
+        template = template.rpartition('/')[2]
+        return re.compile(re.sub('{([^}]*)}', f, template) + '([_\d]*$)')
+    except:
+        prints(u'Failed to parse template: %r'%template)
+        template = u'{title} - {authors}'
+        return re.compile(re.sub('{([^}]*)}', f, template) + '([_\d]*$)')
+
+def create_upload_path(mdata, fname, template, sanitize,
+        prefix_path='',
+        path_type=os.path,
+        maxlen=250,
+        use_subdirs=True,
+        news_in_folder=True,
+        filename_callback=lambda x, y:x,
+        sanitize_path_components=lambda x: x
+        ):
+    from calibre.library.save_to_disk import get_components, config
+    from calibre.utils.filenames import shorten_components_to
+
+    special_tag = None
+    if mdata.tags:
+        for t in mdata.tags:
+            if t.startswith(_('News')) or t.startswith('/'):
+                special_tag = t
+                break
+
+    if mdata.tags and _('News') in mdata.tags:
+        try:
+            p = mdata.pubdate
+            date = (p.year, p.month, p.day)
+        except:
+            today = time.localtime()
+            date = (today[0], today[1], today[2])
|
||||||
|
template = u"{title}_%d-%d-%d" % date
|
||||||
|
|
||||||
|
fname = sanitize(fname)
|
||||||
|
ext = path_type.splitext(fname)[1]
|
||||||
|
|
||||||
|
opts = config().parse()
|
||||||
|
if not isinstance(template, unicode):
|
||||||
|
template = template.decode('utf-8')
|
||||||
|
app_id = str(getattr(mdata, 'application_id', ''))
|
||||||
|
id_ = mdata.get('id', fname)
|
||||||
|
extra_components = get_components(template, mdata, id_,
|
||||||
|
timefmt=opts.send_timefmt, length=maxlen-len(app_id)-1)
|
||||||
|
if not extra_components:
|
||||||
|
extra_components.append(sanitize(filename_callback(fname,
|
||||||
|
mdata)))
|
||||||
|
else:
|
||||||
|
extra_components[-1] = sanitize(filename_callback(extra_components[-1]+ext, mdata))
|
||||||
|
|
||||||
|
if extra_components[-1] and extra_components[-1][0] in ('.', '_'):
|
||||||
|
extra_components[-1] = 'x' + extra_components[-1][1:]
|
||||||
|
|
||||||
|
if special_tag is not None:
|
||||||
|
name = extra_components[-1]
|
||||||
|
extra_components = []
|
||||||
|
tag = special_tag
|
||||||
|
if tag.startswith(_('News')):
|
||||||
|
if news_in_folder:
|
||||||
|
extra_components.append('News')
|
||||||
|
else:
|
||||||
|
for c in tag.split('/'):
|
||||||
|
c = sanitize(c)
|
||||||
|
if not c: continue
|
||||||
|
extra_components.append(c)
|
||||||
|
extra_components.append(name)
|
||||||
|
|
||||||
|
if not use_subdirs:
|
||||||
|
extra_components = extra_components[-1:]
|
||||||
|
|
||||||
|
def remove_trailing_periods(x):
|
||||||
|
ans = x
|
||||||
|
while ans.endswith('.'):
|
||||||
|
ans = ans[:-1].strip()
|
||||||
|
if not ans:
|
||||||
|
ans = 'x'
|
||||||
|
return ans
|
||||||
|
|
||||||
|
extra_components = list(map(remove_trailing_periods, extra_components))
|
||||||
|
components = shorten_components_to(maxlen - len(prefix_path), extra_components)
|
||||||
|
components = sanitize_path_components(components)
|
||||||
|
if prefix_path:
|
||||||
|
filepath = path_type.join(prefix_path, *components)
|
||||||
|
else:
|
||||||
|
filepath = path_type.join(*components)
|
||||||
|
|
||||||
|
return filepath
|
||||||
|
|
||||||
|
|
||||||
|
|
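A minimal usage sketch of the new helper above, assuming a calibre checkout that contains this commit so that calibre.devices.utils is importable; the template string and the file name being matched are made up for illustration.

    from calibre.devices.utils import build_template_regexp

    # Only the last path component of the template is compiled ('{title} - {authors}').
    rx = build_template_regexp('{author_sort}/{title} - {authors}')
    m = rx.match('Dune - Frank Herbert')
    if m is not None:
        # {'authors'} is normalised to the 'author' named group by replfunc()
        print(m.groupdict())  # expected: {'title': 'Dune', 'author': 'Frank Herbert'}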
@@ -170,7 +170,7 @@ def add_pipeline_options(parser, plumber):
              'chapter', 'chapter_mark',
              'prefer_metadata_cover', 'remove_first_image',
              'insert_metadata', 'page_breaks_before',
-             'remove_fake_margins',
+             'remove_fake_margins', 'start_reading_at',
              ]
             ),

@@ -304,6 +304,16 @@ OptionRecommendation(name='chapter_mark',
                 'to mark chapters.')
         ),

+OptionRecommendation(name='start_reading_at',
+        recommended_value=None, level=OptionRecommendation.LOW,
+        help=_('An XPath expression to detect the location in the document'
+            ' at which to start reading. Some ebook reading programs'
+            ' (most prominently the Kindle) use this location as the'
+            ' position at which to open the book. See the XPath tutorial'
+            ' in the calibre User Manual for further help using this'
+            ' feature.')
+        ),
+
 OptionRecommendation(name='extra_css',
         recommended_value=None, level=OptionRecommendation.LOW,
         help=_('Either the path to a CSS stylesheet or raw CSS. '
@@ -161,7 +161,9 @@ class JsonCodec(object):
         try:
             js = json.load(file_, encoding='utf-8')
             for item in js:
-                booklist.append(self.raw_to_book(item, book_class, prefix))
+                entry = self.raw_to_book(item, book_class, prefix)
+                if entry is not None:
+                    booklist.append(entry)
         except:
             print 'exception during JSON decode_from_file'
             traceback.print_exc()
@@ -510,6 +510,7 @@ class OPF(object): # {{{
     tags_path = XPath('descendant::*[re:match(name(), "subject", "i")]')
     isbn_path = XPath('descendant::*[re:match(name(), "identifier", "i") and '+
             '(re:match(@scheme, "isbn", "i") or re:match(@opf:scheme, "isbn", "i"))]')
+    pubdate_path = XPath('descendant::*[re:match(name(), "date", "i")]')
     raster_cover_path = XPath('descendant::*[re:match(name(), "meta", "i") and ' +
             're:match(@name, "cover", "i") and @content]')
     identifier_path = XPath('descendant::*[re:match(name(), "identifier", "i")]')

@@ -538,8 +539,6 @@ class OPF(object): # {{{
             formatter=float, none_is=1)
     title_sort = TitleSortField('title_sort', is_dc=False)
     rating = MetadataField('rating', is_dc=False, formatter=float)
-    pubdate = MetadataField('date', formatter=parse_date,
-                            renderer=isoformat)
     publication_type = MetadataField('publication_type', is_dc=False)
     timestamp = MetadataField('timestamp', is_dc=False,
             formatter=parse_date, renderer=isoformat)

@@ -852,6 +851,44 @@ class OPF(object): # {{{

         return property(fget=fget, fset=fset)

+    @dynamic_property
+    def pubdate(self):
+
+        def fget(self):
+            ans = None
+            for match in self.pubdate_path(self.metadata):
+                try:
+                    val = parse_date(etree.tostring(match, encoding=unicode,
+                        method='text', with_tail=False).strip())
+                except:
+                    continue
+                if ans is None or val < ans:
+                    ans = val
+            return ans
+
+        def fset(self, val):
+            least_val = least_elem = None
+            for match in self.pubdate_path(self.metadata):
+                try:
+                    cval = parse_date(etree.tostring(match, encoding=unicode,
+                        method='text', with_tail=False).strip())
+                except:
+                    match.getparent().remove(match)
+                else:
+                    if not val:
+                        match.getparent().remove(match)
+                    if least_val is None or cval < least_val:
+                        least_val, least_elem = cval, match
+
+            if val:
+                if least_val is None:
+                    least_elem = self.create_metadata_element('date')
+
+                least_elem.attrib.clear()
+                least_elem.text = isoformat(val)
+
+        return property(fget=fget, fset=fset)
+
     @dynamic_property
     def isbn(self):
@@ -79,7 +79,7 @@ class PagedDisplay
         if not this.in_paged_mode
             # Check if the current document is a full screen layout like
             # cover, if so we treat it specially.
-            single_screen = (document.body.scrollWidth < window.innerWidth + 25 and document.body.scrollHeight < window.innerHeight + 25)
+            single_screen = (document.body.scrollHeight < window.innerHeight + 75)
             first_layout = true

         ww = window.innerWidth

@@ -149,7 +149,7 @@ class PagedDisplay
         # current page (when cols_per_screen == 1). Similarly img elements
         # with height=100% overflow the first column
         has_svg = document.getElementsByTagName('svg').length > 0
-        only_img = document.getElementsByTagName('img').length == 1 and document.getElementsByTagName('div').length < 2 and document.getElementsByTagName('p').length < 2
+        only_img = document.getElementsByTagName('img').length == 1 and document.getElementsByTagName('div').length < 3 and document.getElementsByTagName('p').length < 2
        this.is_full_screen_layout = (only_img or has_svg) and single_screen and document.body.scrollWidth > document.body.clientWidth

        this.in_paged_mode = true
@@ -6,7 +6,7 @@ __license__ = 'GPL v3'
 __copyright__ = '2009, Kovid Goyal <kovid@kovidgoyal.net>'
 __docformat__ = 'restructuredtext en'

-import re
+import re, uuid

 from lxml import etree
 from urlparse import urlparse

@@ -80,6 +80,35 @@ class DetectStructure(object):
             if not node.title or not node.title.strip():
                 node.title = _('Unnamed')

+        if self.opts.start_reading_at:
+            self.detect_start_reading()
+
+    def detect_start_reading(self):
+        expr = self.opts.start_reading_at
+        try:
+            expr = XPath(expr)
+        except:
+            self.log.warn(
+                'Invalid start reading at XPath expression, ignoring: %s'%expr)
+            return
+        for item in self.oeb.spine:
+            if not hasattr(item.data, 'xpath'): continue
+            matches = expr(item.data)
+            if matches:
+                elem = matches[0]
+                eid = elem.get('id', None)
+                if not eid:
+                    eid = u'start_reading_at_'+unicode(uuid.uuid4()).replace(u'-', u'')
+                    elem.set('id', eid)
+                if u'text' in self.oeb.guide:
+                    self.oeb.guide.remove(u'text')
+                self.oeb.guide.add(u'text', u'Start', item.href+u'#'+eid)
+                self.log('Setting start reading at position to %s in %s'%(
+                    self.opts.start_reading_at, item.href))
+                return
+        self.log.warn("Failed to find start reading at position: %s"%
+                self.opts.start_reading_at)
+
     def detect_chapters(self):
         self.detected_chapters = []
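A self-contained sketch of the approach detect_start_reading() takes, using plain lxml rather than calibre's OEB objects; the XHTML snippet and the XPath expression here are illustrative only.

    import uuid
    from lxml import etree

    html = etree.fromstring(
        '<html xmlns="http://www.w3.org/1999/xhtml"><body>'
        '<div class="frontmatter">front matter</div>'
        '<h1 class="chapter">Chapter One</h1>'
        '</body></html>')
    # Evaluate the user's expression against a spine document
    expr = etree.XPath("//*[local-name()='h1' and @class='chapter']")
    matches = expr(html)
    if matches:
        elem = matches[0]
        # Reuse an existing id, otherwise mint one, so the guide's
        # 'text' (start reading) reference has something to point at
        eid = elem.get('id') or 'start_reading_at_' + uuid.uuid4().hex
        elem.set('id', eid)
        print('guide reference target: #%s' % eid)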
@@ -35,26 +35,31 @@ class Outline(object):
         page, ypos = 0, 0
         item = getattr(toc, 'outline_item_', None)
         if item is not None:
+            # First use the item URL without fragment
+            page, ypos = self.pos_map.get(item, {}).get(None, (0, 0))
             if toc.fragment:
                 amap = self.pos_map.get(item, None)
                 if amap is not None:
-                    page, ypos = amap.get(toc.fragment, (0, 0))
-            else:
-                page, ypos = self.pos_map.get(item, {}).get(None, (0, 0))
+                    page, ypos = amap.get(toc.fragment, (page, ypos))
         return page, ypos

     def add_children(self, toc, parent):
         for child in toc:
             page, ypos = self.get_pos(child)
             text = child.text or _('Page %d')%page
+            if page >= self.page_count:
+                page = self.page_count - 1
             cn = parent.create(text, page, True)
             self.add_children(child, cn)

     def __call__(self, doc):
         self.pos_map = dict(self.pos_map)
+        self.page_count = doc.page_count()
         for child in self.toc:
             page, ypos = self.get_pos(child)
             text = child.text or _('Page %d')%page
+            if page >= self.page_count:
+                page = self.page_count - 1
             node = doc.create_outline(text, page)
             self.add_children(child, node)
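A small standalone sketch of the lookup order get_pos() now follows: the item-level position (keyed by None) is taken first, and a fragment entry only overrides it when the anchor map actually contains that fragment. The pos_map layout mirrors the code above; the values are made up.

    pos_map = {'chap1.html': {None: (3, 0), 'sec2': (5, 120)}}

    def get_pos(item, fragment):
        page, ypos = pos_map.get(item, {}).get(None, (0, 0))
        if fragment:
            amap = pos_map.get(item)
            if amap is not None:
                # Unknown fragments fall back to the item-level position
                page, ypos = amap.get(fragment, (page, ypos))
        return page, ypos

    print(get_pos('chap1.html', 'sec2'))      # (5, 120)
    print(get_pos('chap1.html', 'missing'))   # (3, 0)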
@@ -121,7 +121,7 @@ class PDFMetadata(object): # {{{
         self.author = force_unicode(self.author)
 # }}}

-class Page(QWebPage):
+class Page(QWebPage): # {{{

     def __init__(self, opts, log):
         self.log = log

@@ -137,17 +137,22 @@ class Page(QWebPage):
         std = {'serif':opts.pdf_serif_family, 'sans':opts.pdf_sans_family,
                 'mono':opts.pdf_mono_family}.get(opts.pdf_standard_font,
                         opts.pdf_serif_family)
-        settings.setFontFamily(QWebSettings.StandardFont, std)
-        settings.setFontFamily(QWebSettings.SerifFont, opts.pdf_serif_family)
-        settings.setFontFamily(QWebSettings.SansSerifFont,
-                opts.pdf_sans_family)
-        settings.setFontFamily(QWebSettings.FixedFont, opts.pdf_mono_family)
+        if std:
+            settings.setFontFamily(QWebSettings.StandardFont, std)
+        if opts.pdf_serif_family:
+            settings.setFontFamily(QWebSettings.SerifFont, opts.pdf_serif_family)
+        if opts.pdf_sans_family:
+            settings.setFontFamily(QWebSettings.SansSerifFont,
+                    opts.pdf_sans_family)
+        if opts.pdf_mono_family:
+            settings.setFontFamily(QWebSettings.FixedFont, opts.pdf_mono_family)

     def javaScriptConsoleMessage(self, msg, lineno, msgid):
         self.log.debug(u'JS:', unicode(msg))

     def javaScriptAlert(self, frame, msg):
         self.log(unicode(msg))
+    # }}}

 class PDFWriter(QObject): # {{{

@@ -192,6 +197,7 @@ class PDFWriter(QObject): # {{{
             self.insert_cover()

         self.render_succeeded = False
+        self.current_page_num = self.doc.page_count()
         self.combine_queue.append(os.path.join(self.tmp_path,
             'qprinter_out.pdf'))
         self.first_page = True

@@ -279,9 +285,13 @@ class PDFWriter(QObject): # {{{
             paged_display.fit_images();
         ''')
         mf = self.view.page().mainFrame()
+        start_page = self.current_page_num
+        if not self.first_page:
+            start_page += 1
         while True:
             if not self.first_page:
-                self.printer.newPage()
+                if self.printer.newPage():
+                    self.current_page_num += 1
             self.first_page = False
             mf.render(self.painter)
             nsl = evaljs('paged_display.next_screen_location()').toInt()

@@ -293,11 +303,10 @@ class PDFWriter(QObject): # {{{
             amap = self.bridge_value
             if not isinstance(amap, dict):
                 amap = {} # Some javascript error occurred
-            pages = self.doc.page_count()
-            self.outline.set_pos(self.current_item, None, pages, 0)
+            self.outline.set_pos(self.current_item, None, start_page, 0)
             for anchor, x in amap.iteritems():
                 pagenum, ypos = x
-                self.outline.set_pos(self.current_item, anchor, pages + pagenum, ypos)
+                self.outline.set_pos(self.current_item, anchor, start_page + pagenum, ypos)

     def append_doc(self, outpath):
         doc = self.podofo.PDFDoc()
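A rough sketch of the page bookkeeping the hunks above introduce: anchor positions reported by the JavaScript side are relative to the current HTML file, so they are offset by the page that file started on rather than by the document's total page count. All numbers here are illustrative.

    current_page_num = 12          # pages already emitted before this file
    first_page = False
    start_page = current_page_num + (0 if first_page else 1)  # this file begins on page 13
    anchor_map = {'toc-anchor': (2, 300)}                      # page offsets within the file
    for anchor, (pagenum, ypos) in anchor_map.items():
        # absolute page for the outline entry
        print(anchor, '->', start_page + pagenum, ypos)        # toc-anchor -> 15 300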
@@ -342,8 +351,7 @@ class PDFWriter(QObject): # {{{
             if self.metadata.tags:
                 self.doc.keywords = self.metadata.tags
             self.outline(self.doc)
-            raw = self.doc.write()
-            self.out_stream.write(raw)
+            self.doc.save_to_fileobj(self.out_stream)
             self.render_succeeded = True
         finally:
             self._delete_tmpdir()
@@ -101,6 +101,7 @@ gprefs.defaults['auto_add_auto_convert'] = True
 gprefs.defaults['ui_style'] = 'calibre' if iswindows or isosx else 'system'
 gprefs.defaults['tag_browser_old_look'] = False
 gprefs.defaults['book_list_tooltips'] = True
+gprefs.defaults['bd_show_cover'] = True
 # }}}

 NONE = QVariant() #: Null value to return from the data function of item models
@@ -10,9 +10,9 @@ from functools import partial

 from PyQt4.Qt import QPixmap, QTimer

-from calibre.gui2 import error_dialog, choose_files, \
-        choose_dir, warning_dialog, info_dialog
+from calibre import as_unicode
+from calibre.gui2 import (error_dialog, choose_files, choose_dir,
+        warning_dialog, info_dialog)
 from calibre.gui2.dialogs.add_empty_book import AddEmptyBookDialog
 from calibre.gui2.dialogs.progress import ProgressDialog
 from calibre.gui2.widgets import IMAGE_EXTENSIONS

@@ -400,12 +400,45 @@ class AddAction(InterfaceAction):
             d = error_dialog(self.gui, _('Add to library'), _('No book files found'))
             d.exec_()
             return
-        paths = self.gui.device_manager.device.prepare_addable_books(paths)
-        from calibre.gui2.add import Adder
-        self.__adder_func = partial(self._add_from_device_adder, on_card=None,
-                model=view.model())
-        self._adder = Adder(self.gui, self.gui.library_view.model().db,
-                self.Dispatcher(self.__adder_func), spare_server=self.gui.spare_server)
-        self._adder.add(paths)
+        self.gui.device_manager.prepare_addable_books(self.Dispatcher(partial(
+            self.books_prepared, view)), paths)
+        self.bpd = ProgressDialog(_('Downloading books'),
+                msg=_('Downloading books from device'), parent=self.gui,
+                cancelable=False)
+        QTimer.singleShot(1000, self.show_bpd)
+
+    def show_bpd(self):
+        if self.bpd is not None:
+            self.bpd.show()
+
+    def books_prepared(self, view, job):
+        self.bpd.hide()
+        self.bpd = None
+        if job.exception is not None:
+            self.gui.device_job_exception(job)
+            return
+        paths = job.result
+        ok_paths = [x for x in paths if isinstance(x, basestring)]
+        failed_paths = [x for x in paths if isinstance(x, tuple)]
+        if failed_paths:
+            if not ok_paths:
+                msg = _('Could not download files from the device')
+                typ = error_dialog
+            else:
+                msg = _('Could not download some files from the device')
+                typ = warning_dialog
+            det_msg = [x[0]+ '\n ' + as_unicode(x[1]) for x in failed_paths]
+            det_msg = '\n\n'.join(det_msg)
+            typ(self.gui, _('Could not download files'), msg, det_msg=det_msg,
+                    show=True)
+
+        if ok_paths:
+            from calibre.gui2.add import Adder
+            self.__adder_func = partial(self._add_from_device_adder, on_card=None,
+                    model=view.model())
+            self._adder = Adder(self.gui, self.gui.library_view.model().db,
+                    self.Dispatcher(self.__adder_func), spare_server=self.gui.spare_server)
+            self._adder.add(ok_paths)
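The result list handed to books_prepared() mixes successfully downloaded paths (strings) with (path, error) tuples for failures, and a simple isinstance() partition separates them. A tiny standalone sketch with made-up data (using Python 3's str where the original checks basestring):

    results = ['/tmp/a.epub', ('b.epub', IOError('device removed')), '/tmp/c.epub']
    ok_paths = [x for x in results if isinstance(x, str)]
    failed_paths = [x for x in results if isinstance(x, tuple)]
    det_msg = '\n\n'.join('%s\n %s' % (p, e) for p, e in failed_paths)
    print(ok_paths)   # ['/tmp/a.epub', '/tmp/c.epub']
    print(det_msg)    # b.epub, followed by its error message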
@@ -256,6 +256,15 @@ class ViewAction(InterfaceAction):
             db.prefs['gui_view_history'] = history[:vh]
             self.build_menus(db)

+    def view_device_book(self, path):
+        pt = PersistentTemporaryFile('_view_device_book'+\
+                os.path.splitext(path)[1])
+        self.persistent_files.append(pt)
+        pt.close()
+        self.gui.device_manager.view_book(
+                Dispatcher(self.book_downloaded_for_viewing),
+                path, pt.name)
+
     def _view_books(self, rows):
         if not rows or len(rows) == 0:
             self._launch_viewer()

@@ -270,12 +279,5 @@ class ViewAction(InterfaceAction):
         else:
             paths = self.gui.current_view().model().paths(rows)
             for path in paths:
-                pt = PersistentTemporaryFile('_viewer_'+\
-                        os.path.splitext(path)[1])
-                self.persistent_files.append(pt)
-                pt.close()
-                self.gui.device_manager.view_book(\
-                        Dispatcher(self.book_downloaded_for_viewing),
-                        path, pt.name)
+                self.view_device_book(path)
@@ -19,8 +19,8 @@ from calibre.ebooks.metadata import fmt_sidx
 from calibre.ebooks.metadata.sources.identify import urls_from_identifiers
 from calibre.constants import filesystem_encoding
 from calibre.library.comments import comments_to_html
-from calibre.gui2 import (config, open_local_file, open_url, pixmap_to_data,
-        gprefs, rating_font)
+from calibre.gui2 import (config, open_url, pixmap_to_data, gprefs,
+        rating_font)
 from calibre.utils.icu import sort_key
 from calibre.utils.formatter import EvalFormatter
 from calibre.utils.date import is_date_undefined

@@ -297,7 +297,8 @@ class CoverView(QWidget): # {{{
             self.pixmap = self.default_pixmap
         self.do_layout()
         self.update()
-        if not same_item and not config['disable_animations']:
+        if (not same_item and not config['disable_animations'] and
+                self.isVisible()):
             self.animation.start()

     def paintEvent(self, event):

@@ -512,6 +513,7 @@ class DetailsLayout(QLayout): # {{{
         self.do_layout(r)

     def cover_height(self, r):
+        if not self._children[0].widget().isVisible(): return 0
         mh = min(int(r.height()/2.), int(4/3. * r.width())+1)
         try:
             ph = self._children[0].widget().pixmap.height()

@@ -522,6 +524,7 @@ class DetailsLayout(QLayout): # {{{
         return mh

     def cover_width(self, r):
+        if not self._children[0].widget().isVisible(): return 0
         mw = 1 + int(3/4. * r.height())
         try:
             pw = self._children[0].widget().pixmap.width()

@@ -566,6 +569,7 @@ class BookDetails(QWidget): # {{{
     files_dropped = pyqtSignal(object, object)
     cover_changed = pyqtSignal(object, object)
     cover_removed = pyqtSignal(object)
+    view_device_book = pyqtSignal(object)

     # Drag 'n drop {{{
     DROPABBLE_EXTENSIONS = IMAGE_EXTENSIONS+BOOK_EXTENSIONS

@@ -640,7 +644,7 @@ class BookDetails(QWidget): # {{{
             id_, fmt = val.split(':')
             self.view_specific_format.emit(int(id_), fmt)
         elif typ == 'devpath':
-            open_local_file(val)
+            self.view_device_book.emit(val)
         else:
             try:
                 open_url(QUrl(link, QUrl.TolerantMode))

@@ -660,6 +664,7 @@ class BookDetails(QWidget): # {{{
         self.update_layout()

     def update_layout(self):
+        self.cover_view.setVisible(gprefs['bd_show_cover'])
         self._layout.do_layout(self.rect())
         self.cover_view.update_tooltip(self.current_path)
@@ -143,7 +143,7 @@ class Widget(QWidget):
                 ans = None
             return ans
         elif isinstance(g, QFontComboBox):
-            ans = unicode(QFontInfo(g.currentFont().family()))
+            return unicode(QFontInfo(g.currentFont()).family())
         elif isinstance(g, EncodingComboBox):
             ans = unicode(g.currentText()).strip()
             try:
|
|||||||
|
|
||||||
def __init__(self, parent, get_option, get_help, db=None, book_id=None):
|
def __init__(self, parent, get_option, get_help, db=None, book_id=None):
|
||||||
Widget.__init__(self, parent,
|
Widget.__init__(self, parent,
|
||||||
['chapter', 'chapter_mark',
|
['chapter', 'chapter_mark', 'start_reading_at',
|
||||||
'remove_first_image', 'remove_fake_margins',
|
'remove_first_image', 'remove_fake_margins',
|
||||||
'insert_metadata', 'page_breaks_before']
|
'insert_metadata', 'page_breaks_before']
|
||||||
)
|
)
|
||||||
@ -31,15 +31,18 @@ class StructureDetectionWidget(Widget, Ui_Form):
|
|||||||
self.opt_chapter.set_msg(_('Detect chapters at (XPath expression):'))
|
self.opt_chapter.set_msg(_('Detect chapters at (XPath expression):'))
|
||||||
self.opt_page_breaks_before.set_msg(_('Insert page breaks before '
|
self.opt_page_breaks_before.set_msg(_('Insert page breaks before '
|
||||||
'(XPath expression):'))
|
'(XPath expression):'))
|
||||||
|
self.opt_start_reading_at.set_msg(
|
||||||
|
_('Start reading at (XPath expression):'))
|
||||||
|
|
||||||
def break_cycles(self):
|
def break_cycles(self):
|
||||||
Widget.break_cycles(self)
|
Widget.break_cycles(self)
|
||||||
|
|
||||||
def pre_commit_check(self):
|
def pre_commit_check(self):
|
||||||
for x in ('chapter', 'page_breaks_before'):
|
for x in ('chapter', 'page_breaks_before', 'start_reading_at'):
|
||||||
x = getattr(self, 'opt_'+x)
|
x = getattr(self, 'opt_'+x)
|
||||||
if not x.check():
|
if not x.check():
|
||||||
error_dialog(self, _('Invalid XPath'),
|
error_dialog(self, _('Invalid XPath'),
|
||||||
_('The XPath expression %s is invalid.')%x.text).exec_()
|
_('The XPath expression %s is invalid.')%x.text).exec_()
|
||||||
return False
|
return False
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
@@ -14,10 +14,40 @@
    <string>Form</string>
   </property>
   <layout class="QGridLayout" name="gridLayout">
-   <item row="0" column="0" colspan="3">
+   <item row="2" column="3">
+    <widget class="QCheckBox" name="opt_remove_fake_margins">
+     <property name="text">
+      <string>Remove &fake margins</string>
+     </property>
+    </widget>
+   </item>
+   <item row="4" column="0" colspan="4">
+    <widget class="QLabel" name="label_2">
+     <property name="text">
+      <string>The header and footer removal options have been replaced by the Search & Replace options. Click the Search & Replace category in the bar to the left to use these options. Leave the replace field blank and enter your header/footer removal regexps into the search field.</string>
+     </property>
+     <property name="wordWrap">
+      <bool>true</bool>
+     </property>
+    </widget>
+   </item>
+   <item row="5" column="0" rowspan="2" colspan="4">
+    <widget class="XPathEdit" name="opt_page_breaks_before" native="true"/>
+   </item>
+   <item row="3" column="0" colspan="4">
+    <widget class="QCheckBox" name="opt_insert_metadata">
+     <property name="text">
+      <string>Insert &metadata as page at start of book</string>
+     </property>
+    </widget>
+   </item>
+   <item row="7" column="0" colspan="4">
+    <widget class="XPathEdit" name="opt_start_reading_at" native="true"/>
+   </item>
+   <item row="0" column="0" colspan="4">
    <widget class="XPathEdit" name="opt_chapter" native="true"/>
   </item>
-   <item row="1" column="0">
+   <item row="1" column="0" colspan="2">
    <widget class="QLabel" name="label">
     <property name="text">
      <string>Chapter &mark:</string>

@@ -27,44 +57,14 @@
     </property>
    </widget>
   </item>
-   <item row="1" column="1">
+   <item row="1" column="2">
    <widget class="QComboBox" name="opt_chapter_mark">
     <property name="minimumContentsLength">
      <number>20</number>
     </property>
    </widget>
   </item>
-   <item row="2" column="0" colspan="2">
-    <widget class="QCheckBox" name="opt_remove_first_image">
-     <property name="text">
-      <string>Remove first &image</string>
-     </property>
-    </widget>
-   </item>
-   <item row="3" column="0" colspan="2">
-    <widget class="QCheckBox" name="opt_insert_metadata">
-     <property name="text">
-      <string>Insert &metadata as page at start of book</string>
-     </property>
-    </widget>
-   </item>
-   <item row="7" column="0" colspan="3">
-    <widget class="XPathEdit" name="opt_page_breaks_before" native="true"/>
-   </item>
-   <item row="8" column="0" colspan="3">
-    <spacer name="verticalSpacer">
-     <property name="orientation">
-      <enum>Qt::Vertical</enum>
-     </property>
-     <property name="sizeHint" stdset="0">
-      <size>
-       <width>20</width>
-       <height>40</height>
-      </size>
-     </property>
-    </spacer>
-   </item>
-   <item row="1" column="2">
+   <item row="1" column="3">
    <spacer name="horizontalSpacer">
     <property name="orientation">
     <enum>Qt::Horizontal</enum>

@@ -77,22 +77,25 @@
     </property>
    </spacer>
   </item>
-   <item row="5" column="0" colspan="3">
-    <widget class="QLabel" name="label_2">
+   <item row="2" column="0" colspan="3">
+    <widget class="QCheckBox" name="opt_remove_first_image">
     <property name="text">
-      <string>The header and footer removal options have been replaced by the Search & Replace options. Click the Search & Replace category in the bar to the left to use these options. Leave the replace field blank and enter your header/footer removal regexps into the search field.</string>
-     </property>
-     <property name="wordWrap">
-      <bool>true</bool>
+      <string>Remove first &image</string>
     </property>
    </widget>
   </item>
-   <item row="2" column="2">
-    <widget class="QCheckBox" name="opt_remove_fake_margins">
-     <property name="text">
-      <string>Remove &fake margins</string>
+   <item row="8" column="0">
+    <spacer name="verticalSpacer">
+     <property name="orientation">
+      <enum>Qt::Vertical</enum>
     </property>
-    </widget>
+     <property name="sizeHint" stdset="0">
+      <size>
+       <width>20</width>
+       <height>40</height>
+      </size>
+     </property>
+    </spacer>
   </item>
  </layout>
 </widget>
@@ -128,6 +128,10 @@ class DeviceManager(Thread): # {{{
         self.setDaemon(True)
         # [Device driver, Showing in GUI, Ejected]
         self.devices = list(device_plugins())
+        self.managed_devices = [x for x in self.devices if
+                not x.MANAGES_DEVICE_PRESENCE]
+        self.unmanaged_devices = [x for x in self.devices if
+                x.MANAGES_DEVICE_PRESENCE]
         self.sleep_time = sleep_time
         self.connected_slot = connected_slot
         self.jobs = Queue.Queue(0)

@@ -182,12 +186,15 @@ class DeviceManager(Thread): # {{{
                     prints('Unable to open device', str(dev))
                     prints(tb)
                 continue
-            self.connected_device = dev
-            self.connected_device_kind = device_kind
-            self.connected_slot(True, device_kind)
+            self.after_device_connect(dev, device_kind)
             return True
         return False

+    def after_device_connect(self, dev, device_kind):
+        self.connected_device = dev
+        self.connected_device_kind = device_kind
+        self.connected_slot(True, device_kind)
+
     def connected_device_removed(self):
         while True:
             try:

@@ -215,22 +222,45 @@ class DeviceManager(Thread): # {{{

     def detect_device(self):
         self.scanner.scan()

         if self.is_device_connected:
-            connected, detected_device = \
-                self.scanner.is_device_connected(self.connected_device,
-                        only_presence=True)
-            if not connected:
-                if DEBUG:
-                    # Allow the device subsystem to output debugging info about
-                    # why it thinks the device is not connected. Used, for e.g.
-                    # in the can_handle() method of the T1 driver
-                    self.scanner.is_device_connected(self.connected_device,
-                            only_presence=True, debug=True)
-                self.connected_device_removed()
+            if self.connected_device.MANAGES_DEVICE_PRESENCE:
+                cd = self.connected_device.detect_managed_devices(self.scanner.devices)
+                if cd is None:
+                    self.connected_device_removed()
+            else:
+                connected, detected_device = \
+                    self.scanner.is_device_connected(self.connected_device,
+                            only_presence=True)
+                if not connected:
+                    if DEBUG:
+                        # Allow the device subsystem to output debugging info about
+                        # why it thinks the device is not connected. Used, for e.g.
+                        # in the can_handle() method of the T1 driver
+                        self.scanner.is_device_connected(self.connected_device,
+                                only_presence=True, debug=True)
+                    self.connected_device_removed()
         else:
+            for dev in self.unmanaged_devices:
+                try:
+                    cd = dev.detect_managed_devices(self.scanner.devices)
+                except:
+                    prints('Error during device detection for %s:'%dev)
+                    traceback.print_exc()
+                else:
+                    if cd is not None:
+                        try:
+                            dev.open(cd, self.current_library_uuid)
+                        except:
+                            prints('Error while trying to open %s (Driver: %s)'%
+                                    (cd, dev))
+                            traceback.print_exc()
+                        else:
+                            self.after_device_connect(dev, 'unmanaged-device')
+                            return
             try:
                 possibly_connected_devices = []
-                for device in self.devices:
+                for device in self.managed_devices:
                     if device in self.ejected_devices:
                         continue
                     try:
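A stand-alone sketch of the split detect_device() now makes, using dummy driver classes (not real calibre drivers): plugins that declare MANAGES_DEVICE_PRESENCE scan for their own devices via detect_managed_devices(), while everything else keeps going through the USB scanner lists.

    class SelfManagedDriver(object):
        MANAGES_DEVICE_PRESENCE = True
        def detect_managed_devices(self, scanned_usb_devices):
            return None  # pretend nothing was found on this scan

    class ScannerDriver(object):
        MANAGES_DEVICE_PRESENCE = False

    devices = [SelfManagedDriver(), ScannerDriver()]
    # Same comprehensions as in the constructor above
    managed_devices = [x for x in devices if not x.MANAGES_DEVICE_PRESENCE]
    unmanaged_devices = [x for x in devices if x.MANAGES_DEVICE_PRESENCE]

    for dev in unmanaged_devices:
        cd = dev.detect_managed_devices([])
        if cd is not None:
            print('self-managed driver found a device:', dev)
    print(len(managed_devices), 'driver(s) still polled via the USB scanner')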
@@ -248,7 +278,7 @@ class DeviceManager(Thread): # {{{
                         prints('Connect to device failed, retrying in 5 seconds...')
                     time.sleep(5)
                     if not self.do_connect(possibly_connected_devices,
-                                    device_kind='usb'):
+                                    device_kind='device'):
                         if DEBUG:
                             prints('Device connect failed again, giving up')
             except OpenFailed as e:

@@ -264,9 +294,10 @@ class DeviceManager(Thread): # {{{
     # disconnect a device
     def umount_device(self, *args):
         if self.is_device_connected and not self.job_manager.has_device_jobs():
-            if self.connected_device_kind == 'device':
+            if self.connected_device_kind in {'unmanaged-device', 'device'}:
                 self.connected_device.eject()
-                self.ejected_devices.add(self.connected_device)
+                if self.connected_device_kind != 'unmanaged-device':
+                    self.ejected_devices.add(self.connected_device)
                 self.connected_slot(False, self.connected_device_kind)
             elif hasattr(self.connected_device, 'unmount_device'):
                 # As we are on the wrong thread, this call must *not* do

@@ -412,6 +443,14 @@ class DeviceManager(Thread): # {{{
         return self.create_job_step(self._books, done,
                 description=_('Get list of books on device'), to_job=add_as_step_to_job)

+    def _prepare_addable_books(self, paths):
+        return self.device.prepare_addable_books(paths)
+
+    def prepare_addable_books(self, done, paths, add_as_step_to_job=None):
+        return self.create_job_step(self._prepare_addable_books, done, args=[paths],
+                description=_('Prepare files for transfer from device'),
+                to_job=add_as_step_to_job)
+
     def _annotations(self, path_map):
         return self.device.get_annotations(path_map)

@@ -525,9 +564,8 @@ class DeviceManager(Thread): # {{{
                 to_job=add_as_step_to_job)

     def _view_book(self, path, target):
-        f = open(target, 'wb')
-        self.device.get_file(path, f)
-        f.close()
+        with open(target, 'wb') as f:
+            self.device.get_file(path, f)
         return target

     def view_book(self, done, path, target, add_as_step_to_job=None):
@@ -939,7 +977,7 @@ class DeviceMixin(object): # {{{
             self.set_default_thumbnail(\
                     self.device_manager.device.THUMBNAIL_HEIGHT)
         self.status_bar.show_message(_('Device: ')+\
-            self.device_manager.device.__class__.get_gui_name()+\
+            self.device_manager.device.get_gui_name()+\
                     _(' detected.'), 3000)
         self.device_connected = device_kind
         self.library_view.set_device_connected(self.device_connected)

@@ -1457,8 +1495,12 @@ class DeviceMixin(object): # {{{
             self.device_job_exception(job)
             return

-        self.device_manager.add_books_to_metadata(job.result,
-                metadata, self.booklists())
+        try:
+            self.device_manager.add_books_to_metadata(job.result,
+                    metadata, self.booklists())
+        except:
+            traceback.print_exc()
+            raise

         books_to_be_deleted = []
         if memory and memory[1]:
@@ -89,6 +89,7 @@ class ConfigWidget(QWidget, Ui_ConfigWidget):
                     l.setBuddy(self.opt_extra_customization[i])
                     l.setWordWrap(True)
                     self.opt_extra_customization[i].setText(settings.extra_customization[i])
+                    self.opt_extra_customization[i].setCursorPosition(0)
                     self.extra_layout.addWidget(l, row_func(i, 0), col_func(i))
                     self.extra_layout.addWidget(self.opt_extra_customization[i],
                                                 row_func(i, 1), col_func(i))

@@ -101,6 +102,7 @@ class ConfigWidget(QWidget, Ui_ConfigWidget):
             l.setWordWrap(True)
             if settings.extra_customization:
                 self.opt_extra_customization.setText(settings.extra_customization)
+                self.opt_extra_customization.setCursorPosition(0)
             self.opt_extra_customization.setCursorPosition(0)
             self.extra_layout.addWidget(l, 0, 0)
             self.extra_layout.addWidget(self.opt_extra_customization, 1, 0)
@@ -269,6 +269,8 @@ class LayoutMixin(object): # {{{
                 self.iactions['Remove Books'].remove_format_by_id)
         self.book_details.save_specific_format.connect(
                 self.iactions['Save To Disk'].save_library_format_by_ids)
+        self.book_details.view_device_book.connect(
+                self.iactions['View'].view_device_book)

         m = self.library_view.model()
         if m.rowCount(None) > 0:
@@ -106,6 +106,7 @@ class ConfigWidget(ConfigWidgetBase, Ui_Form):
             'calibre')])
         r('book_list_tooltips', gprefs)
         r('tag_browser_old_look', gprefs, restart_required=True)
+        r('bd_show_cover', gprefs)

         r('cover_flow_queue_length', config, restart_required=True)
@@ -212,19 +212,32 @@
     <string>Book Details</string>
    </attribute>
    <layout class="QGridLayout" name="gridLayout_12">
-    <item row="1" column="0" rowspan="2">
+    <item row="2" column="1">
+     <widget class="QLabel" name="label_3">
+      <property name="text">
+       <string>Note that <b>comments</b> will always be displayed at the end, regardless of the position you assign here.</string>
+      </property>
+      <property name="wordWrap">
+       <bool>true</bool>
+      </property>
+     </widget>
+    </item>
+    <item row="0" column="1">
+     <widget class="QCheckBox" name="opt_use_roman_numerals_for_series_number">
+      <property name="text">
+       <string>Use &Roman numerals for series</string>
+      </property>
+      <property name="checked">
+       <bool>true</bool>
+      </property>
+     </widget>
+    </item>
+    <item row="2" column="0" rowspan="2">
     <widget class="QGroupBox" name="groupBox">
      <property name="title">
       <string>Select displayed metadata</string>
      </property>
      <layout class="QGridLayout" name="gridLayout_3">
-      <item row="0" column="0" rowspan="3">
-       <widget class="QListView" name="field_display_order">
-        <property name="alternatingRowColors">
-         <bool>true</bool>
-        </property>
-       </widget>
-      </item>
       <item row="0" column="1">
        <widget class="QToolButton" name="df_up_button">
         <property name="toolTip">

@@ -247,6 +260,13 @@
         </property>
        </widget>
       </item>
+      <item row="0" column="0" rowspan="3">
+       <widget class="QListView" name="field_display_order">
+        <property name="alternatingRowColors">
+         <bool>true</bool>
+        </property>
+       </widget>
+      </item>
       <item row="1" column="1">
        <spacer name="verticalSpacer_5">
         <property name="orientation">

@@ -288,23 +308,10 @@ Manage Authors. You can use the values {author} and
       </item>
      </layout>
     </item>
-    <item row="0" column="1">
-     <widget class="QCheckBox" name="opt_use_roman_numerals_for_series_number">
+    <item row="1" column="0" colspan="2">
+     <widget class="QCheckBox" name="opt_bd_show_cover">
      <property name="text">
-       <string>Use &Roman numerals for series</string>
-      </property>
-      <property name="checked">
-       <bool>true</bool>
-      </property>
-     </widget>
-    </item>
-    <item row="1" column="1">
-     <widget class="QLabel" name="label_3">
-      <property name="text">
-       <string>Note that <b>comments</b> will always be displayed at the end, regardless of the position you assign here.</string>
-      </property>
-      <property name="wordWrap">
-       <bool>true</bool>
+       <string>Show &cover in the book details panel</string>
      </property>
     </widget>
    </item>
@@ -1294,7 +1294,7 @@ Author '{0}':
         def add_books_to_HTML_by_month(this_months_list, dtc):
             if len(this_months_list):

-                this_months_list = sorted(this_months_list, key=self.booksByAuthorSorter_author_sort)
+                this_months_list = sorted(this_months_list, key=lambda x: sort_key(self.booksByAuthorSorter_author_sort(x)))

                 # Create a new month anchor
                 date_string = strftime(u'%B %Y', current_date.timetuple())

@@ -3091,14 +3091,14 @@ Author '{0}':
         Sort non-series books before series books
         '''
         if not book['series']:
-            key = '%s %s' % (capitalize(book['author_sort']),
+            key = '%s ~%s' % (capitalize(book['author_sort']),
                              capitalize(book['title_sort']))
         else:
             index = book['series_index']
             integer = int(index)
             fraction = index-integer
             series_index = '%04d%s' % (integer, str('%0.4f' % fraction).lstrip('0'))
-            key = '%s ~%s %s' % (capitalize(book['author_sort']),
+            key = '%s %s %s' % (capitalize(book['author_sort']),
                                  self.generateSortTitle(book['series']),
                                  series_index)
         return key
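The series_index component of the key above is zero-padded so that plain string comparison of the keys matches numeric order of the series indices. A quick standalone check of that formatting, with made-up index values:

    for index in (1, 2.5, 10, 10.25):
        integer = int(index)
        fraction = index - integer
        print('%04d%s' % (integer, str('%0.4f' % fraction).lstrip('0')))
    # prints 0001.0000, 0002.5000, 0010.0000, 0010.2500 - lexicographic order
    # of these strings is the same as numeric order of the indices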
|
@ -32,7 +32,7 @@ from calibre.customize.ui import run_plugins_on_import
|
|||||||
from calibre import isbytestring
|
from calibre import isbytestring
|
||||||
from calibre.utils.filenames import ascii_filename, samefile
|
from calibre.utils.filenames import ascii_filename, samefile
|
||||||
from calibre.utils.date import (utcnow, now as nowf, utcfromtimestamp,
|
from calibre.utils.date import (utcnow, now as nowf, utcfromtimestamp,
|
||||||
parse_only_date)
|
parse_only_date, UNDEFINED_DATE)
|
||||||
from calibre.utils.config import prefs, tweaks, from_json, to_json
|
from calibre.utils.config import prefs, tweaks, from_json, to_json
|
||||||
from calibre.utils.icu import sort_key, strcmp, lower
|
from calibre.utils.icu import sort_key, strcmp, lower
|
||||||
from calibre.utils.search_query_parser import saved_searches, set_saved_searches
|
from calibre.utils.search_query_parser import saved_searches, set_saved_searches
|
||||||
@ -2498,16 +2498,17 @@ class LibraryDatabase2(LibraryDatabase, SchemaUpgrade, CustomColumns):
|
|||||||
self.notify('metadata', [id])
|
self.notify('metadata', [id])
|
||||||
|
|
||||||
def set_pubdate(self, id, dt, notify=True, commit=True):
|
def set_pubdate(self, id, dt, notify=True, commit=True):
|
||||||
if dt:
|
if not dt:
|
||||||
if isinstance(dt, basestring):
|
dt = UNDEFINED_DATE
|
||||||
dt = parse_only_date(dt)
|
if isinstance(dt, basestring):
|
||||||
self.conn.execute('UPDATE books SET pubdate=? WHERE id=?', (dt, id))
|
dt = parse_only_date(dt)
|
||||||
self.data.set(id, self.FIELD_MAP['pubdate'], dt, row_is_id=True)
|
self.conn.execute('UPDATE books SET pubdate=? WHERE id=?', (dt, id))
|
||||||
self.dirtied([id], commit=False)
|
self.data.set(id, self.FIELD_MAP['pubdate'], dt, row_is_id=True)
|
||||||
if commit:
|
self.dirtied([id], commit=False)
|
||||||
self.conn.commit()
|
if commit:
|
||||||
if notify:
|
self.conn.commit()
|
||||||
self.notify('metadata', [id])
|
if notify:
|
||||||
|
self.notify('metadata', [id])
|
||||||
|
|
||||||
|
|
||||||
def set_publisher(self, id, publisher, notify=True, commit=True,
|
def set_publisher(self, id, publisher, notify=True, commit=True,
|
||||||
@ -3344,7 +3345,7 @@ class LibraryDatabase2(LibraryDatabase, SchemaUpgrade, CustomColumns):
|
|||||||
if mi.timestamp is None:
|
if mi.timestamp is None:
|
||||||
mi.timestamp = utcnow()
|
mi.timestamp = utcnow()
|
||||||
if mi.pubdate is None:
|
if mi.pubdate is None:
|
||||||
mi.pubdate = utcnow()
|
mi.pubdate = UNDEFINED_DATE
|
||||||
self.set_metadata(id, mi, ignore_errors=True, commit=True)
|
self.set_metadata(id, mi, ignore_errors=True, commit=True)
|
||||||
if cover is not None:
|
if cover is not None:
|
||||||
try:
|
try:
|
||||||
@ -3386,7 +3387,7 @@ class LibraryDatabase2(LibraryDatabase, SchemaUpgrade, CustomColumns):
|
|||||||
if mi.timestamp is None:
|
if mi.timestamp is None:
|
||||||
mi.timestamp = utcnow()
|
mi.timestamp = utcnow()
|
||||||
if mi.pubdate is None:
|
if mi.pubdate is None:
|
||||||
mi.pubdate = utcnow()
|
mi.pubdate = UNDEFINED_DATE
|
||||||
self.set_metadata(id, mi, commit=True, ignore_errors=True)
|
self.set_metadata(id, mi, commit=True, ignore_errors=True)
|
||||||
npath = self.run_import_plugins(path, format)
|
npath = self.run_import_plugins(path, format)
|
||||||
format = os.path.splitext(npath)[-1].lower().replace('.', '').upper()
|
format = os.path.splitext(npath)[-1].lower().replace('.', '').upper()
|
||||||
@ -3426,7 +3427,7 @@ class LibraryDatabase2(LibraryDatabase, SchemaUpgrade, CustomColumns):
|
|||||||
if mi.timestamp is None:
|
if mi.timestamp is None:
|
||||||
mi.timestamp = utcnow()
|
mi.timestamp = utcnow()
|
||||||
if mi.pubdate is None:
|
if mi.pubdate is None:
|
||||||
mi.pubdate = utcnow()
|
mi.pubdate = UNDEFINED_DATE
|
||||||
self.set_metadata(id, mi, ignore_errors=True, commit=True)
|
self.set_metadata(id, mi, ignore_errors=True, commit=True)
|
||||||
if preserve_uuid and mi.uuid:
|
if preserve_uuid and mi.uuid:
|
||||||
self.set_uuid(id, mi.uuid, commit=False)
|
self.set_uuid(id, mi.uuid, commit=False)
|
||||||
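Editor's note: a short illustration, not part of the diff, of the behavioural change in set_pubdate() above: a falsy dt used to make the call a no-op (and freshly added books fell back to utcnow()), whereas now the undefined-date sentinel is written, so a publication date can actually be cleared. db and book_id are assumed to be an open LibraryDatabase2 and an existing book id.

    db.set_pubdate(book_id, '2012-08-31')  # strings still go through parse_only_date()
    db.set_pubdate(book_id, None)          # previously ignored; now stores UNDEFINED_DATE
    db.set_pubdate(book_id, '')            # same: any falsy value clears the date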
[File diff suppressed because it is too large]
@@ -203,6 +203,11 @@ def samefile_windows(src, dst):
     import win32file
     from pywintypes import error

+    samestring = (os.path.normcase(os.path.abspath(src)) ==
+            os.path.normcase(os.path.abspath(dst)))
+    if samestring:
+        return True
+
     def get_fileid(x):
         if isbytestring(x): x = x.decode(filesystem_encoding)
         try:
@@ -224,6 +229,10 @@ def samefile(src, dst):
    symlinks, case insensitivity, mapped drives, etc.

    Returns True iff both paths exist and point to the same file on disk.
+
+    Note: On windows this will return True if the two strings are identical (up to
+    case) even if the file does not exist. This is because I have no way of
+    knowing how reliable the GetFileInformationByHandle method is.
    '''
    if iswindows:
        return samefile_windows(src, dst)
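Editor's note: an illustrative sketch, not from the commit, of the fast path added to samefile_windows() above: two strings that normalise to the same case-folded absolute path are treated as the same file without touching the filesystem. The helper name and sample paths are invented.

    import os

    def same_path_string(src, dst):
        # Mirrors the new shortcut: compare normalised, case-folded absolute
        # paths before falling back to GetFileInformationByHandle.
        return (os.path.normcase(os.path.abspath(src)) ==
                os.path.normcase(os.path.abspath(dst)))

    print same_path_string(r'C:\Books\a.epub', r'c:\books\A.EPUB')  # True on windows
    print same_path_string(r'C:\Books\a.epub', r'C:\Books\b.epub')  # False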
@@ -82,6 +82,17 @@ def icu_sort_key(collator, obj):
             obj = obj.replace(b'\0', b'')
         return _secondary_collator.sort_key(obj)

+def icu_change_case(upper, locale, obj):
+    func = _icu.upper if upper else _icu.lower
+    try:
+        return func(locale, obj)
+    except TypeError:
+        if isinstance(obj, unicode):
+            obj = obj.replace(u'\0', u'')
+        else:
+            obj = obj.replace(b'\0', b'')
+        return func(locale, obj)
+
 def py_find(pattern, source):
     pos = source.find(pattern)
     if pos > -1:
@@ -163,10 +174,10 @@ case_sensitive_sort_key = py_case_sensitive_sort_key if _icu_not_ok else \
 case_sensitive_strcmp = cmp if _icu_not_ok else icu_case_sensitive_strcmp

 upper = (lambda s: s.upper()) if _icu_not_ok else \
-        partial(_icu.upper, get_locale())
+        partial(icu_change_case, True, get_locale())

 lower = (lambda s: s.lower()) if _icu_not_ok else \
-        partial(_icu.lower, get_locale())
+        partial(icu_change_case, False, get_locale())

 title_case = (lambda s: s.title()) if _icu_not_ok else \
         partial(_icu.title, get_locale())
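Editor's note: judging from the except TypeError handler above, the C-level _icu.upper()/_icu.lower() refuse strings containing embedded NULs; icu_change_case() strips the NULs and retries, so the module-level upper()/lower() callables keep working on such input. A small usage sketch (the sample strings are invented):

    from calibre.utils.icu import upper, lower

    dirty = u'calibre\0test'   # NUL smuggled in from a malformed source file
    print upper(dirty)         # u'CALIBRETEST' instead of a TypeError
    print lower(u'CALIBRE')    # locale-aware lowercasing still goes through ICU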
@@ -1104,6 +1104,41 @@ magick_Image_type_setter(magick_Image *self, PyObject *val, void *closure) {

 // }}}

+// Image.depth {{{
+static PyObject *
+magick_Image_depth_getter(magick_Image *self, void *closure) {
+    NULL_CHECK(NULL)
+
+    return Py_BuildValue("n", MagickGetImageDepth(self->wand));
+}
+
+static int
+magick_Image_depth_setter(magick_Image *self, PyObject *val, void *closure) {
+    size_t depth;
+
+    NULL_CHECK(-1)
+
+    if (val == NULL) {
+        PyErr_SetString(PyExc_TypeError, "Cannot delete image depth");
+        return -1;
+    }
+
+    if (!PyInt_Check(val)) {
+        PyErr_SetString(PyExc_TypeError, "Depth must be an integer");
+        return -1;
+    }
+
+    depth = (size_t)PyInt_AsSsize_t(val);
+    if (!MagickSetImageDepth(self->wand, depth)) {
+        PyErr_Format(PyExc_ValueError, "Could not set image depth to %lu", depth);
+        return -1;
+    }
+
+    return 0;
+}
+
+// }}}
+
 // Image.destroy {{{

 static PyObject *
@@ -1238,7 +1273,7 @@ static PyMethodDef magick_Image_methods[] = {
     },

     {"quantize", (PyCFunction)magick_Image_quantize, METH_VARARGS,
-     "quantize(number_colors, colorspace, treedepth, dither, measure_error) \n\n nalyzes the colors within a reference image and chooses a fixed number of colors to represent the image. The goal of the algorithm is to minimize the color difference between the input and output image while minimizing the processing time."
+     "quantize(number_colors, colorspace, treedepth, dither, measure_error) \n\n analyzes the colors within a reference image and chooses a fixed number of colors to represent the image. The goal of the algorithm is to minimize the color difference between the input and output image while minimizing the processing time."
     },

     {NULL} /* Sentinel */
@@ -1260,6 +1295,12 @@ static PyGetSetDef magick_Image_getsetters[] = {
     (char *)"the image type: UndefinedType, BilevelType, GrayscaleType, GrayscaleMatteType, PaletteType, PaletteMatteType, TrueColorType, TrueColorMatteType, ColorSeparationType, ColorSeparationMatteType, or OptimizeType.",
     NULL},

+    {(char *)"depth",
+     (getter)magick_Image_depth_getter, (setter)magick_Image_depth_setter,
+     (char *)"the image depth.",
+     NULL},
+
+
     {NULL} /* Sentinel */
 };

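Editor's note: a hedged usage sketch of the depth property wired up above. It assumes the getter/setter pair is surfaced unchanged on the Image wrapper in calibre.utils.magick and that the wrapper's open()/save() helpers are available; the file names are made up.

    from calibre.utils.magick import Image

    img = Image()
    img.open('cover.png')     # hypothetical input file
    print img.depth           # MagickGetImageDepth(): bits per channel, e.g. 16
    img.depth = 8             # MagickSetImageDepth(); non-integers raise TypeError
    img.save('cover-8bit.png')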
@@ -13,188 +13,23 @@ You can pass a number to memory and it will be subtracted from the returned
 value.
 '''

-import gc, os, re
+import gc, os

 from calibre.constants import iswindows, islinux

-if islinux:
-    # Taken, with thanks, from:
-    # http://wingolog.org/archives/2007/11/27/reducing-the-footprint-of-python-applications
-
-    def permute(args):
-        ret = []
-        if args:
-            first = args.pop(0)
-            for y in permute(args):
-                for x in first:
-                    ret.append(x + y)
-        else:
-            ret.append('')
-        return ret
-
-    def parsed_groups(match, *types):
-        groups = match.groups()
-        assert len(groups) == len(types)
-        return tuple([type(group) for group, type in zip(groups, types)])
-
-    class VMA(dict):
-        def __init__(self, *args):
-            (self.start, self.end, self.perms, self.offset,
-             self.major, self.minor, self.inode, self.filename) = args
-
-    def parse_smaps(pid):
-        with open('/proc/%s/smaps'%pid, 'r') as maps:
-            hex = lambda s: int(s, 16)
-
-            ret = []
-            header = re.compile(r'^([0-9a-f]+)-([0-9a-f]+) (....) ([0-9a-f]+) '
-                    r'(..):(..) (\d+) *(.*)$')
-            detail = re.compile(r'^(.*): +(\d+) kB')
-            for line in maps:
-                m = header.match(line)
-                if m:
-                    vma = VMA(*parsed_groups(m, hex, hex, str, hex, str, str, int, str))
-                    ret.append(vma)
-                else:
-                    m = detail.match(line)
-                    if m:
-                        k, v = parsed_groups(m, str, int)
-                        assert k not in vma
-                        vma[k] = v
-                    else:
-                        print 'unparseable line:', line
-            return ret
-
-    perms = permute(['r-', 'w-', 'x-', 'ps'])
-
-    def make_summary_dicts(vmas):
-        mapped = {}
-        anon = {}
-        for d in mapped, anon:
-            # per-perm
-            for k in perms:
-                d[k] = {}
-                d[k]['Size'] = 0
-                for y in 'Shared', 'Private':
-                    d[k][y] = {}
-                    for z in 'Clean', 'Dirty':
-                        d[k][y][z] = 0
-            # totals
-            for y in 'Shared', 'Private':
-                d[y] = {}
-                for z in 'Clean', 'Dirty':
-                    d[y][z] = 0
-
-        for vma in vmas:
-            if vma.major == '00' and vma.minor == '00':
-                d = anon
-            else:
-                d = mapped
-            for y in 'Shared', 'Private':
-                for z in 'Clean', 'Dirty':
-                    d[vma.perms][y][z] += vma.get(y + '_' + z, 0)
-                    d[y][z] += vma.get(y + '_' + z, 0)
-            d[vma.perms]['Size'] += vma.get('Size', 0)
-        return mapped, anon
-
-    def values(d, args):
-        if args:
-            ret = ()
-            first = args[0]
-            for k in first:
-                ret += values(d[k], args[1:])
-            return ret
-        else:
-            return (d,)
-
-    def print_summary(dicts_and_titles):
-        def desc(title, perms):
-            ret = {('Anonymous', 'rw-p'): 'Data (malloc, mmap)',
-                   ('Anonymous', 'rwxp'): 'Writable code (stack)',
-                   ('Mapped', 'r-xp'): 'Code',
-                   ('Mapped', 'rwxp'): 'Writable code (jump tables)',
-                   ('Mapped', 'r--p'): 'Read-only data',
-                   ('Mapped', 'rw-p'): 'Data'}.get((title, perms), None)
-            if ret:
-                return ' -- ' + ret
-            else:
-                return ''
-
-        for d, title in dicts_and_titles:
-            print title, 'memory:'
-            print ' Shared Private'
-            print ' Clean Dirty Clean Dirty'
-            for k in perms:
-                if d[k]['Size']:
-                    print (' %s %7d %7d %7d %7d%s'
-                           % ((k,)
-                              + values(d[k], (('Shared', 'Private'),
-                                              ('Clean', 'Dirty')))
-                              + (desc(title, k),)))
-            print (' total %7d %7d %7d %7d'
-                   % values(d, (('Shared', 'Private'),
-                                ('Clean', 'Dirty'))))
-
-        print ' ' + '-' * 40
-        print (' total %7d %7d %7d %7d'
-               % tuple(map(sum, zip(*[values(d, (('Shared', 'Private'),
-                                                 ('Clean', 'Dirty')))
-                                      for d, title in dicts_and_titles]))))
-
-    def print_stats(pid=None):
-        if pid is None:
-            pid = os.getpid()
-        vmas = parse_smaps(pid)
-        mapped, anon = make_summary_dicts(vmas)
-        print_summary(((mapped, "Mapped"), (anon, "Anonymous")))
-
-    def linux_memory(since=0.0):
-        vmas = parse_smaps(os.getpid())
-        mapped, anon = make_summary_dicts(vmas)
-        dicts_and_titles = ((mapped, "Mapped"), (anon, "Anonymous"))
-        totals = tuple(map(sum, zip(*[values(d, (('Shared', 'Private'),
-                                                 ('Clean', 'Dirty')))
-                                      for d, title in dicts_and_titles])))
-        return (totals[-1]/1024.) - since
-
-    memory = linux_memory
-
-elif iswindows:
-    import win32process
-    import win32con
-    import win32api
-
-    # See http://msdn.microsoft.com/en-us/library/ms684877.aspx
-    # for details on the info returned by get_meminfo
-
-    def get_handle(pid):
-        return win32api.OpenProcess(win32con.PROCESS_QUERY_INFORMATION, 0,
-                pid)
-
-    def listprocesses(self):
-        for process in win32process.EnumProcesses():
-            try:
-                han = get_handle(process)
-                procmeminfo = meminfo(han)
-                procmemusage = procmeminfo["WorkingSetSize"]
-                yield process, procmemusage
-            except:
-                pass
-
-    def get_meminfo(pid):
-        han = win32api.OpenProcess(win32con.PROCESS_QUERY_INFORMATION, 0,
-                pid)
-        return meminfo(han)
-
-    def meminfo(handle):
-        return win32process.GetProcessMemoryInfo(handle)
-
-    def win_memory(since=0.0):
-        info = meminfo(get_handle(os.getpid()))
-        return (info['WorkingSetSize']/1024.**2) - since
-
-    memory = win_memory
-
+def get_memory():
+    'Return memory usage in bytes'
+    import psutil
+    p = psutil.Process(os.getpid())
+    mem = p.get_ext_memory_info()
+    attr = 'wset' if iswindows else 'data' if islinux else 'rss'
+    return getattr(mem, attr)
+
+def memory(since=0.0):
+    'Return memory used in MB. The value of since is subtracted from the used memory'
+    ans = get_memory()
+    ans /= float(1024**2)
+    return ans - since
+
 def gc_histogram():
     """Returns per-class counts of existing objects."""
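Editor's note: after the rewrite above the module is a thin veneer over psutil instead of a hand-rolled /proc/<pid>/smaps parser, so psutil must be importable wherever memory() is called. A usage sketch of the since= convention the new code keeps (assuming the module is calibre.utils.mem):

    from calibre.utils.mem import memory

    base = memory()                       # MB currently in use (wset/data/rss per platform)
    blob = 'x' * (50 * 1024 * 1024)       # hold on to roughly 50MB
    print '%.1f MB added' % memory(base)  # pass the earlier reading as `since`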
@@ -94,9 +94,8 @@ def delete_all_but(path, pages):
         if page not in pages:
             p.delete_page(page)

-    raw = p.write()
     with open(path, 'wb') as f:
-        f.write(raw)
+        p.save_to_fileobj(f)

 def test_outline(src):
     podofo = get_podofo()
@@ -114,7 +113,17 @@ def test_outline(src):
         f.write(raw)
     print 'Outlined PDF:', out

+def test_save_to(src, dest):
+    podofo = get_podofo()
+    p = podofo.PDFDoc()
+    with open(src, 'rb') as f:
+        raw = f.read()
+    p.load(raw)
+    with open(dest, 'wb') as out:
+        p.save_to_fileobj(out)
+        print ('Wrote PDF of size:', out.tell())
+
 if __name__ == '__main__':
     import sys
-    test_outline(sys.argv[-1])
+    test_save_to(sys.argv[-2], sys.argv[-1])
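Editor's note: a brief sketch, not from the commit, of why save_to_fileobj() is preferable to write() for big documents: the PDF is streamed straight into any object exposing write()/tell()/seek() instead of first being materialised as one large Python string. get_podofo() is assumed to come from calibre.utils.podofo, and StringIO is used only to show that non-file objects work:

    from cStringIO import StringIO
    from calibre.utils.podofo import get_podofo

    podofo = get_podofo()
    doc = podofo.PDFDoc()
    with open('input.pdf', 'rb') as f:    # path is made up
        doc.load(f.read())

    buf = StringIO()
    doc.save_to_fileobj(buf)              # streamed via the OutputDevice defined below
    print 'PDF size:', buf.tell()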
@@ -104,6 +104,15 @@ PDFDoc_write(PDFDoc *self, PyObject *args) {

     return ans;
 }

+static PyObject *
+PDFDoc_save_to_fileobj(PDFDoc *self, PyObject *args) {
+    PyObject *f;
+
+    if (!PyArg_ParseTuple(args, "O", &f)) return NULL;
+    return write_doc(self->doc, f);
+}
+
 // }}}

 // extract_first_page() {{{
@@ -453,6 +462,9 @@ static PyMethodDef PDFDoc_methods[] = {
     {"write", (PyCFunction)PDFDoc_write, METH_VARARGS,
      "Return the PDF document as a bytestring."
     },
+    {"save_to_fileobj", (PyCFunction)PDFDoc_save_to_fileobj, METH_VARARGS,
+     "Write the PDF document to the specified file-like object."
+    },
     {"extract_first_page", (PyCFunction)PDFDoc_extract_first_page, METH_VARARGS,
      "extract_first_page() -> Remove all but the first page."
     },
@@ -41,6 +41,7 @@ extern void podofo_set_exception(const PdfError &err);
 extern PyObject * podofo_convert_pdfstring(const PdfString &s);
 extern PdfString * podofo_convert_pystring(PyObject *py);
 extern PdfString * podofo_convert_pystring_single_byte(PyObject *py);
+extern PyObject* write_doc(PdfMemDocument *doc, PyObject *f);

 }
src/calibre/utils/podofo/output.cpp (new file)
@@ -0,0 +1,179 @@
+/*
+ * output.cpp
+ * Copyright (C) 2012 Kovid Goyal <kovid at kovidgoyal.net>
+ *
+ * Distributed under terms of the GPL3 license.
+ */
+
+#include "global.h"
+
+using namespace PoDoFo;
+
+class pyerr : public std::exception {
+};
+
+class OutputDevice : public PdfOutputDevice {
+
+    private:
+        PyObject *file;
+        size_t written;
+
+        void update_written() {
+            size_t pos;
+            pos = Tell();
+            if (pos > written) written = pos;
+        }
+
+    public:
+        OutputDevice(PyObject *f) : file(f), written(0) { Py_XINCREF(file); }
+        ~OutputDevice() { Py_XDECREF(file); file = NULL; }
+
+        size_t GetLength() const { return written; }
+
+        long PrintVLen(const char* pszFormat, va_list args) {
+
+            if( !pszFormat ) { PODOFO_RAISE_ERROR( ePdfError_InvalidHandle ); }
+
+#ifdef _MSC_VER
+            return _vscprintf(pszFormat, args);
+#else
+            char *buf;
+            int res, len=1024;
+            while(true) {
+                buf = new (std::nothrow) char[len+1];
+                if (buf == NULL) { PyErr_NoMemory(); throw pyerr(); }
+                res = vsnprintf(buf, len, pszFormat, args);
+                delete[] buf;
+                if (res >= 0) return res + 1;
+                len *= 2;
+            }
+#endif
+        }
+
+        void PrintV( const char* pszFormat, long lBytes, va_list args ) {
+            char *buf;
+            int res;
+
+            if( !pszFormat ) { PODOFO_RAISE_ERROR( ePdfError_InvalidHandle ); }
+
+            buf = new (std::nothrow) char[lBytes+1];
+            if (buf == NULL) { PyErr_NoMemory(); throw pyerr(); }
+
+            // Note: PyOS_vsnprintf produces broken output on windows
+            res = vsnprintf(buf, lBytes, pszFormat, args);
+
+            if (res < 0) {
+                PyErr_SetString(PyExc_Exception, "Something bad happened while calling vsnprintf");
+                delete[] buf;
+                throw pyerr();
+            }
+
+            Write(buf, static_cast<size_t>(res));
+            delete[] buf;
+        }
+
+        void Print( const char* pszFormat, ... )
+        {
+            va_list args;
+            long lBytes;
+
+            va_start( args, pszFormat );
+            lBytes = PrintVLen(pszFormat, args);
+            va_end( args );
+
+            va_start( args, pszFormat );
+            PrintV(pszFormat, lBytes, args);
+            va_end( args );
+        }
+
+        size_t Read( char* pBuffer, size_t lLen ) {
+            PyObject *ret;
+            char *buf = NULL;
+            Py_ssize_t len = 0;
+
+            ret = PyObject_CallMethod(file, (char*)"read", (char*)"n", static_cast<Py_ssize_t>(lLen));
+            if (ret != NULL) {
+                if (PyBytes_AsStringAndSize(ret, &buf, &len) != -1) {
+                    memcpy(pBuffer, buf, len);
+                    Py_DECREF(ret);
+                    return static_cast<size_t>(len);
+                }
+                Py_DECREF(ret);
+            }
+
+            if (PyErr_Occurred() == NULL)
+                PyErr_SetString(PyExc_Exception, "Failed to read data from python file object");
+
+            throw pyerr();
+        }
+
+        void Seek(size_t offset) {
+            PyObject *ret;
+            ret = PyObject_CallMethod(file, (char*)"seek", (char*)"n", static_cast<Py_ssize_t>(offset));
+            if (ret == NULL) {
+                if (PyErr_Occurred() == NULL)
+                    PyErr_SetString(PyExc_Exception, "Failed to seek in python file object");
+                throw pyerr();
+            }
+            Py_DECREF(ret);
+        }
+
+        size_t Tell() const {
+            PyObject *ret;
+            unsigned long ans;
+
+            ret = PyObject_CallMethod(file, (char*)"tell", NULL);
+            if (ret == NULL) {
+                if (PyErr_Occurred() == NULL)
+                    PyErr_SetString(PyExc_Exception, "Failed to call tell() on python file object");
+                throw pyerr();
+            }
+            if (!PyNumber_Check(ret)) {
+                Py_DECREF(ret);
+                PyErr_SetString(PyExc_Exception, "tell() method did not return a number");
+                throw pyerr();
+            }
+            ans = PyInt_AsUnsignedLongMask(ret);
+            Py_DECREF(ret);
+            if (PyErr_Occurred() != NULL) throw pyerr();
+
+            return static_cast<size_t>(ans);
+        }
+
+        void Write(const char* pBuffer, size_t lLen) {
+            PyObject *ret;
+
+            ret = PyObject_CallMethod(file, (char*)"write", (char*)"s#", pBuffer, (int)lLen);
+            if (ret == NULL) {
+                if (PyErr_Occurred() == NULL)
+                    PyErr_SetString(PyExc_Exception, "Failed to call write() on python file object");
+                throw pyerr();
+            }
+            Py_DECREF(ret);
+            update_written();
+        }
+
+        void Flush() {
+            Py_XDECREF(PyObject_CallMethod(file, (char*)"flush", NULL));
+        }
+
+};
+
+
+PyObject* pdf::write_doc(PdfMemDocument *doc, PyObject *f) {
+    OutputDevice d(f);
+
+    try {
+        doc->Write(&d);
+    } catch(const PdfError & err) {
+        podofo_set_exception(err); return NULL;
+    } catch (...) {
+        if (PyErr_Occurred() == NULL)
+            PyErr_SetString(PyExc_Exception, "An unknown error occurred while trying to write the pdf to the file object");
+        return NULL;
+    }
+
+    Py_RETURN_NONE;
+}
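Editor's note: as the Read/Seek/Tell/Write/Flush methods above show, write_doc() only ever calls read(), seek(pos), tell(), write(data) and flush() on the Python object it is handed, so any duck-typed sink with those methods will do. A hypothetical wrapper that counts bytes while delegating to a real file:

    class CountingSink(object):
        # Minimal file-like object satisfying the OutputDevice above.
        def __init__(self, fobj):
            self.fobj, self.written = fobj, 0
        def read(self, n):
            return self.fobj.read(n)
        def seek(self, pos):
            self.fobj.seek(pos)
        def tell(self):
            return self.fobj.tell()
        def write(self, data):
            self.written += len(data)
            self.fobj.write(data)
        def flush(self):
            self.fobj.flush()

    with open('out.pdf', 'w+b') as f:     # w+b: the writer may seek back and re-read
        sink = CountingSink(f)
        doc.save_to_fileobj(sink)         # doc from the sketch after the Python diff
        print sink.written, 'bytes written'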