SONY driver: Add support for uploading covers and periodicals
parent 1dd72e682c
commit 5fb294486b
@@ -2,5 +2,11 @@ __license__ = 'GPL v3'
 __copyright__ = '2008, Kovid Goyal <kovid at kovidgoyal.net>'

 MEDIA_XML = 'database/cache/media.xml'
+MEDIA_EXT = 'database/cache/cacheExt.xml'
+
 CACHE_XML = 'Sony Reader/database/cache.xml'
+CACHE_EXT = 'Sony Reader/database/cacheExt.xml'
+
+MEDIA_THUMBNAIL = 'database/thumbnail'
+CACHE_THUMBNAIL = 'Sony Reader/database/thumbnail'

@@ -9,10 +9,10 @@ Device driver for the SONY devices
 import os, time, re

 from calibre.devices.usbms.driver import USBMS, debug_print
-from calibre.devices.prs505 import MEDIA_XML
-from calibre.devices.prs505 import CACHE_XML
+from calibre.devices.prs505 import MEDIA_XML, MEDIA_EXT, CACHE_XML, CACHE_EXT, \
+        MEDIA_THUMBNAIL, CACHE_THUMBNAIL
 from calibre.devices.prs505.sony_cache import XMLCache
-from calibre import __appname__
+from calibre import __appname__, prints
 from calibre.devices.usbms.books import CollectionsBookList

 class PRS505(USBMS):
@@ -66,6 +66,8 @@ class PRS505(USBMS):
     plugboard = None
     plugboard_func = None

+    THUMBNAIL_HEIGHT = 200
+
     def windows_filter_pnp_id(self, pnp_id):
         return '_LAUNCHER' in pnp_id

@@ -116,20 +118,21 @@ class PRS505(USBMS):
         return fname

     def initialize_XML_cache(self):
-        paths, prefixes = {}, {}
-        for prefix, path, source_id in [
-                ('main', MEDIA_XML, 0),
-                ('card_a', CACHE_XML, 1),
-                ('card_b', CACHE_XML, 2)
+        paths, prefixes, ext_paths = {}, {}, {}
+        for prefix, path, ext_path, source_id in [
+                ('main', MEDIA_XML, MEDIA_EXT, 0),
+                ('card_a', CACHE_XML, CACHE_EXT, 1),
+                ('card_b', CACHE_XML, CACHE_EXT, 2)
                 ]:
             prefix = getattr(self, '_%s_prefix'%prefix)
             if prefix is not None and os.path.exists(prefix):
                 paths[source_id] = os.path.join(prefix, *(path.split('/')))
+                ext_paths[source_id] = os.path.join(prefix, *(ext_path.split('/')))
                 prefixes[source_id] = prefix
                 d = os.path.dirname(paths[source_id])
                 if not os.path.exists(d):
                     os.makedirs(d)
-        return XMLCache(paths, prefixes, self.settings().use_author_sort)
+        return XMLCache(paths, ext_paths, prefixes, self.settings().use_author_sort)

     def books(self, oncard=None, end_session=True):
         debug_print('PRS505: starting fetching books for card', oncard)
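For illustration, a minimal sketch (not part of the commit; the mount prefixes below are hypothetical) of the three mappings initialize_XML_cache now builds, one cache path, one ext-cache path and one prefix per source id:

import os

# Hypothetical mount points; the real values come from the USBMS layer at runtime.
MAIN_PREFIX = '/media/READER/'
CARD_A_PREFIX = '/media/READER_CARD/'

MEDIA_XML = 'database/cache/media.xml'
MEDIA_EXT = 'database/cache/cacheExt.xml'
CACHE_XML = 'Sony Reader/database/cache.xml'
CACHE_EXT = 'Sony Reader/database/cacheExt.xml'

paths, ext_paths, prefixes = {}, {}, {}
for source_id, prefix, path, ext_path in [
        (0, MAIN_PREFIX, MEDIA_XML, MEDIA_EXT),
        (1, CARD_A_PREFIX, CACHE_XML, CACHE_EXT)]:
    paths[source_id] = os.path.join(prefix, *path.split('/'))
    ext_paths[source_id] = os.path.join(prefix, *ext_path.split('/'))
    prefixes[source_id] = prefix

print(paths[0])      # /media/READER/database/cache/media.xml
print(ext_paths[1])  # /media/READER_CARD/Sony Reader/database/cacheExt.xml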
@@ -174,3 +177,31 @@ class PRS505(USBMS):
     def set_plugboards(self, plugboards, pb_func):
         self.plugboards = plugboards
         self.plugboard_func = pb_func
+
+    def upload_cover(self, path, filename, metadata, filepath):
+        if metadata.thumbnail and metadata.thumbnail[-1]:
+            path = path.replace('/', os.sep)
+            is_main = path.startswith(self._main_prefix)
+            thumbnail_dir = MEDIA_THUMBNAIL if is_main else CACHE_THUMBNAIL
+            prefix = None
+            if is_main:
+                prefix = self._main_prefix
+            else:
+                if self._card_a_prefix and \
+                        path.startswith(self._card_a_prefix):
+                    prefix = self._card_a_prefix
+                elif self._card_b_prefix and \
+                        path.startswith(self._card_b_prefix):
+                    prefix = self._card_b_prefix
+            if prefix is None:
+                prints('WARNING: Failed to find prefix for:', filepath)
+                return
+            thumbnail_dir = os.path.join(prefix, *thumbnail_dir.split('/'))
+
+            relpath = os.path.relpath(filepath, prefix)
+            thumbnail_dir = os.path.join(thumbnail_dir, relpath)
+            if not os.path.exists(thumbnail_dir):
+                os.makedirs(thumbnail_dir)
+            with open(os.path.join(thumbnail_dir, 'main_thumbnail.jpg'), 'wb') as f:
+                f.write(metadata.thumbnail[-1])
+
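A small sketch of the path arithmetic upload_cover performs, with hypothetical inputs and a made-up helper name (this is not the driver code itself, just the same os.path recipe):

import os

MEDIA_THUMBNAIL = 'database/thumbnail'

def cover_destination(prefix, filepath, thumbnail_dir=MEDIA_THUMBNAIL):
    # The book's path relative to the mount prefix is mirrored under the
    # device's thumbnail directory; the JPEG is always named main_thumbnail.jpg.
    d = os.path.join(prefix, *thumbnail_dir.split('/'))
    d = os.path.join(d, os.path.relpath(filepath, prefix))
    return os.path.join(d, 'main_thumbnail.jpg')

print(cover_destination('/media/READER/',
    '/media/READER/database/media/books/calibre/Example_123.epub'))
# /media/READER/database/thumbnail/database/media/books/calibre/Example_123.epub/main_thumbnail.jpg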
@@ -9,6 +9,7 @@ import os, time
 from base64 import b64decode
 from uuid import uuid4
 from lxml import etree
+from datetime import date

 from calibre import prints, guess_type, isbytestring
 from calibre.devices.errors import DeviceError
@@ -18,6 +19,20 @@ from calibre.ebooks.chardet import xml_to_unicode
 from calibre.ebooks.metadata import authors_to_string, title_sort, \
         authors_to_sort_string

+'''
+cacheExt.xml
+
+Periodical identifier sample from a PRS-650:
+
+<?xml version="1.0" encoding="UTF-8"?>
+<cacheExt xmlns="http://www.sony.com/xmlns/product/prs/device/1">
+    <text conformsTo="http://xmlns.sony.net/e-book/prs/periodicals/1.0/newspaper/1.0" periodicalName="The Atlantic" description="Current affairs and politics focussed on the US" publicationDate="Tue, 19 Oct 2010 00:00:00 GMT" path="database/media/books/calibre/Atlantic [Mon, 18 Oct 2010], The - calibre_1701.epub">
+        <thumbnail width="167" height="217">main_thumbnail.jpg</thumbnail>
+    </text>
+</cacheExt>
+
+'''
+
 # Utility functions {{{
 EMPTY_CARD_CACHE = '''\
 <?xml version="1.0" encoding="UTF-8"?>
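Reading such an entry back with lxml is straightforward; a self-contained sketch (sample attribute values only, not from the commit):

from lxml import etree

raw = b'''<?xml version="1.0" encoding="UTF-8"?>
<cacheExt xmlns="http://www.sony.com/xmlns/product/prs/device/1">
    <text conformsTo="http://xmlns.sony.net/e-book/prs/periodicals/1.0/newspaper/1.0"
          periodicalName="The Atlantic"
          publicationDate="Tue, 19 Oct 2010 00:00:00 GMT"
          path="database/media/books/calibre/example.epub"/>
</cacheExt>'''

root = etree.fromstring(raw)
# The namespace varies per tree, so match on local-name() as the driver's own XPath does.
for rec in root.xpath('//*[local-name()="text"]'):
    print(rec.get('periodicalName'), rec.get('publicationDate'))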
@@ -25,6 +40,12 @@ EMPTY_CARD_CACHE = '''\
 </cache>
 '''

+EMPTY_EXT_CACHE = '''\
+<?xml version="1.0" encoding="UTF-8"?>
+<cacheExt xmlns="http://www.sony.com/xmlns/product/prs/device/1">
+</cacheExt>
+'''
+
 MIME_MAP = {
         "lrf" : "application/x-sony-bbeb",
         'lrx' : 'application/x-sony-bbeb',
@@ -63,7 +84,7 @@ def uuid():

 class XMLCache(object):

-    def __init__(self, paths, prefixes, use_author_sort):
+    def __init__(self, paths, ext_paths, prefixes, use_author_sort):
         if DEBUG:
             debug_print('Building XMLCache...', paths)
         self.paths = paths
@@ -85,6 +106,7 @@ class XMLCache(object):
             if os.access(path, os.R_OK):
                 with open(path, 'rb') as f:
                     raw = f.read()
+
             self.roots[source_id] = etree.fromstring(xml_to_unicode(
                 raw, strip_encoding_pats=True, assume_utf8=True,
                 verbose=DEBUG)[0],
@@ -93,6 +115,25 @@ class XMLCache(object):
                 raise Exception(('The SONY database at %s is corrupted. Try '
                         ' disconnecting and reconnecting your reader.')%path)

+        self.ext_paths, self.ext_roots = {}, {}
+        for source_id, path in ext_paths.items():
+            if not os.path.exists(path):
+                try:
+                    with open(path, 'wb') as f:
+                        f.write(EMPTY_EXT_CACHE)
+                except:
+                    pass
+            if os.access(path, os.W_OK):
+                try:
+                    with open(path, 'rb') as f:
+                        self.ext_roots[source_id] = etree.fromstring(
+                                xml_to_unicode(f.read(),
+                                    strip_encoding_pats=True, assume_utf8=True,
+                                    verbose=DEBUG)[0], parser=parser)
+                        self.ext_paths[source_id] = path
+                except:
+                    pass
+
         # }}}

         recs = self.roots[0].xpath('//*[local-name()="records"]')
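The create-if-missing pattern used for the ext cache can be sketched in isolation (hypothetical path, error handling omitted):

import os
from lxml import etree

EMPTY_EXT_CACHE = b'''<?xml version="1.0" encoding="UTF-8"?>
<cacheExt xmlns="http://www.sony.com/xmlns/product/prs/device/1">
</cacheExt>
'''

path = '/tmp/cacheExt.xml'  # stands in for <prefix>/Sony Reader/database/cacheExt.xml
if not os.path.exists(path):
    # Seed a valid, empty cacheExt.xml so the parse below always has something to read.
    with open(path, 'wb') as f:
        f.write(EMPTY_EXT_CACHE)

with open(path, 'rb') as f:
    ext_root = etree.fromstring(f.read())
print(ext_root.nsmap[None])  # http://www.sony.com/xmlns/product/prs/device/1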
@@ -352,12 +393,18 @@ class XMLCache(object):
             debug_print('Updating XML Cache:', i)
             root = self.record_roots[i]
             lpath_map = self.build_lpath_map(root)
+            ext_root = self.ext_roots[i] if i in self.ext_roots else None
+            ext_lpath_map = None
+            if ext_root is not None:
+                ext_lpath_map = self.build_lpath_map(ext_root)
             gtz_count = ltz_count = 0
             use_tz_var = False
             for book in booklist:
                 path = os.path.join(self.prefixes[i], *(book.lpath.split('/')))
                 record = lpath_map.get(book.lpath, None)
+                created = False
                 if record is None:
+                    created = True
                     record = self.create_text_record(root, i, book.lpath)
                 if plugboard is not None:
                     newmi = book.deepcopy_metadata()
@@ -373,6 +420,13 @@ class XMLCache(object):
                 if book.device_collections is None:
                     book.device_collections = []
                 book.device_collections = playlist_map.get(book.lpath, [])
+
+                if created and ext_root is not None and \
+                        ext_lpath_map.get(book.lpath, None) is None:
+                    ext_record = self.create_ext_text_record(ext_root, i,
+                            book.lpath, book.thumbnail)
+                    self.periodicalize_book(book, ext_record)
+
             debug_print('Timezone votes: %d GMT, %d LTZ, use_tz_var=%s'%
                 (gtz_count, ltz_count, use_tz_var))
             self.update_playlists(i, root, booklist, collections_attributes)
@@ -386,6 +440,47 @@ class XMLCache(object):
         self.fix_ids()
         debug_print('Finished update')

+    def is_sony_periodical(self, book):
+        if _('News') not in book.tags:
+            return False
+        if not book.lpath.lower().endswith('.epub'):
+            return False
+        if book.pubdate.date() < date(2010, 10, 17):
+            return False
+        return True
+
+    def periodicalize_book(self, book, record):
+        if not self.is_sony_periodical(book):
+            return
+        record.set('conformsTo',
+            "http://xmlns.sony.net/e-book/prs/periodicals/1.0/newspaper/1.0")
+
+        record.set('description', '')
+
+        name = None
+        if '[' in book.title:
+            name = book.title.split('[')[0].strip()
+            if len(name) < 4:
+                name = None
+        if not name:
+            try:
+                name = [t for t in book.tags if t != _('News')][0]
+            except:
+                name = None
+
+        if not name:
+            name = book.title
+
+        record.set('periodicalName', name)
+
+        try:
+            pubdate = strftime(book.pubdate.utctimetuple(),
+                    zone=lambda x : x)
+            record.set('publicationDate', pubdate)
+        except:
+            pass
+
+
     def rebuild_collections(self, booklist, bl_index):
         if bl_index not in self.record_roots:
             return
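The periodical heuristic boils down to three checks: a 'News' tag, an EPUB lpath, and a publication date no earlier than 17 Oct 2010. A standalone approximation (a plain 'News' string stands in for calibre's _() translation, and a stub replaces the book metadata object):

from datetime import date, datetime

class FakeBook(object):
    # Minimal stand-in for calibre's book metadata object.
    def __init__(self, title, tags, lpath, pubdate):
        self.title, self.tags, self.lpath, self.pubdate = title, tags, lpath, pubdate

def is_sony_periodical(book, news_tag='News'):
    # Same three tests as XMLCache.is_sony_periodical, without translations.
    if news_tag not in book.tags:
        return False
    if not book.lpath.lower().endswith('.epub'):
        return False
    if book.pubdate.date() < date(2010, 10, 17):
        return False
    return True

b = FakeBook('The Atlantic [Mon, 18 Oct 2010]', ['News'],
             'database/media/books/calibre/atlantic.epub',
             datetime(2010, 10, 18))
print(is_sony_periodical(b))  # True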
@@ -472,6 +567,25 @@ class XMLCache(object):
         root.append(ans)
         return ans

+    def create_ext_text_record(self, root, bl_id, lpath, thumbnail):
+        namespace = root.nsmap[None]
+        attrib = { 'path': lpath }
+        ans = root.makeelement('{%s}text'%namespace, attrib=attrib,
+                nsmap=root.nsmap)
+        ans.tail = '\n'
+        root[-1].tail = '\n' + '\t'
+        root.append(ans)
+        if thumbnail and thumbnail[-1]:
+            ans.text = '\n' + '\t\t'
+            t = root.makeelement('{%s}thumbnail'%namespace,
+                attrib={'width':str(thumbnail[0]), 'height':str(thumbnail[1])},
+                nsmap=root.nsmap)
+            t.text = 'main_thumbnail.jpg'
+            ans.append(t)
+            t.tail = '\n\t'
+        return ans
+
+
     def update_text_record(self, record, book, path, bl_index,
             gtz_count, ltz_count, use_tz_var):
         '''
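Serialized, the element create_ext_text_record builds comes out roughly as shown below; a sketch that drives lxml the same way, with a made-up lpath and the thumbnail size from the sample above:

from lxml import etree

NS = 'http://www.sony.com/xmlns/product/prs/device/1'
root = etree.fromstring('<cacheExt xmlns="%s"></cacheExt>' % NS)

# A namespaced <text> record with a nested <thumbnail> child, as the new method creates.
ans = root.makeelement('{%s}text' % NS,
        attrib={'path': 'database/media/books/calibre/example.epub'},
        nsmap=root.nsmap)
t = root.makeelement('{%s}thumbnail' % NS,
        attrib={'width': '167', 'height': '217'}, nsmap=root.nsmap)
t.text = 'main_thumbnail.jpg'
ans.append(t)
root.append(ans)

print(etree.tostring(root, pretty_print=True).decode('utf-8'))
# Output, roughly:
# <cacheExt xmlns="http://www.sony.com/xmlns/product/prs/device/1">
#   <text path="database/media/books/calibre/example.epub">
#     <thumbnail width="167" height="217">main_thumbnail.jpg</thumbnail>
#   </text>
# </cacheExt>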
@@ -589,6 +703,18 @@ class XMLCache(object):
                     '<?xml version="1.0" encoding="UTF-8"?>')
             with open(path, 'wb') as f:
                 f.write(raw)
+
+        for i, path in self.ext_paths.items():
+            try:
+                raw = etree.tostring(self.ext_roots[i], encoding='UTF-8',
+                        xml_declaration=True)
+            except:
+                continue
+            raw = raw.replace("<?xml version='1.0' encoding='UTF-8'?>",
+                    '<?xml version="1.0" encoding="UTF-8"?>')
+            with open(path, 'wb') as f:
+                f.write(raw)
+
     # }}}

     # Utility methods {{{
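The replace on the XML declaration exists because lxml writes the declaration with single quotes while the cache files use double quotes; a quick standalone sketch of the same normalization (bytes here, whereas the Python 2 driver code works on str):

from lxml import etree

root = etree.fromstring('<cacheExt xmlns="http://www.sony.com/xmlns/product/prs/device/1"/>')
raw = etree.tostring(root, encoding='UTF-8', xml_declaration=True)
# Swap lxml's single-quoted declaration for the double-quoted form used in the cache files.
raw = raw.replace(b"<?xml version='1.0' encoding='UTF-8'?>",
                  b'<?xml version="1.0" encoding="UTF-8"?>')
print(raw.decode('utf-8'))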