Merge from trunk

Charles Haley · 2011-10-16 14:00:45 +02:00
commit 6be5c5a1f0
6 changed files with 113 additions and 100 deletions

View File

@@ -1,50 +0,0 @@
#!/usr/bin/env python
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai
from __future__ import with_statement
__license__ = 'GPL v3'
__copyright__ = '2009, Kovid Goyal <kovid@kovidgoyal.net>'
__docformat__ = 'restructuredtext en'
from calibre.web.feeds.news import BasicNewsRecipe
class HunMilNews(BasicNewsRecipe):
    title = u'Honvedelem.hu'
    oldest_article = 3
    description = u'Katonah\xedrek'
    language = 'hu'
    lang = 'hu'
    encoding = 'windows-1250'
    category = 'news, military'
    no_stylesheets = True
    __author__ = 'Devilinside'
    max_articles_per_feed = 16
    no_stylesheets = True

    keep_only_tags = [dict(name='div', attrs={'class':'cikkoldal_cikk_cim'}),
                      dict(name='div', attrs={'class':'cikkoldal_cikk_alcim'}),
                      dict(name='div', attrs={'class':'cikkoldal_datum'}),
                      dict(name='div', attrs={'class':'cikkoldal_lead'}),
                      dict(name='div', attrs={'class':'cikkoldal_szoveg'}),
                      dict(name='img', attrs={'class':'ajanlo_kep_keretes'}),
                      ]

    feeds = [(u'Misszi\xf3k', u'http://www.honvedelem.hu/rss_b?c=22'),
             (u'Aktu\xe1lis hazai h\xedrek', u'http://www.honvedelem.hu/rss_b?c=3'),
             (u'K\xfclf\xf6ldi h\xedrek', u'http://www.honvedelem.hu/rss_b?c=4'),
             (u'A h\xf3nap t\xe9m\xe1ja', u'http://www.honvedelem.hu/rss_b?c=6'),
             (u'Riport', u'http://www.honvedelem.hu/rss_b?c=5'),
             (u'Portr\xe9k', u'http://www.honvedelem.hu/rss_b?c=7'),
             (u'Haditechnika', u'http://www.honvedelem.hu/rss_b?c=8'),
             (u'Programok, esem\xe9nyek', u'http://www.honvedelem.hu/rss_b?c=12')
             ]

View File

@@ -1,41 +0,0 @@
#!/usr/bin/env python
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai
from __future__ import with_statement
__license__ = 'GPL v3'
__copyright__ = '2009, Kovid Goyal <kovid@kovidgoyal.net>'
__docformat__ = 'restructuredtext en'
from calibre.web.feeds.news import BasicNewsRecipe
class HunTechNet(BasicNewsRecipe):
    title = u'TechNet'
    oldest_article = 3
    description = u'Az ut\xf3bbi 3 nap TechNet h\xedrei'
    language = 'hu'
    lang = 'hu'
    encoding = 'utf-8'
    __author__ = 'Devilinside'
    max_articles_per_feed = 30
    timefmt = ' [%Y, %b %d, %a]'

    remove_tags_before = dict(name='div', attrs={'id':'c-main'})
    remove_tags = [dict(name='div', attrs={'class':'wrp clr'}),
                   {'class' : ['screenrdr','forum','print','startlap','text_small','text_normal','text_big','email']},
                   ]
    keep_only_tags = [dict(name='div', attrs={'class':'cikk_head box'}), dict(name='div', attrs={'class':'cikk_txt box'})]

    feeds = [(u'C\xedmlap', u'http://www.technet.hu/rss/cimoldal/'),
             (u'TechTud', u'http://www.technet.hu/rss/techtud/'),
             (u'PDA M\xe1nia', u'http://www.technet.hu/rss/pdamania/'),
             (u'Telefon', u'http://www.technet.hu/rss/telefon/'),
             (u'Sz\xe1m\xedt\xf3g\xe9p', u'http://www.technet.hu/rss/notebook/'),
             (u'GPS', u'http://www.technet.hu/rss/gps/')]

View File

@@ -207,8 +207,11 @@ class PRS505(USBMS):
         c = self.initialize_XML_cache()
         blists = {}
         for i in c.paths:
-            if booklists[i] is not None:
-                blists[i] = booklists[i]
+            try:
+                if booklists[i] is not None:
+                    blists[i] = booklists[i]
+            except IndexError:
+                pass
         opts = self.settings()
         if opts.extra_customization:
             collections = [x.strip() for x in

View File

@@ -14,6 +14,7 @@ Device driver for the SONY T1 devices
 import os, time, re
 import sqlite3 as sqlite
 from contextlib import closing
+from datetime import date

 from calibre.devices.usbms.driver import USBMS, debug_print
 from calibre.devices.usbms.device import USBDevice
@@ -21,6 +22,7 @@ from calibre.devices.usbms.books import CollectionsBookList
 from calibre.devices.usbms.books import BookList
 from calibre.ebooks.metadata import authors_to_sort_string
 from calibre.constants import islinux
+from calibre.ebooks.metadata import authors_to_string, authors_to_sort_string

 DBPATH = 'Sony_Reader/database/books.db'
 THUMBPATH = 'Sony_Reader/database/cache/books/%s/thumbnail/main_thumbnail.jpg'
@@ -83,18 +85,26 @@ class PRST1(USBMS):
             'the same aspect ratio (width to height) as the cover. '
             'Unset it if you want the thumbnail to be the maximum size, '
             'ignoring aspect ratio.'),
+        _('Use SONY Author Format (First Author Only)') +
+        ':::' +
+        _('Set this option if you want the author on the Sony to '
+          'appear the same way the T1 sets it. This means it will '
+          'only show the first author for books with multiple authors. '
+          'Leave this disabled if you use Metadata Plugboards.')
     ]
     EXTRA_CUSTOMIZATION_DEFAULT = [
         ', '.join(['series', 'tags']),
         True,
         False,
         True,
+        False,
     ]

     OPT_COLLECTIONS = 0
     OPT_UPLOAD_COVERS = 1
     OPT_REFRESH_COVERS = 2
     OPT_PRESERVE_ASPECT_RATIO = 3
+    OPT_USE_SONY_AUTHORS = 4

     plugboards = None
     plugboard_func = None
@@ -104,6 +114,8 @@ class PRST1(USBMS):
         # that we do not preserve aspect ratio
         if not self.settings().extra_customization[self.OPT_PRESERVE_ASPECT_RATIO]:
             self.THUMBNAIL_WIDTH = 108
+        # Make sure the date offset is set to none, we'll calculate it in books.
+        self.device_offset = None

     def windows_filter_pnp_id(self, pnp_id):
         return '_LAUNCHER' in pnp_id or '_SETTING' in pnp_id
@@ -169,6 +181,27 @@ class PRST1(USBMS):
                 bl_collections.setdefault(row[0], [])
                 bl_collections[row[0]].append(row[1])

+            # collect information on offsets, but assume any
+            # offset we already calculated is correct
+            if self.device_offset is None:
+                query = 'SELECT file_path, modified_date FROM books'
+                cursor.execute(query)
+
+                time_offsets = {}
+                for i, row in enumerate(cursor):
+                    comp_date = int(os.path.getmtime(self.normalize_path(prefix + row[0])) * 1000);
+                    device_date = int(row[1]);
+                    offset = device_date - comp_date
+                    time_offsets.setdefault(offset, 0)
+                    time_offsets[offset] = time_offsets[offset] + 1
+
+                try:
+                    device_offset = max(time_offsets, key = lambda a: time_offsets.get(a))
+                    debug_print("Device Offset: %d ms" % device_offset)
+                    self.device_offset = device_offset
+                except ValueError:
+                    debug_print("No Books To Detect Device Offset.")
+
             for idx, book in enumerate(bl):
                 query = 'SELECT _id, thumbnail FROM books WHERE file_path = ?'
                 t = (book.lpath,)
@@ -238,6 +271,7 @@ class PRST1(USBMS):
         opts = self.settings()
         upload_covers = opts.extra_customization[self.OPT_UPLOAD_COVERS]
         refresh_covers = opts.extra_customization[self.OPT_REFRESH_COVERS]
+        use_sony_authors = opts.extra_customization[self.OPT_USE_SONY_AUTHORS]

         cursor = connection.cursor()
@@ -267,15 +301,21 @@ class PRST1(USBMS):
                     else:
                         author = authors_to_sort_string(newmi.authors)
                 else:
-                    author = newmi.authors[0]
+                    if use_sony_authors:
+                        author = newmi.authors[0]
+                    else:
+                        author = authors_to_string(newmi.authors)
             except:
                 author = _('Unknown')
             title = newmi.title or _('Unknown')

             # Get modified date
-            modified_date = os.path.getmtime(book.path)
-            time_offset = time.altzone if time.daylight else time.timezone
-            modified_date = (modified_date - time_offset) * 1000
+            modified_date = os.path.getmtime(book.path) * 1000
+            if self.device_offset is not None:
+                modified_date = modified_date + self.device_offset
+            else:
+                time_offset = -time.altzone if time.daylight else -time.timezone
+                modified_date = modified_date + (time_offset * 1000)

             if lpath not in db_books:
                 query = '''
@@ -306,6 +346,9 @@ class PRST1(USBMS):
                     self.upload_book_cover(connection, book, source_id)
                 db_books[lpath] = None

+                if self.is_sony_periodical(book):
+                    self.periodicalize_book(connection, book)
+
             for book, bookId in db_books.items():
                 if bookId is not None:
                     # Remove From Collections
@@ -479,3 +522,52 @@ class PRST1(USBMS):
         connection.commit()
         cursor.close()

+    def is_sony_periodical(self, book):
+        if _('News') not in book.tags:
+            return False
+        if not book.lpath.lower().endswith('.epub'):
+            return False
+        if book.pubdate.date() < date(2010, 10, 17):
+            return False
+        return True
+
+    def periodicalize_book(self, connection, book):
+        if not self.is_sony_periodical(book):
+            return
+
+        name = None
+        if '[' in book.title:
+            name = book.title.split('[')[0].strip()
+            if len(name) < 4:
+                name = None
+        if not name:
+            try:
+                name = [t for t in book.tags if t != _('News')][0]
+            except:
+                name = None
+        if not name:
+            name = book.title
+
+        pubdate = None
+        try:
+            pubdate = int(time.mktime(book.pubdate.timetuple()) * 1000)
+        except:
+            pass
+
+        cursor = connection.cursor()
+
+        query = '''
+        UPDATE books
+        SET conforms_to = 'http://xmlns.sony.net/e-book/prs/periodicals/1.0/newspaper/1.0',
+            periodical_name = ?,
+            description = ?,
+            publication_date = ?
+        WHERE _id = ?
+        '''
+        t = (name, None, pubdate, book.bookId,)
+
+        cursor.execute(query, t)
+        connection.commit()
+        cursor.close()

View File

@@ -212,7 +212,11 @@ class Serializer(object):
             if tocref.klass == "periodical":
                 buf.write('<div> <div height="1em"></div>')
             else:
-                buf.write('<div></div> <div> <h2 height="1em"><font size="+2"><b>'+tocref.title+'</b></font></h2> <div height="1em"></div>')
+                t = tocref.title
+                if isinstance(t, unicode):
+                    t = t.encode('utf-8')
+                buf.write('<div></div> <div> <h2 height="1em"><font size="+2"><b>'
+                        +t+'</b></font></h2> <div height="1em"></div>')

             buf.write('<ul>')
@@ -221,14 +225,17 @@ class Serializer(object):
                 itemhref = tocitem.href
                 if tocref.klass == 'periodical':
                     # This is a section node.
-                    # For periodical toca, the section urls are like r'feed_\d+/index.html'
+                    # For periodical tocs, the section urls are like r'feed_\d+/index.html'
                     # We dont want to point to the start of the first article
                     # so we change the href.
                     itemhref = re.sub(r'article_\d+/', '', itemhref)

                 self.href_offsets[itemhref].append(buf.tell())
                 buf.write('0000000000')
                 buf.write(' ><font size="+1" color="blue"><b><u>')
-                buf.write(tocitem.title)
+                t = tocitem.title
+                if isinstance(t, unicode):
+                    t = t.encode('utf-8')
+                buf.write(t)
                 buf.write('</u></b></font></a></li>')
             buf.write('</ul><div height="1em"></div></div><mbp:pagebreak />')

View File

@@ -374,6 +374,8 @@ def do_save_book_to_disk(id_, mi, cover, plugboards,
                 newmi.template_to_attribute(mi, cpb)
             else:
                 newmi = mi
+            if cover:
+                newmi.cover_data = ('jpg', cover)
             set_metadata(stream, newmi, fmt)
         except:
             if DEBUG: