Merge from trunk

102  Changelog.yaml
@@ -4,6 +4,108 @@
 # for important features/bug fixes.
 # Also, each release can have new and improved recipes.

+- version: 0.7.30
+  date: 2010-11-26
+
+  new features:
+    - title: "Support for Acer Lumiread and PocketBook Pro 602"
+
+    - title: "When importing by ISBN also allow the specification of a file to be imported."
+      tickets: [7400]
+
+    - title: "E-mail sending: Email sends are now regular jobs that can be accessed from the jobs list. Also, when sending using gmail/hotmail, send at most one email every five minutes to avoid triggering their spam controls. Failed sends are now retried one more time, automatically."
+
+    - title: "Content server: When a category contains only one item, go directly to the book list instead of forcing the user to click on that one item"
+
+    - title: "E-mail sending: Allow unencrypted connections to SMTP relay"
+
+    - title: "Improve startup times for large libraries by caching the has_cover check"
+
+    - title: "Update windows binary build to use python 2.7"
+
+    - title: "Metadata and cover download plugins from Nicebooks (disabled by default)"
+
+  bug fixes:
+    - title: "MOBI Input: Fix bug in cleanup regex that broke parsing of escaped XML declarations."
+      tickets: [7585]
+
+    - title: "Content server: Fix bug when user has custom categories/columns with non ascii names"
+      tickets: [7590]
+
+    - title: "RTF Output: Handle non-breaking spaces correctly"
+      tickets: [7668]
+
+    - title: "Conversion pipeline: When rasterizing SVG images, work around incorrect handling of percentage height specifications in QSvgRenderer."
+      tickets: [7598]
+
+    - title: "News download: Update version of feedparser used to parse RSS feeds."
+      tickets: [7674]
+
+    - title: "Tag Browser: Allow user to restore hidden categories by a right click, even if all categories have been hidden"
+
+    - title: "TXT/RTF Output: Handle XML processing instructions embedded in content correctly."
+      tickets: [7644]
+
+    - title: "MOBI Input: Workarounds for lack of nesting rules between block and inline tags"
+      tickets: [7618]
+
+    - title: "E-book viewer: Load all hyphenation patterns to support multi-lingual books"
+
+    - title: "E-book viewer: Fix incorrect lang names being used in hyphenation"
+
+    - title: "Check that the result file from a conversion is not empty before adding it; this protects against the case where the conversion process crashes and the GUI adds a zero byte file to the book record"
+
+    - title: "E-book viewer: More sophisticated algorithm to resize images to fit the viewer window. Should preserve aspect ratio in more cases"
+
+    - title: "Remove unnecessary calls to set_path when creating book records. Speeds up record creation by about 30% on my system"
+
+    - title: "Speedup for bibtex catalog generation."
+
+    - title: "Kobo driver: Fix missing table in deleting books process for Kobo WiFi and Kobo-O 1.8 Beta"
+
+    - title: "RTF Input: Preserve scene breaks in the form of empty paragraphs. Preprocessing: Improvements to chapter detection"
+
+    - title: "Fix custom recipes not being sorted by title"
+      tickets: [7486]
+
+    - title: "Kobo driver: Fix bug in managing the Im_Reading category on windows"
+
+  improved recipes:
+    - "El Pais - Uruguay"
+    - Argentinian La Nacion
+    - comics.com
+    - Mingpao
+    - Revista Muy Interesante
+    - Telepolis
+    - New York Times
+
+  new recipes:
+    - title: "Bangkok Biz News and Matichon"
+      author: "Anat Ruangrassamee"
+
+    - title: "The Workingham Times and Deutsche Welle"
+      author: "Darko Miletic"
+
+    - title: "Biz Portal"
+      author: "marbs"
+
+    - title: "Various Japanese news sources"
+      author: "Hiroshi Miura"
+
+    - title: "Arcamax"
+      author: "Starson17"
+
+    - title: "Various Spanish news sources"
+      author: "Gustavo Azambuja"
+
+    - title: "TSN"
+      author: Nexus
+
+    - title: "Zeit Online Premium"
+      author: Steffen Siebert
+
+
 - version: 0.7.29
   date: 2010-11-19
@@ -38,6 +38,7 @@ Monocle.Browser.on = {
   iPad: navigator.userAgent.indexOf("iPad") != -1,
   BlackBerry: navigator.userAgent.indexOf("BlackBerry") != -1,
   Android: navigator.userAgent.indexOf('Android') != -1,
+  MacOSX: navigator.userAgent.indexOf('Mac OS X') != -1,
   Kindle3: navigator.userAgent.match(/Kindle\/3/)
 }

@@ -162,12 +163,23 @@ Monocle.Browser.has.transform3d = Monocle.Browser.CSSProps.isSupported([
   'OPerspective',
   'msPerspective'
 ]) && Monocle.Browser.CSSProps.supportsMediaQueryProperty('transform-3d');
+Monocle.Browser.has.embedded = (top != self);

 Monocle.Browser.has.iframeTouchBug = Monocle.Browser.iOSVersionBelow("4.2");

 Monocle.Browser.has.selectThruBug = Monocle.Browser.iOSVersionBelow("4.2");

 Monocle.Browser.has.mustScrollSheaf = Monocle.Browser.is.MobileSafari;
 Monocle.Browser.has.iframeDoubleWidthBug = Monocle.Browser.has.mustScrollSheaf;

 Monocle.Browser.has.floatColumnBug = Monocle.Browser.is.WebKit;

+Monocle.Browser.has.relativeIframeWidthBug = Monocle.Browser.on.Android;
+
+Monocle.Browser.has.jumpFlickerBug =
+  Monocle.Browser.on.MacOSX && Monocle.Browser.is.WebKit;
+
 if (typeof window.console == "undefined") {
   window.console = {
@@ -1091,11 +1103,29 @@ Monocle.Reader = function (node, bookData, options, onLoadCallback) {
       cmpt.dom.setStyles(Monocle.Styles.component);
       Monocle.Styles.applyRules(cmpt.contentDocument.body, Monocle.Styles.body);
     }
+    lockFrameWidths();
     dom.find('overlay').dom.setStyles(Monocle.Styles.overlay);
     dispatchEvent('monocle:styles');
   }


+  function lockingFrameWidths() {
+    if (!Monocle.Browser.has.relativeIframeWidthBug) { return; }
+    for (var i = 0, cmpt; cmpt = dom.find('component', i); ++i) {
+      cmpt.style.display = "none";
+    }
+  }
+
+
+  function lockFrameWidths() {
+    if (!Monocle.Browser.has.relativeIframeWidthBug) { return; }
+    for (var i = 0, cmpt; cmpt = dom.find('component', i); ++i) {
+      cmpt.style.width = cmpt.parentNode.offsetWidth+"px";
+      cmpt.style.display = "block";
+    }
+  }
+
+
   function setBook(bk, place, callback) {
     p.book = bk;
     var pageCount = 0;

@@ -1121,12 +1151,14 @@ Monocle.Reader = function (node, bookData, options, onLoadCallback) {
     if (!p.initialized) {
      console.warn('Attempt to resize book before initialization.');
     }
+    lockingFrameWidths();
     if (!dispatchEvent("monocle:resizing", {}, true)) {
       return;
     }
     clearTimeout(p.resizeTimer);
     p.resizeTimer = setTimeout(
       function () {
+        lockFrameWidths();
         p.flipper.moveTo({ page: pageNumber() });
         dispatchEvent("monocle:resize");
       },
@@ -1765,12 +1797,7 @@ Monocle.Book = function (dataSource) {

   function componentIdMatching(str) {
-    for (var i = 0; i < p.componentIds.length; ++i) {
-      if (str.indexOf(p.componentIds[i]) > -1) {
-        return p.componentIds[i];
-      }
-    }
-    return null;
+    return p.componentIds.indexOf(str) >= 0 ? str : null;
   }

@@ -2018,6 +2045,12 @@ Monocle.Component = function (book, id, index, chapters, source) {

   function loadFrameFromURL(url, frame, callback) {
+    if (!url.match(/^\//)) {
+      var link = document.createElement('a');
+      link.setAttribute('href', url);
+      url = link.href;
+      delete(link);
+    }
     frame.onload = function () {
       frame.onload = null;
       Monocle.defer(callback);

@@ -2460,7 +2493,7 @@ Monocle.Flippers.Legacy = function (reader) {
   function moveTo(locus, callback) {
     var fn = frameToLocus;
     if (typeof callback == "function") {
-      fn = function () { frameToLocus(); callback(); }
+      fn = function (locus) { frameToLocus(locus); callback(locus); }
     }
     p.reader.getBook().setOrLoadPageAt(page(), locus, fn);
   }

@@ -2794,7 +2827,9 @@ Monocle.Dimensions.Columns = function (pageDiv) {
   function scrollerWidth() {
     var bdy = p.page.m.activeFrame.contentDocument.body;
     if (Monocle.Browser.has.iframeDoubleWidthBug) {
-      if (Monocle.Browser.iOSVersion < "4.1") {
+      if (Monocle.Browser.on.Android) {
+        return bdy.scrollWidth * 1.5; // I actually have no idea why 1.5.
+      } else if (Monocle.Browser.iOSVersion < "4.1") {
        var hbw = bdy.scrollWidth / 2;
        var sew = scrollerElement().scrollWidth;
        return Math.max(sew, hbw);
@@ -2969,6 +3004,7 @@ Monocle.Flippers.Slider = function (reader) {

   function setPage(pageDiv, locus, callback) {
+    ensureWaitControl();
     p.reader.getBook().setOrLoadPageAt(
       pageDiv,
       locus,

@@ -3048,6 +3084,7 @@ Monocle.Flippers.Slider = function (reader) {
     checkPoint(boxPointX);

     p.turnData.releasing = true;
+    showWaitControl(lowerPage());

     if (dir == k.FORWARDS) {
       if (

@@ -3088,14 +3125,18 @@ Monocle.Flippers.Slider = function (reader) {

   function onGoingBackward(x) {
-    var lp = lowerPage();
+    var lp = lowerPage(), up = upperPage();
+    showWaitControl(up);
     jumpOut(lp, // move lower page off-screen
       function () {
         flipPages(); // flip lower to upper
         setPage( // set upper page to previous
           lp,
           getPlace(lowerPage()).getLocus({ direction: k.BACKWARDS }),
-          function () { lifted(x); }
+          function () {
+            lifted(x);
+            hideWaitControl(up);
+          }
         );
       }
     );

@@ -3103,8 +3144,10 @@ Monocle.Flippers.Slider = function (reader) {

   function afterGoingForward() {
-    var up = upperPage();
+    var up = upperPage(), lp = lowerPage();
     if (p.interactive) {
+      showWaitControl(up);
+      showWaitControl(lp);
       setPage( // set upper (off screen) to current
         up,
         getPlace().getLocus({ direction: k.FORWARDS }),

@@ -3113,6 +3156,7 @@ Monocle.Flippers.Slider = function (reader) {
         }
       );
     } else {
+      showWaitControl(lp);
       flipPages();
       jumpIn(up, function () { prepareNextPage(announceTurn); });
     }

@@ -3171,6 +3215,8 @@ Monocle.Flippers.Slider = function (reader) {

   function announceTurn() {
+    hideWaitControl(upperPage());
+    hideWaitControl(lowerPage());
     p.reader.dispatchEvent('monocle:turn');
     resetTurnData();
   }

@@ -3319,12 +3365,14 @@ Monocle.Flippers.Slider = function (reader) {

   function jumpIn(pageDiv, callback) {
-    setX(pageDiv, 0, { duration: 1 }, callback);
+    var dur = Monocle.Browser.has.jumpFlickerBug ? 1 : 0;
+    setX(pageDiv, 0, { duration: dur }, callback);
   }


   function jumpOut(pageDiv, callback) {
-    setX(pageDiv, 0 - pageDiv.offsetWidth, { duration: 1 }, callback);
+    var dur = Monocle.Browser.has.jumpFlickerBug ? 1 : 0;
+    setX(pageDiv, 0 - pageDiv.offsetWidth, { duration: dur }, callback);
   }

@@ -3357,6 +3405,28 @@ Monocle.Flippers.Slider = function (reader) {
   }

+  function ensureWaitControl() {
+    if (p.waitControl) { return; }
+    p.waitControl = {
+      createControlElements: function (holder) {
+        return holder.dom.make('div', 'flippers_slider_wait');
+      }
+    }
+    p.reader.addControl(p.waitControl, 'page');
+  }
+
+
+  function showWaitControl(page) {
+    var ctrl = p.reader.dom.find('flippers_slider_wait', page.m.pageIndex);
+    ctrl.style.opacity = 0.5;
+  }
+
+
+  function hideWaitControl(page) {
+    var ctrl = p.reader.dom.find('flippers_slider_wait', page.m.pageIndex);
+    ctrl.style.opacity = 0;
+  }
+
   API.pageCount = p.pageCount;
   API.addPage = addPage;
   API.getPlace = getPlace;
BIN  resources/images/news/deutsche_welle_bs.png   Normal file (445 B)
BIN  resources/images/news/deutsche_welle_en.png   Normal file (445 B)
BIN  resources/images/news/deutsche_welle_es.png   Normal file (445 B)
BIN  resources/images/news/deutsche_welle_hr.png   Normal file (445 B)
BIN  resources/images/news/deutsche_welle_pt.png   Normal file (445 B)
BIN  resources/images/news/deutsche_welle_sr.png   Normal file (445 B)
BIN  resources/images/news/the_workingham_times.png   Normal file (1011 B)
110  resources/recipes/arcamax.recipe  Normal file
@@ -0,0 +1,110 @@
#!/usr/bin/env python

__license__ = 'GPL v3'
__copyright__ = 'Copyright 2010 Starson17'
'''
www.arcamax.com
'''
from calibre.web.feeds.news import BasicNewsRecipe

class Arcamax(BasicNewsRecipe):
    title = 'Arcamax'
    __author__ = 'Starson17'
    __version__ = '1.03'
    __date__ = '25 November 2010'
    description = u'Family Friendly Comics - Customize for more days/comics: Defaults to 7 days, 25 comics - 20 general, 5 editorial.'
    category = 'news, comics'
    language = 'en'
    use_embedded_content = False
    no_stylesheets = True
    remove_javascript = True
    cover_url = 'http://www.arcamax.com/images/pub/amuse/leftcol/zits.jpg'

    ####### USER PREFERENCES - SET COMICS AND NUMBER OF COMICS TO RETRIEVE ########
    num_comics_to_get = 7
    # CHOOSE COMIC STRIPS BELOW - REMOVE COMMENT '# ' FROM IN FRONT OF DESIRED STRIPS

    conversion_options = {'linearize_tables' : True
                          , 'comment'        : description
                          , 'tags'           : category
                          , 'language'       : language
                          }

    keep_only_tags = [dict(name='div', attrs={'class':['toon']}),
                     ]

    def parse_index(self):
        feeds = []
        for title, url in [
            ######## COMICS - GENERAL ########
            #(u"9 Chickweed Lane", u"http://www.arcamax.com/ninechickweedlane"),
            #(u"Agnes", u"http://www.arcamax.com/agnes"),
            #(u"Andy Capp", u"http://www.arcamax.com/andycapp"),
            (u"BC", u"http://www.arcamax.com/bc"),
            #(u"Baby Blues", u"http://www.arcamax.com/babyblues"),
            #(u"Beetle Bailey", u"http://www.arcamax.com/beetlebailey"),
            (u"Blondie", u"http://www.arcamax.com/blondie"),
            #(u"Boondocks", u"http://www.arcamax.com/boondocks"),
            #(u"Cathy", u"http://www.arcamax.com/cathy"),
            #(u"Daddys Home", u"http://www.arcamax.com/daddyshome"),
            (u"Dilbert", u"http://www.arcamax.com/dilbert"),
            #(u"Dinette Set", u"http://www.arcamax.com/thedinetteset"),
            (u"Dog Eat Doug", u"http://www.arcamax.com/dogeatdoug"),
            (u"Doonesbury", u"http://www.arcamax.com/doonesbury"),
            #(u"Dustin", u"http://www.arcamax.com/dustin"),
            (u"Family Circus", u"http://www.arcamax.com/familycircus"),
            (u"Garfield", u"http://www.arcamax.com/garfield"),
            #(u"Get Fuzzy", u"http://www.arcamax.com/getfuzzy"),
            #(u"Girls and Sports", u"http://www.arcamax.com/girlsandsports"),
            #(u"Hagar the Horrible", u"http://www.arcamax.com/hagarthehorrible"),
            #(u"Heathcliff", u"http://www.arcamax.com/heathcliff"),
            #(u"Jerry King Cartoons", u"http://www.arcamax.com/humorcartoon"),
            #(u"Luann", u"http://www.arcamax.com/luann"),
            #(u"Momma", u"http://www.arcamax.com/momma"),
            #(u"Mother Goose and Grimm", u"http://www.arcamax.com/mothergooseandgrimm"),
            (u"Mutts", u"http://www.arcamax.com/mutts"),
            #(u"Non Sequitur", u"http://www.arcamax.com/nonsequitur"),
            #(u"Pearls Before Swine", u"http://www.arcamax.com/pearlsbeforeswine"),
            #(u"Pickles", u"http://www.arcamax.com/pickles"),
            #(u"Red and Rover", u"http://www.arcamax.com/redandrover"),
            #(u"Rubes", u"http://www.arcamax.com/rubes"),
            #(u"Rugrats", u"http://www.arcamax.com/rugrats"),
            (u"Speed Bump", u"http://www.arcamax.com/speedbump"),
            (u"Wizard of Id", u"http://www.arcamax.com/wizardofid"),
            (u"Dilbert", u"http://www.arcamax.com/dilbert"),
            (u"Zits", u"http://www.arcamax.com/zits"),
            ]:
            articles = self.make_links(url)
            if articles:
                feeds.append((title, articles))
        return feeds

    def make_links(self, url):
        title = 'Temp'
        current_articles = []
        pages = range(1, self.num_comics_to_get+1)
        for page in pages:
            page_soup = self.index_to_soup(url)
            if page_soup:
                title = page_soup.find(name='div', attrs={'class':'toon'}).p.img['alt']
                page_url = url
                prev_page_url = 'http://www.arcamax.com' + page_soup.find('a', attrs={'class':'next'}, text='Previous').parent['href']
                current_articles.append({'title': title, 'url': page_url, 'description':'', 'date':''})
                url = prev_page_url
        current_articles.reverse()
        return current_articles

    def preprocess_html(self, soup):
        main_comic = soup.find('p', attrs={'class':'m0'})
        if main_comic.a['target'] == '_blank':
            main_comic.a.img['id'] = 'main_comic'
        return soup

    extra_css = '''
        h1{font-family:Arial,Helvetica,sans-serif; font-weight:bold;font-size:large;}
        h2{font-family:Arial,Helvetica,sans-serif; font-weight:normal;font-size:small;}
        img#main_comic {max-width:100%; min-width:100%;}
        p{font-family:Arial,Helvetica,sans-serif;font-size:small;}
        body{font-family:Helvetica,Arial,sans-serif;font-size:small;}
        '''
25  resources/recipes/bangkok_biz.recipe  Normal file
@@ -0,0 +1,25 @@
from calibre.web.feeds.news import BasicNewsRecipe

class AdvancedUserRecipe1290689337(BasicNewsRecipe):
    __author__ = 'Anat R.'
    language = 'th'
    title = u'Bangkok Biz News'
    oldest_article = 7
    max_articles_per_feed = 100
    no_stylesheets = True
    remove_javascript = True
    use_embedded_content = False
    feeds = [(u'Headlines',
              u'http://www.bangkokbiznews.com/home/services/rss/home.xml'),
             (u'Politics', u'http://www.bangkokbiznews.com/home/services/rss/politics.xml'),
             (u'Business', u'http://www.bangkokbiznews.com/home/services/rss/business.xml'),
             (u'Finance', u'http://www.bangkokbiznews.com/home/services/rss/finance.xml'),
             (u'Technology', u'http://www.bangkokbiznews.com/home/services/rss/it.xml')]
    remove_tags_before = dict(name='div', attrs={'class':'box-Detailcontent'})
    remove_tags_after = dict(name='p', attrs={'class':'allTags'})
    remove_tags = []
    remove_tags.append(dict(name = 'div', attrs = {'id': 'content-tools'}))
    remove_tags.append(dict(name = 'p', attrs = {'class':'allTags'}))
    remove_tags.append(dict(name = 'div', attrs = {'id':'morePic'}))
    remove_tags.append(dict(name = 'ul', attrs = {'class':'tabs-nav'}))
40  resources/recipes/biz_portal.recipe  Normal file
@@ -0,0 +1,40 @@
from calibre.web.feeds.news import BasicNewsRecipe

class AdvancedUserRecipe1283848012(BasicNewsRecipe):
    description = 'This is a recipe of BizPortal.co.il.'
    cover_url = 'http://www.bizportal.co.il/shukhahon/images/bizportal.jpg'
    title = u'BizPortal'
    language = 'he'
    __author__ = 'marbs'
    extra_css = 'img {max-width:100%;} body{direction: rtl;},title{direction: rtl; } ,article_description{direction: rtl; }, a.article{direction: rtl; } ,calibre_feed_description{direction: rtl; }'
    simultaneous_downloads = 5
    remove_javascript = True
    timefmt = '[%a, %d %b, %Y]'
    remove_empty_feeds = True
    oldest_article = 1
    max_articles_per_feed = 100
    remove_attributes = ['width']
    simultaneous_downloads = 5
    # keep_only_tags = dict(name='div', attrs={'id':'articleContainer'})
    remove_tags = [dict(name='img', attrs={'scr':['images/bizlogo_nl.gif']})]
    max_articles_per_feed = 100
    #preprocess_regexps = [
    #    (re.compile(r'<p> </p>', re.DOTALL|re.IGNORECASE), lambda match: '')
    #    ]

    feeds = [(u'חדשות שוק ההון', u'http://www.bizportal.co.il/shukhahon/messRssUTF2.xml'),
             (u'חדשות וול סטריט בעברית', u'http://www.bizportal.co.il/shukhahon/images/bizportal.jpg'),
             (u'שיווק ופרסום', u'http://www.bizportal.co.il/shukhahon/messRssUTF145.xml'),
             (u'משפט', u'http://www.bizportal.co.il/shukhahon/messRssUTF3.xml'),
             (u'ניתוח טכני', u'http://www.bizportal.co.il/shukhahon/messRssUTF5.xml'),
             (u'דיני עבודה ושכר', u'http://www.bizportal.co.il/shukhahon/messRssUTF6.xml'),
             (u'מיסוי', u'http://www.bizportal.co.il/shukhahon/messRssUTF7.xml'),
             (u'טאבו', u'http://www.bizportal.co.il/shukhahon/messRssUTF8.xml'),
             (u'נדל"ן', u'http://www.bizportal.co.il/shukhahon/messRssUTF160.xml'),
            ]

    def print_version(self, url):
        split1 = url.split("=")
        print_url = 'http://www.bizportal.co.il/web/webnew/shukhahon/biznews02print.shtml?mid=' + split1[1]
        return print_url
@@ -27,7 +27,7 @@ class BrandEins(BasicNewsRecipe):
     encoding = 'utf-8'
     language = 'de'
     publication_type = 'magazine'
-    needs_subscription = True
+    needs_subscription = 'optional'

     # 2 is the last full magazine (default)
     # 1 is the newest (but not full)
@@ -11,7 +11,6 @@ class AdvancedUserRecipe1275798572(BasicNewsRecipe):
     remove_javascript = True
     use_embedded_content = False
     no_stylesheets = True
-    language = 'en'
     masthead_url = 'http://www.cbc.ca/includes/gfx/cbcnews_logo_09.gif'
     cover_url = 'http://img692.imageshack.us/img692/2814/cbc.png'
     keep_only_tags = [dict(name='div', attrs={'id':['storyhead','storybody']})]
@@ -347,6 +347,7 @@ class Comics(BasicNewsRecipe):
                 title = strip_tag['title']
                 print 'title: ', title
                 current_articles.append({'title': title, 'url': page_url, 'description':'', 'date':''})
+        current_articles.reverse()
         return current_articles

     extra_css = '''
76  resources/recipes/deutsche_welle_bs.recipe  Normal file
@@ -0,0 +1,76 @@
__license__ = 'GPL v3'
__copyright__ = '2010, Darko Miletic <darko.miletic at gmail.com>'
'''
dw-world.de
'''

import re
from calibre.web.feeds.news import BasicNewsRecipe

class DeutscheWelle_bs(BasicNewsRecipe):
    title = 'Deutsche Welle'
    __author__ = 'Darko Miletic'
    description = 'Vijesti iz Njemacke i svijeta'
    publisher = 'Deutsche Welle'
    category = 'news, politics, Germany'
    oldest_article = 1
    max_articles_per_feed = 100
    use_embedded_content = False
    no_stylesheets = True
    language = 'bs'
    publication_type = 'newsportal'
    remove_empty_feeds = True
    masthead_url = 'http://www.dw-world.de/skins/std/channel1/pics/dw_logo1024.gif'
    extra_css = """
        @font-face {font-family: "sans1";src:url(res:///opt/sony/ebook/FONT/tt0003m_.ttf)}
        body{font-family: Arial,sans1,sans-serif}
        img{margin-top: 0.5em; margin-bottom: 0.2em; display: block}
        .caption{font-size: x-small; display: block; margin-bottom: 0.4em}
        """
    preprocess_regexps = [(re.compile(u'\u0110'), lambda match: u'\u00D0')]

    conversion_options = {
        'comment'   : description
      , 'tags'      : category
      , 'publisher' : publisher
      , 'language'  : language
    }

    remove_tags = [
        dict(name=['iframe','embed','object','form','base','meta','link'])
       ,dict(attrs={'class':'actionFooter'})
    ]
    keep_only_tags = [dict(attrs={'class':'ArticleDetail detail'})]
    remove_attributes = ['height','width','onclick','border','lang']

    feeds = [
        (u'Politika', u'http://rss.dw-world.de/rdf/rss-bos-pol')
       ,(u'Evropa', u'http://rss.dw-world.de/rdf/rss-bos-eu')
       ,(u'Kiosk', u'http://rss.dw-world.de/rdf/rss-bos-eu')
       ,(u'Ekonomija i Nuka', u'http://rss.dw-world.de/rdf/rss-bos-eco')
       ,(u'Kultura', u'http://rss.dw-world.de/rdf/rss-bos-cul')
       ,(u'Sport', u'http://rss.dw-world.de/rdf/rss-bos-sp')
    ]

    def print_version(self, url):
        artl = url.rpartition('/')[2]
        return 'http://www.dw-world.de/popups/popup_printcontent/' + artl

    def preprocess_html(self, soup):
        for item in soup.findAll('a'):
            limg = item.find('img')
            if item.string is not None:
                str = item.string
                item.replaceWith(str)
            else:
                if limg:
                    item.name = 'div'
                    del item['href']
                    if item.has_key('target'):
                        del item['target']
                else:
                    str = self.tag_to_string(item)
                    item.replaceWith(str)
        return soup
66  resources/recipes/deutsche_welle_en.recipe  Normal file
@@ -0,0 +1,66 @@
__license__ = 'GPL v3'
__copyright__ = '2010, Darko Miletic <darko.miletic at gmail.com>'
'''
dw-world.de
'''

from calibre.web.feeds.news import BasicNewsRecipe

class DeutscheWelle_en(BasicNewsRecipe):
    title = 'Deutsche Welle'
    __author__ = 'Darko Miletic'
    description = 'News from Germany and World'
    publisher = 'Deutsche Welle'
    category = 'news, politics, Germany'
    oldest_article = 1
    max_articles_per_feed = 100
    use_embedded_content = False
    no_stylesheets = True
    language = 'en'
    publication_type = 'newsportal'
    remove_empty_feeds = True
    masthead_url = 'http://www.dw-world.de/skins/std/channel1/pics/dw_logo1024.gif'
    extra_css = """
        body{font-family: Arial,sans-serif}
        img{margin-top: 0.5em; margin-bottom: 0.2em; display: block}
        .caption{font-size: x-small; display: block; margin-bottom: 0.4em}
        """

    conversion_options = {
        'comment'   : description
      , 'tags'      : category
      , 'publisher' : publisher
      , 'language'  : language
    }

    remove_tags = [
        dict(name=['iframe','embed','object','form','base','meta','link'])
       ,dict(attrs={'class':'actionFooter'})
    ]
    keep_only_tags = [dict(attrs={'class':'ArticleDetail detail'})]
    remove_attributes = ['height','width','onclick','border','lang']

    feeds = [(u'All news', u'http://rss.dw-world.de/rdf/rss-en-all')]

    def print_version(self, url):
        artl = url.rpartition('/')[2]
        return 'http://www.dw-world.de/popups/popup_printcontent/' + artl

    def preprocess_html(self, soup):
        for item in soup.findAll('a'):
            limg = item.find('img')
            if item.string is not None:
                str = item.string
                item.replaceWith(str)
            else:
                if limg:
                    item.name = 'div'
                    del item['href']
                    if item.has_key('target'):
                        del item['target']
                else:
                    str = self.tag_to_string(item)
                    item.replaceWith(str)
        return soup
66  resources/recipes/deutsche_welle_es.recipe  Normal file
@@ -0,0 +1,66 @@
__license__ = 'GPL v3'
__copyright__ = '2010, Darko Miletic <darko.miletic at gmail.com>'
'''
dw-world.de
'''

from calibre.web.feeds.news import BasicNewsRecipe

class DeutscheWelle_es(BasicNewsRecipe):
    title = 'Deutsche Welle'
    __author__ = 'Darko Miletic'
    description = 'Noticias desde Alemania y mundo'
    publisher = 'Deutsche Welle'
    category = 'news, politics, Germany'
    oldest_article = 1
    max_articles_per_feed = 100
    use_embedded_content = False
    no_stylesheets = True
    language = 'es'
    publication_type = 'newsportal'
    remove_empty_feeds = True
    masthead_url = 'http://www.dw-world.de/skins/std/channel1/pics/dw_logo1024.gif'
    extra_css = """
        body{font-family: Arial,sans-serif}
        img{margin-top: 0.5em; margin-bottom: 0.2em; display: block}
        .caption{font-size: x-small; display: block; margin-bottom: 0.4em}
        """

    conversion_options = {
        'comment'   : description
      , 'tags'      : category
      , 'publisher' : publisher
      , 'language'  : language
    }

    remove_tags = [
        dict(name=['iframe','embed','object','form','base','meta','link'])
       ,dict(attrs={'class':'actionFooter'})
    ]
    keep_only_tags = [dict(attrs={'class':'ArticleDetail detail'})]
    remove_attributes = ['height','width','onclick','border','lang']

    feeds = [(u'Noticias', u'http://rss.dw-world.de/rdf/rss-sp-all')]

    def print_version(self, url):
        artl = url.rpartition('/')[2]
        return 'http://www.dw-world.de/popups/popup_printcontent/' + artl

    def preprocess_html(self, soup):
        for item in soup.findAll('a'):
            limg = item.find('img')
            if item.string is not None:
                str = item.string
                item.replaceWith(str)
            else:
                if limg:
                    item.name = 'div'
                    del item['href']
                    if item.has_key('target'):
                        del item['target']
                else:
                    str = self.tag_to_string(item)
                    item.replaceWith(str)
        return soup
74  resources/recipes/deutsche_welle_hr.recipe  Normal file
@@ -0,0 +1,74 @@
__license__ = 'GPL v3'
__copyright__ = '2010, Darko Miletic <darko.miletic at gmail.com>'
'''
dw-world.de
'''

import re
from calibre.web.feeds.news import BasicNewsRecipe

class DeutscheWelle_hr(BasicNewsRecipe):
    title = 'Deutsche Welle'
    __author__ = 'Darko Miletic'
    description = 'Vesti iz Njemacke i svijeta'
    publisher = 'Deutsche Welle'
    category = 'news, politics, Germany'
    oldest_article = 1
    max_articles_per_feed = 100
    use_embedded_content = False
    no_stylesheets = True
    language = 'hr'
    publication_type = 'newsportal'
    remove_empty_feeds = True
    masthead_url = 'http://www.dw-world.de/skins/std/channel1/pics/dw_logo1024.gif'
    extra_css = """
        @font-face {font-family: "sans1";src:url(res:///opt/sony/ebook/FONT/tt0003m_.ttf)}
        body{font-family: Arial,sans1,sans-serif}
        img{margin-top: 0.5em; margin-bottom: 0.2em; display: block}
        .caption{font-size: x-small; display: block; margin-bottom: 0.4em}
        """
    preprocess_regexps = [(re.compile(u'\u0110'), lambda match: u'\u00D0')]

    conversion_options = {
        'comment'   : description
      , 'tags'      : category
      , 'publisher' : publisher
      , 'language'  : language
    }

    remove_tags = [
        dict(name=['iframe','embed','object','form','base','meta','link'])
       ,dict(attrs={'class':'actionFooter'})
    ]
    keep_only_tags = [dict(attrs={'class':'ArticleDetail detail'})]
    remove_attributes = ['height','width','onclick','border','lang']

    feeds = [
        (u'Svijet', u'http://rss.dw-world.de/rdf/rss-cro-svijet')
       ,(u'Europa', u'http://rss.dw-world.de/rdf/rss-cro-eu')
       ,(u'Njemacka', u'http://rss.dw-world.de/rdf/rss-cro-ger')
       ,(u'Vijesti', u'http://rss.dw-world.de/rdf/rss-cro-all')
    ]

    def print_version(self, url):
        artl = url.rpartition('/')[2]
        return 'http://www.dw-world.de/popups/popup_printcontent/' + artl

    def preprocess_html(self, soup):
        for item in soup.findAll('a'):
            limg = item.find('img')
            if item.string is not None:
                str = item.string
                item.replaceWith(str)
            else:
                if limg:
                    item.name = 'div'
                    del item['href']
                    if item.has_key('target'):
                        del item['target']
                else:
                    str = self.tag_to_string(item)
                    item.replaceWith(str)
        return soup
66  resources/recipes/deutsche_welle_pt.recipe  Normal file
@@ -0,0 +1,66 @@
__license__ = 'GPL v3'
__copyright__ = '2010, Darko Miletic <darko.miletic at gmail.com>'
'''
dw-world.de
'''

from calibre.web.feeds.news import BasicNewsRecipe

class DeutscheWelle_pt(BasicNewsRecipe):
    title = 'Deutsche Welle'
    __author__ = 'Darko Miletic'
    description = 'Noticias desde Alemania y mundo'
    publisher = 'Deutsche Welle'
    category = 'news, politics, Germany'
    oldest_article = 1
    max_articles_per_feed = 100
    use_embedded_content = False
    no_stylesheets = True
    language = 'pt'
    publication_type = 'newsportal'
    remove_empty_feeds = True
    masthead_url = 'http://www.dw-world.de/skins/std/channel1/pics/dw_logo1024.gif'
    extra_css = """
        body{font-family: Arial,sans-serif}
        img{margin-top: 0.5em; margin-bottom: 0.2em; display: block}
        .caption{font-size: x-small; display: block; margin-bottom: 0.4em}
        """

    conversion_options = {
        'comment'   : description
      , 'tags'      : category
      , 'publisher' : publisher
      , 'language'  : language
    }

    remove_tags = [
        dict(name=['iframe','embed','object','form','base','meta','link'])
       ,dict(attrs={'class':'actionFooter'})
    ]
    keep_only_tags = [dict(attrs={'class':'ArticleDetail detail'})]
    remove_attributes = ['height','width','onclick','border','lang']

    feeds = [(u'Noticias', u'http://rss.dw-world.de/rdf/rss-br-all')]

    def print_version(self, url):
        artl = url.rpartition('/')[2]
        return 'http://www.dw-world.de/popups/popup_printcontent/' + artl

    def preprocess_html(self, soup):
        for item in soup.findAll('a'):
            limg = item.find('img')
            if item.string is not None:
                str = item.string
                item.replaceWith(str)
            else:
                if limg:
                    item.name = 'div'
                    del item['href']
                    if item.has_key('target'):
                        del item['target']
                else:
                    str = self.tag_to_string(item)
                    item.replaceWith(str)
        return soup
79  resources/recipes/deutsche_welle_sr.recipe  Normal file
@@ -0,0 +1,79 @@
__license__ = 'GPL v3'
__copyright__ = '2010, Darko Miletic <darko.miletic at gmail.com>'
'''
dw-world.de
'''

import re
from calibre.web.feeds.news import BasicNewsRecipe

class DeutscheWelle_sr(BasicNewsRecipe):
    title = 'Deutsche Welle'
    __author__ = 'Darko Miletic'
    description = 'Vesti iz Nemacke i sveta'
    publisher = 'Deutsche Welle'
    category = 'news, politics, Germany'
    oldest_article = 1
    max_articles_per_feed = 100
    use_embedded_content = False
    no_stylesheets = True
    language = 'sr'
    publication_type = 'newsportal'
    remove_empty_feeds = True
    masthead_url = 'http://www.dw-world.de/skins/std/channel1/pics/dw_logo1024.gif'
    extra_css = """
        @font-face {font-family: "sans1";src:url(res:///opt/sony/ebook/FONT/tt0003m_.ttf)}
        body{font-family: Arial,sans1,sans-serif}
        img{margin-top: 0.5em; margin-bottom: 0.2em; display: block}
        .caption{font-size: x-small; display: block; margin-bottom: 0.4em}
        """
    preprocess_regexps = [(re.compile(u'\u0110'), lambda match: u'\u00D0')]

    conversion_options = {
        'comment'   : description
      , 'tags'      : category
      , 'publisher' : publisher
      , 'language'  : language
    }

    remove_tags = [
        dict(name=['iframe','embed','object','form','base','meta','link'])
       ,dict(attrs={'class':'actionFooter'})
    ]
    keep_only_tags = [dict(attrs={'class':'ArticleDetail detail'})]
    remove_attributes = ['height','width','onclick','border','lang']

    feeds = [
        (u'Politika', u'http://rss.dw-world.de/rdf/rss-ser-pol')
       ,(u'Srbija', u'http://rss.dw-world.de/rdf/rss-ser-pol-ser')
       ,(u'Region', u'http://rss.dw-world.de/rdf/rss-ser-pol-region')
       ,(u'Evropa', u'http://rss.dw-world.de/rdf/rss-ser-pol-eu')
       ,(u'Nemacka', u'http://rss.dw-world.de/rdf/rss-ser-pol-ger')
       ,(u'Svet', u'http://rss.dw-world.de/rdf/rss-ser-pol-ger')
       ,(u'Pregled stampe', u'http://rss.dw-world.de/rdf/rss-ser-pol-ger')
       ,(u'Nauka Tehnika Medicina', u'http://rss.dw-world.de/rdf/rss-ser-science')
       ,(u'Kultura', u'feed:http://rss.dw-world.de/rdf/rss-ser-cul')
    ]

    def print_version(self, url):
        artl = url.rpartition('/')[2]
        return 'http://www.dw-world.de/popups/popup_printcontent/' + artl

    def preprocess_html(self, soup):
        for item in soup.findAll('a'):
            limg = item.find('img')
            if item.string is not None:
                str = item.string
                item.replaceWith(str)
            else:
                if limg:
                    item.name = 'div'
                    del item['href']
                    if item.has_key('target'):
                        del item['target']
                else:
                    str = self.tag_to_string(item)
                    item.replaceWith(str)
        return soup
@@ -9,23 +9,34 @@ http://www.elpais.com.uy/
 from calibre.web.feeds.news import BasicNewsRecipe

 class General(BasicNewsRecipe):
-    title = 'Diario El Pais'
+    title = 'El Pais - Uruguay'
     __author__ = 'Gustavo Azambuja'
-    description = 'Noticias | Uruguay'
+    description = 'Noticias de Uruguay y el resto del mundo'
+    publisher = 'EL PAIS S.A.'
+    category = 'news, politics, Uruguay'
     language = 'es'
     timefmt = '[%a, %d %b, %Y]'
     use_embedded_content = False
     recursion = 2
     encoding = 'iso-8859-1'
+    masthead_url = 'http://www.elpais.com.uy/Images/09/cabezal/logo_PDEP.png'
+    publication_type = 'newspaper'
     remove_javascript = True
     no_stylesheets = True

     oldest_article = 2
-    max_articles_per_feed = 100
+    max_articles_per_feed = 200
     keep_only_tags = [
         dict(name='h1'),
         dict(name='div', attrs={'id':'Contenido'})
     ]
+
+    conversion_options = {
+        'comment'   : description
+      , 'tags'      : category
+      , 'publisher' : publisher
+      , 'language'  : language
+    }
     remove_tags = [
         dict(name='div', attrs={'class':['date_text', 'comments', 'form_section', 'share_it']}),
         dict(name='div', attrs={'id':['relatedPosts', 'spacer', 'banner_izquierda', 'right_container']}),

@@ -38,6 +49,8 @@ class General(BasicNewsRecipe):
         h3{font-size: 14px;color:#999999; font-family:Geneva, Arial, Helvetica, sans-serif;font-weight: bold;}
         h2{color:#666666; font-family:Geneva, Arial, Helvetica, sans-serif;font-size:small;}
         p {font-family:Arial,Helvetica,sans-serif;}
+        body{font-family: Verdana,Arial,Helvetica,sans-serif }
+        img{margin-bottom: 0.4em; display:block;}
         '''
     feeds = [
         (u'Ultimo Momento', u'http://www.elpais.com.uy/formatos/rss/index.asp?seccion=umomento'),
@@ -21,10 +21,13 @@ class Lanacion(BasicNewsRecipe):
     remove_empty_feeds = True
     masthead_url = 'http://www.lanacion.com.ar/imgs/layout/logos/ln341x47.gif'
     extra_css = """ h1{font-family: Georgia,serif}
+                    h2{color: #626262}
                     body{font-family: Arial,sans-serif}
-                    img{margin-top: 0.5em; margin-bottom: 0.2em}
+                    img{margin-top: 0.5em; margin-bottom: 0.2em; display: block}
+                    .notaFecha{color: #808080}
                     .notaEpigrafe{font-size: x-small}
-                    .topNota h1{font-family: Arial,sans-serif} """
+                    .topNota h1{font-family: Arial,sans-serif}
+                """

     conversion_options = {

@@ -38,12 +41,12 @@ class Lanacion(BasicNewsRecipe):
     remove_tags = [
         dict(name='div' , attrs={'class':'notaComentario floatFix noprint' })
        ,dict(name='ul' , attrs={'class':['cajaHerramientas cajaTop noprint','herramientas noprint']})
-       ,dict(name='div' , attrs={'class':'cajaHerramientas noprint' })
+       ,dict(name='div' , attrs={'class':['cajaHerramientas noprint','cajaHerramientas floatFix'] })
-       ,dict(attrs={'class':['titulosMultimedia','derecha','techo color','encuesta','izquierda compartir','floatFix']})
+       ,dict(attrs={'class':['titulosMultimedia','derecha','techo color','encuesta','izquierda compartir','floatFix','videoCentro']})
-       ,dict(name=['iframe','embed','object','form','base','hr'])
+       ,dict(name=['iframe','embed','object','form','base','hr','meta','link','input'])
     ]
     remove_tags_after = dict(attrs={'class':['tags','nota-destacado']})
-    remove_attributes = ['height','width','visible']
+    remove_attributes = ['height','width','visible','onclick','data-count','name']

     feeds = [
         (u'Ultimas noticias' , u'http://www.lanacion.com.ar/herramientas/rss/index.asp?origen=2' )
22  resources/recipes/matichon.recipe  Normal file
@@ -0,0 +1,22 @@
from calibre.web.feeds.news import BasicNewsRecipe

class AdvancedUserRecipe1290412756(BasicNewsRecipe):
    __author__ = 'Anat R.'
    title = u'Matichon'
    oldest_article = 7
    language = 'th'
    max_articles_per_feed = 100
    no_stylesheets = True
    remove_javascript = True
    use_embedded_content = False
    feeds = [(u'News', u'http://www.matichon.co.th/rss/news_article.xml'),
             (u'Columns', u'http://www.matichon.co.th/rss/news_columns.xml'),
             (u'Politics', u'http://www.matichon.co.th/rss/news_politic.xml'),
             (u'Business', u'http://www.matichon.co.th/rss/news_business.xml'),
             (u'World', u'http://www.matichon.co.th/rss/news_world.xml'),
             (u'Sports', u'http://www.matichon.co.th/rss/news_sport.xml'),
             (u'Entertainment', u'http://www.matichon.co.th/rss/news_entertainment.xml')]
    keep_only_tags = []
    keep_only_tags.append(dict(name = 'h3', attrs = {'class' : 'read-h'}))
    keep_only_tags.append(dict(name = 'p', attrs = {'class' : 'read-time'}))
    keep_only_tags.append(dict(name = 'div', attrs = {'class' : 'news-content'}))
59  resources/recipes/the_workingham_times.recipe  Normal file
@@ -0,0 +1,59 @@
__license__ = 'GPL v3'
__copyright__ = '2010, Darko Miletic <darko.miletic at gmail.com>'
'''
www.getwokingham.co.uk
'''

from calibre.web.feeds.recipes import BasicNewsRecipe

class TheWorkinghamTimes(BasicNewsRecipe):
    title = 'The Workingham Times'
    __author__ = 'Darko Miletic'
    description = 'News from UK'
    oldest_article = 2
    max_articles_per_feed = 100
    no_stylesheets = True
    use_embedded_content = False
    encoding = 'utf8'
    publisher = 'The Wokingham Times - S&B media'
    category = 'news, UK, world'
    language = 'en_GB'
    publication_type = 'newsportal'
    extra_css = """
        body{ font-family: Arial,sans-serif }
        img{display: block; margin-bottom: 0.4em}
        """

    conversion_options = {
        'comments'  : description
       ,'tags'      : category
       ,'language'  : language
       ,'publisher' : publisher
    }

    keep_only_tags = [dict(name='div', attrs={'id':'article-body'})]
    remove_tags = [
        dict(name='div' , attrs={'class':['ad']})
       ,dict(name=['meta','base','iframe','embed','object'])
       ,dict(name='span' , attrs={'class':'caption small'})
    ]
    remove_attributes = ['width','height','lang']

    feeds = [
        ('Home'         , 'http://www.getwokingham.co.uk/rss.xml')
       ,('News'         , 'http://www.getwokingham.co.uk/news/rss.xml')
       ,('Entertainment', 'http://www.getwokingham.co.uk/entertainment/rss.xml')
       ,('Lifestyle'    , 'http://www.getwokingham.co.uk/lifestyle/rss.xml')
    ]

    def preprocess_html(self, soup):
        for item in soup.findAll(style=True):
            del item['style']
        for item in soup.findAll('a'):
            if item.string is not None:
                str = item.string
                item.replaceWith(str)
            else:
                item.name = 'span'
                del item['href']
        return soup
@@ -2022,7 +2022,8 @@ var Hyphenator = (function (window) {
         if (n.nodeType === 3 && n.data.length >= min) { //type 3 = #text -> hyphenate!
             n.data = n.data.replace(Hyphenator.languages[lang].genRegExp, hyphenate);
         } else if (n.nodeType === 1) {
-            if (n.lang !== '') {
+            // Modified by Kovid to use element lang only if it has been loaded
+            if (n.lang !== '' && Hyphenator.languages.hasOwnProperty(n.lang)) {
                 Hyphenator.hyphenate(n, n.lang);
             } else {
                 Hyphenator.hyphenate(n, lang);
@@ -6,14 +6,43 @@
 function scale_images() {
     $("img:visible").each(function() {
-        var offset = $(this).offset();
+        var img = $(this);
+        var offset = img.offset();
+        var avail_width = window.innerWidth - offset.left - 5;
+        var avail_height = window.innerHeight - 5;
+        img.css('width', img.data('orig-width'));
+        img.css('height', img.data('orig-height'));
+        var width = img.width();
+        var height = img.height();
+        var ratio = 0;
+
+        if (width > avail_width) {
+            ratio = avail_width / width;
+            img.css('width', avail_width+'px');
+            img.css('height', (ratio*height) + 'px');
+            height = height * ratio;
+            width = width * ratio;
+        }
+
+        if (height > avail_height) {
+            ratio = avail_height / height;
+            img.css('height', avail_height);
+            img.css('width', width * ratio);
+        }
         //window.py_bridge.debug(window.getComputedStyle(this, '').getPropertyValue('max-width'));
-        $(this).css("max-width", (window.innerWidth-offset.left-5)+"px");
-        $(this).css("max-height", (window.innerHeight-5)+"px");
     });
 }

+function store_original_size_attributes() {
+    $("img").each(function() {
+        var img = $(this);
+        img.data('orig-width', img.css('width'));
+        img.data('orig-height', img.css('height'));
+    });
+}
+
 function setup_image_scaling_handlers() {
+    store_original_size_attributes();
     scale_images();
     $(window).resize(function(){
         scale_images();
@@ -90,11 +90,13 @@ fc_lib = '/usr/lib'
 podofo_inc = '/usr/include/podofo'
 podofo_lib = '/usr/lib'
 chmlib_inc_dirs = chmlib_lib_dirs = []
+sqlite_inc_dirs = []
 
 if iswindows:
     prefix = r'C:\cygwin\home\kovid\sw'
     sw_inc_dir = os.path.join(prefix, 'include')
     sw_lib_dir = os.path.join(prefix, 'lib')
+    sqlite_inc_dirs = [sw_inc_dir]
     fc_inc = os.path.join(sw_inc_dir, 'fontconfig')
     fc_lib = sw_lib_dir
     chmlib_inc_dirs = consolidate('CHMLIB_INC_DIR', os.path.join(prefix,
@@ -18,7 +18,7 @@ from setup.build_environment import fc_inc, fc_lib, chmlib_inc_dirs, \
         QMAKE, msvc, MT, win_inc, win_lib, png_inc_dirs, win_ddk, \
         magick_inc_dirs, magick_lib_dirs, png_lib_dirs, png_libs, \
         magick_error, magick_libs, ft_lib_dirs, ft_libs, jpg_libs, \
-        jpg_lib_dirs, chmlib_lib_dirs
+        jpg_lib_dirs, chmlib_lib_dirs, sqlite_inc_dirs
 MT
 isunix = islinux or isosx or isfreebsd
 
@@ -58,6 +58,11 @@ if iswindows:
 
 extensions = [
 
+    Extension('sqlite_custom',
+        ['calibre/library/sqlite_custom.c'],
+        inc_dirs=sqlite_inc_dirs
+        ),
+
     Extension('chmlib',
         ['calibre/utils/chm/swig_chm.c'],
         libraries=['ChmLib' if iswindows else 'chm'],
@@ -32,6 +32,12 @@ Run the following command to install python dependencies::
 
 Install BeautifulSoup 3.0.x manually into site-packages (3.1.x parses broken HTML very poorly)
 
+
+SQLite
+---------
+
+Put sqlite3*.h from the sqlite windows amlgamation in ~/sw/include
+
 Qt
 --------
 
@@ -632,6 +632,10 @@ def main(outfile, args=sys.argv[1:]):
     except tokenize.TokenError, e:
         print >> sys.stderr, '%s: %s, line %d, column %d' % (
             e[0], filename, e[1][0], e[1][1])
+    except IndentationError, e:
+        print >> sys.stderr, '%s: %s, line %s, column %s' % (
+            e[0], filename, e.lineno, e[1][1])
+
     finally:
         if closep:
             fp.close()
@@ -2,7 +2,7 @@ __license__ = 'GPL v3'
 __copyright__ = '2008, Kovid Goyal kovid@kovidgoyal.net'
 __docformat__ = 'restructuredtext en'
 __appname__ = 'calibre'
-__version__ = '0.7.29'
+__version__ = '0.7.30'
 __author__ = "Kovid Goyal <kovid@kovidgoyal.net>"
 
 import re
@@ -457,7 +457,7 @@ from calibre.devices.blackberry.driver import BLACKBERRY
 from calibre.devices.cybook.driver import CYBOOK, ORIZON
 from calibre.devices.eb600.driver import EB600, COOL_ER, SHINEBOOK, \
         POCKETBOOK360, GER2, ITALICA, ECLICTO, DBOOK, INVESBOOK, \
-        BOOQ, ELONEX, POCKETBOOK301, MENTOR
+        BOOQ, ELONEX, POCKETBOOK301, MENTOR, POCKETBOOK602
 from calibre.devices.iliad.driver import ILIAD
 from calibre.devices.irexdr.driver import IREXDR1000, IREXDR800
 from calibre.devices.jetbook.driver import JETBOOK, MIBUK, JETBOOK_MINI
@@ -476,7 +476,7 @@ from calibre.devices.teclast.driver import TECLAST_K3, NEWSMY, IPAPYRUS, \
         SOVOS, PICO
 from calibre.devices.sne.driver import SNE
 from calibre.devices.misc import PALMPRE, AVANT, SWEEX, PDNOVEL, KOGAN, \
-        GEMEI, VELOCITYMICRO, PDNOVEL_KOBO, Q600
+        GEMEI, VELOCITYMICRO, PDNOVEL_KOBO, Q600, LUMIREAD
 from calibre.devices.folder_device.driver import FOLDER_DEVICE_FOR_CONFIG
 from calibre.devices.kobo.driver import KOBO
 
@@ -547,6 +547,7 @@ plugins += [
         SHINEBOOK,
         POCKETBOOK360,
         POCKETBOOK301,
+        POCKETBOOK602,
         KINDLE,
         KINDLE2,
         KINDLE_DX,
@@ -599,6 +600,7 @@ plugins += [
         GEMEI,
         VELOCITYMICRO,
         PDNOVEL_KOBO,
+        LUMIREAD,
         ITUNES,
         ]
 plugins += [x for x in list(locals().values()) if isinstance(x, type) and \
@@ -227,4 +227,22 @@ class POCKETBOOK301(USBMS):
     PRODUCT_ID = [0x301]
     BCD = [0x132]
 
+class POCKETBOOK602(USBMS):
+
+    name = 'PocketBook Pro 602 Device Interface'
+    description = _('Communicate with the PocketBook 602 reader.')
+    author = 'Kovid Goyal'
+    supported_platforms = ['windows', 'osx', 'linux']
+    FORMATS = ['epub', 'fb2', 'prc', 'mobi', 'pdf', 'djvu', 'rtf', 'chm',
+            'doc', 'tcr', 'txt']
+
+    EBOOK_DIR_MAIN = 'books'
+    SUPPORTS_SUB_DIRS = True
+
+    VENDOR_ID = [0x0525]
+    PRODUCT_ID = [0xa4a5]
+    BCD = [0x0324]
+
+    VENDOR_NAME = ''
+    WINDOWS_MAIN_MEM = WINDOWS_CARD_A_MEM = 'PB602'
 
@@ -174,3 +174,33 @@ class GEMEI(USBMS):
     EBOOK_DIR_MAIN = 'eBooks'
     SUPPORTS_SUB_DIRS = True
 
+class LUMIREAD(USBMS):
+    name = 'Acer Lumiread Device Interface'
+    gui_name = 'Lumiread'
+    description = _('Communicate with the Acer Lumiread')
+    author = 'Kovid Goyal'
+    supported_platforms = ['windows', 'osx', 'linux']
+
+    # Ordered list of supported formats
+    FORMATS = ['epub', 'pdf', 'mobi', 'chm', 'txt', 'doc', 'docx', 'rtf']
+
+    VENDOR_ID = [0x1025]
+    PRODUCT_ID = [0x048d]
+    BCD = [0x323]
+
+    EBOOK_DIR_MAIN = EBOOK_DIR_CARD_A = 'books'
+    SUPPORTS_SUB_DIRS = True
+
+    THUMBNAIL_HEIGHT = 200
+
+    def upload_cover(self, path, filename, metadata, filepath):
+        if metadata.thumbnail and metadata.thumbnail[-1]:
+            cfilepath = filepath.replace('/', os.sep)
+            cfilepath = cfilepath.replace(os.sep+'books'+os.sep,
+                    os.sep+'covers'+os.sep, 1)
+            pdir = os.path.dirname(cfilepath)
+            if not os.exists(pdir):
+                os.makedirs(pdir)
+            with open(cfilepath+'.jpg', 'wb') as f:
+                f.write(metadata.thumbnail[-1])
+
src/calibre/ebooks/iterator/__init__.py (new file, 9 lines)
@@ -0,0 +1,9 @@
+#!/usr/bin/env python
+# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai
+
+__license__ = 'GPL v3'
+__copyright__ = '2010, Kovid Goyal <kovid@kovidgoyal.net>'
+__docformat__ = 'restructuredtext en'
+
+
+
@@ -475,7 +475,14 @@ class MobiReader(object):
         self.processed_html = self.processed_html.replace('\r\n', '\n')
         self.processed_html = self.processed_html.replace('> <', '>\n<')
         self.processed_html = self.processed_html.replace('<mbp: ', '<mbp:')
-        self.processed_html = re.sub(r'<?xml[^>]*>', '', self.processed_html)
+        self.processed_html = re.sub(r'<\?xml[^>]*>', '', self.processed_html)
+        # Swap inline and block level elements, and order block level elements according to priority
+        # - lxml and beautifulsoup expect/assume a specific order based on xhtml spec
+        self.processed_html = re.sub(r'(?i)(?P<styletags>(<(h\d+|i|b|u|em|small|big|strong|tt)>\s*){1,})(?P<para><p[^>]*>)', '\g<para>'+'\g<styletags>', self.processed_html)
+        self.processed_html = re.sub(r'(?i)(?P<para></p[^>]*>)\s*(?P<styletags>(</(h\d+|i|b|u|em|small|big|strong|tt)>\s*){1,})', '\g<styletags>'+'\g<para>', self.processed_html)
+        self.processed_html = re.sub(r'(?i)(?P<blockquote>(</blockquote[^>]*>\s*){1,})(?P<para></p[^>]*>)', '\g<para>'+'\g<blockquote>', self.processed_html)
+        self.processed_html = re.sub(r'(?i)(?P<para><p[^>]*>)\s*(?P<blockquote>(<blockquote[^>]*>\s*){1,})', '\g<blockquote>'+'\g<para>', self.processed_html)
+
 
     def remove_random_bytes(self, html):
         return re.sub('\x14|\x15|\x19|\x1c|\x1d|\xef|\x12|\x13|\xec|\x08',
@@ -55,18 +55,31 @@ class SVGRasterizer(object):
             self.rasterize_cover()
 
     def rasterize_svg(self, elem, width=0, height=0, format='PNG'):
+        view_box = elem.get('viewBox', elem.get('viewbox', None))
+        sizes = None
+        logger = self.oeb.logger
+
+        if view_box is not None:
+            box = [float(x) for x in view_box.split()]
+            sizes = [box[2]-box[0], box[3] - box[1]]
+            for image in elem.xpath('descendant::*[local-name()="image" and '
+                    '@height and contains(@height, "%")]'):
+                logger.info('Found SVG image height in %, trying to convert...')
+                try:
+                    h = float(image.get('height').replace('%', ''))/100.
+                    image.set('height', str(h*sizes[1]))
+                except:
+                    logger.exception('Failed to convert percentage height:',
+                            image.get('height'))
+
         data = QByteArray(xml2str(elem, with_tail=False))
         svg = QSvgRenderer(data)
         size = svg.defaultSize()
-        view_box = elem.get('viewBox', elem.get('viewbox', None))
-        if size.width() == 100 and size.height() == 100 \
-                and view_box is not None:
-            box = [float(x) for x in view_box.split()]
-            size.setWidth(box[2] - box[0])
-            size.setHeight(box[3] - box[1])
+        if size.width() == 100 and size.height() == 100 and sizes:
+            size.setWidth(sizes[0])
+            size.setHeight(sizes[1])
         if width or height:
             size.scale(width, height, Qt.KeepAspectRatio)
-        logger = self.oeb.logger
         logger.info('Rasterizing %r to %dx%d'
                     % (elem, size.width(), size.height()))
         image = QImage(size, QImage.Format_ARGB32_Premultiplied)
@@ -81,7 +81,9 @@ def txt2rtf(text):
     buf = cStringIO.StringIO()
     for x in text:
         val = ord(x)
-        if val <= 127:
+        if val == 160:
+            buf.write('\\~')
+        elif val <= 127:
             buf.write(x)
         else:
             repl = ascii_text(x)
@@ -191,6 +193,10 @@ class RTFMLizer(object):
     def dump_text(self, elem, stylizer, tag_stack=[]):
         if not isinstance(elem.tag, basestring) \
            or namespace(elem.tag) != XHTML_NS:
+            p = elem.getparent()
+            if p is not None and isinstance(p.tag, basestring) and namespace(p.tag) == XHTML_NS \
+                    and elem.tail:
+                return elem.tail
             return u''
 
         text = u''
@@ -155,6 +155,10 @@ class TXTMLizer(object):
 
         if not isinstance(elem.tag, basestring) \
            or namespace(elem.tag) != XHTML_NS:
+            p = elem.getparent()
+            if p is not None and isinstance(p.tag, basestring) and namespace(p.tag) == XHTML_NS \
+                    and elem.tail:
+                return [elem.tail]
             return ['']
 
         text = ['']
@@ -89,14 +89,18 @@ class AddAction(InterfaceAction):
             self.gui.library_view.model().db.import_book(MetaInformation(None), [])
         self.gui.library_view.model().books_added(num)
 
-    def add_isbns(self, isbns):
+    def add_isbns(self, books):
         from calibre.ebooks.metadata import MetaInformation
         ids = set([])
-        for x in isbns:
+        for x in books:
             mi = MetaInformation(None)
-            mi.isbn = x
-            ids.add(self.gui.library_view.model().db.import_book(mi, []))
-        self.gui.library_view.model().books_added(len(isbns))
+            mi.isbn = x['isbn']
+            db = self.gui.library_view.model().db
+            if x['path'] is not None:
+                ids.add(db.import_book(mi, [x['path']]))
+            else:
+                ids.add(db.import_book(mi, []))
+        self.gui.library_view.model().books_added(len(books))
         self.gui.iactions['Edit Metadata'].do_download_metadata(ids)
 
 
@@ -150,7 +154,7 @@ class AddAction(InterfaceAction):
         from calibre.gui2.dialogs.add_from_isbn import AddFromISBN
         d = AddFromISBN(self.gui)
         if d.exec_() == d.Accepted:
-            self.add_isbns(d.isbns)
+            self.add_isbns(d.books)
 
     def add_books(self, *args):
         '''
@@ -165,6 +165,11 @@ class ConvertAction(InterfaceAction):
         if job.failed:
             self.gui.job_exception(job)
             return
+        fmtf = temp_files[-1].name
+        if os.stat(fmtf).st_size < 1:
+            raise Exception(_('Empty output file, '
+                'probably the conversion process crashed'))
+
         data = open(temp_files[-1].name, 'rb')
         self.gui.library_view.model().db.add_format(book_id, \
                 fmt, data, index_is_id=True)
@@ -3,11 +3,8 @@ __license__ = 'GPL v3'
 __copyright__ = '2008, Kovid Goyal <kovid at kovidgoyal.net>'
 
 # Imports {{{
-import os, traceback, Queue, time, socket, cStringIO, re, sys
-from threading import Thread, RLock
-from itertools import repeat
-from functools import partial
-from binascii import unhexlify
+import os, traceback, Queue, time, cStringIO, re, sys
+from threading import Thread
 
 from PyQt4.Qt import QMenu, QAction, QActionGroup, QIcon, SIGNAL, \
                     Qt, pyqtSignal, QDialog, QMessageBox
@@ -25,8 +22,6 @@ from calibre.ebooks.metadata import authors_to_string
 from calibre import preferred_encoding, prints, force_unicode
 from calibre.utils.filenames import ascii_filename
 from calibre.devices.errors import FreeSpaceError
-from calibre.utils.smtp import compose_mail, sendmail, extract_email_address, \
-        config as email_config
 from calibre.devices.apple.driver import ITUNES_ASYNC
 from calibre.devices.folder_device.driver import FOLDER_DEVICE
 from calibre.ebooks.metadata.meta import set_metadata
@@ -591,64 +586,6 @@ class DeviceMenu(QMenu): # {{{
 
 # }}}
 
-class Emailer(Thread): # {{{
-
-    def __init__(self, timeout=60):
-        Thread.__init__(self)
-        self.setDaemon(True)
-        self.job_lock = RLock()
-        self.jobs = []
-        self._run = True
-        self.timeout = timeout
-
-    def run(self):
-        while self._run:
-            job = None
-            with self.job_lock:
-                if self.jobs:
-                    job = self.jobs[0]
-                    self.jobs = self.jobs[1:]
-            if job is not None:
-                self._send_mails(*job)
-            time.sleep(1)
-
-    def stop(self):
-        self._run = False
-
-    def send_mails(self, jobnames, callback, attachments, to_s, subjects,
-                   texts, attachment_names):
-        job = (jobnames, callback, attachments, to_s, subjects, texts,
-                attachment_names)
-        with self.job_lock:
-            self.jobs.append(job)
-
-    def _send_mails(self, jobnames, callback, attachments,
-                    to_s, subjects, texts, attachment_names):
-        opts = email_config().parse()
-        opts.verbose = 3 if os.environ.get('CALIBRE_DEBUG_EMAIL', False) else 0
-        from_ = opts.from_
-        if not from_:
-            from_ = 'calibre <calibre@'+socket.getfqdn()+'>'
-        results = []
-        for i, jobname in enumerate(jobnames):
-            try:
-                msg = compose_mail(from_, to_s[i], texts[i], subjects[i],
-                        open(attachments[i], 'rb'),
-                        attachment_name = attachment_names[i])
-                efrom, eto = map(extract_email_address, (from_, to_s[i]))
-                eto = [eto]
-                sendmail(msg, efrom, eto, localhost=None,
-                        verbose=opts.verbose,
-                        timeout=self.timeout, relay=opts.relay_host,
-                        username=opts.relay_username,
-                        password=unhexlify(opts.relay_password), port=opts.relay_port,
-                        encryption=opts.encryption)
-                results.append([jobname, None, None])
-            except Exception, e:
-                results.append([jobname, e, traceback.format_exc()])
-        callback(results)
-
-# }}}
 
 class DeviceMixin(object): # {{{
 
|
|||||||
self.device_error_dialog = error_dialog(self, _('Error'),
|
self.device_error_dialog = error_dialog(self, _('Error'),
|
||||||
_('Error communicating with device'), ' ')
|
_('Error communicating with device'), ' ')
|
||||||
self.device_error_dialog.setModal(Qt.NonModal)
|
self.device_error_dialog.setModal(Qt.NonModal)
|
||||||
self.emailer = Emailer()
|
|
||||||
self.emailer.start()
|
|
||||||
self.device_manager = DeviceManager(Dispatcher(self.device_detected),
|
self.device_manager = DeviceManager(Dispatcher(self.device_detected),
|
||||||
self.job_manager, Dispatcher(self.status_bar.show_message))
|
self.job_manager, Dispatcher(self.status_bar.show_message))
|
||||||
self.device_manager.start()
|
self.device_manager.start()
|
||||||
@@ -911,124 +846,6 @@ class DeviceMixin(object): # {{{
         fmts = [x.strip().lower() for x in fmts.split(',')]
         self.send_by_mail(to, fmts, delete)
 
-    def send_by_mail(self, to, fmts, delete_from_library, send_ids=None,
-            do_auto_convert=True, specific_format=None):
-        ids = [self.library_view.model().id(r) for r in self.library_view.selectionModel().selectedRows()] if send_ids is None else send_ids
-        if not ids or len(ids) == 0:
-            return
-        files, _auto_ids = self.library_view.model().get_preferred_formats_from_ids(ids,
-                                    fmts, set_metadata=True,
-                                    specific_format=specific_format,
-                                    exclude_auto=do_auto_convert)
-        if do_auto_convert:
-            nids = list(set(ids).difference(_auto_ids))
-            ids = [i for i in ids if i in nids]
-        else:
-            _auto_ids = []
-
-        full_metadata = self.library_view.model().metadata_for(ids)
-
-        bad, remove_ids, jobnames = [], [], []
-        texts, subjects, attachments, attachment_names = [], [], [], []
-        for f, mi, id in zip(files, full_metadata, ids):
-            t = mi.title
-            if not t:
-                t = _('Unknown')
-            if f is None:
-                bad.append(t)
-            else:
-                remove_ids.append(id)
-                jobnames.append(u'%s:%s'%(id, t))
-                attachments.append(f)
-                subjects.append(_('E-book:')+ ' '+t)
-                a = authors_to_string(mi.authors if mi.authors else \
-                        [_('Unknown')])
-                texts.append(_('Attached, you will find the e-book') + \
-                        '\n\n' + t + '\n\t' + _('by') + ' ' + a + '\n\n' + \
-                        _('in the %s format.') %
-                        os.path.splitext(f)[1][1:].upper())
-                prefix = ascii_filename(t+' - '+a)
-                if not isinstance(prefix, unicode):
-                    prefix = prefix.decode(preferred_encoding, 'replace')
-                attachment_names.append(prefix + os.path.splitext(f)[1])
-        remove = remove_ids if delete_from_library else []
-
-        to_s = list(repeat(to, len(attachments)))
-        if attachments:
-            self.emailer.send_mails(jobnames,
-                    Dispatcher(partial(self.emails_sent, remove=remove)),
-                    attachments, to_s, subjects, texts, attachment_names)
-            self.status_bar.show_message(_('Sending email to')+' '+to, 3000)
-
-        auto = []
-        if _auto_ids != []:
-            for id in _auto_ids:
-                if specific_format == None:
-                    formats = [f.lower() for f in self.library_view.model().db.formats(id, index_is_id=True).split(',')]
-                    formats = formats if formats != None else []
-                    if list(set(formats).intersection(available_input_formats())) != [] and list(set(fmts).intersection(available_output_formats())) != []:
-                        auto.append(id)
-                    else:
-                        bad.append(self.library_view.model().db.title(id, index_is_id=True))
-                else:
-                    if specific_format in list(set(fmts).intersection(set(available_output_formats()))):
-                        auto.append(id)
-                    else:
-                        bad.append(self.library_view.model().db.title(id, index_is_id=True))
-
-        if auto != []:
-            format = specific_format if specific_format in list(set(fmts).intersection(set(available_output_formats()))) else None
-            if not format:
-                for fmt in fmts:
-                    if fmt in list(set(fmts).intersection(set(available_output_formats()))):
-                        format = fmt
-                        break
-            if format is None:
-                bad += auto
-            else:
-                autos = [self.library_view.model().db.title(id, index_is_id=True) for id in auto]
-                if self.auto_convert_question(
-                    _('Auto convert the following books before sending via '
-                        'email?'), autos):
-                    self.iactions['Convert Books'].auto_convert_mail(to, fmts, delete_from_library, auto, format)
-
-        if bad:
-            bad = '\n'.join('%s'%(i,) for i in bad)
-            d = warning_dialog(self, _('No suitable formats'),
-                    _('Could not email the following books '
-                    'as no suitable formats were found:'), bad)
-            d.exec_()
-
-    def emails_sent(self, results, remove=[]):
-        errors, good = [], []
-        for jobname, exception, tb in results:
-            title = jobname.partition(':')[-1]
-            if exception is not None:
-                errors.append(list(map(force_unicode, [title, exception, tb])))
-            else:
-                good.append(title)
-        if errors:
-            errors = u'\n'.join([
-                u'%s\n\n%s\n%s\n' %
-                (title, e, tb) for \
-                        title, e, tb in errors
-                ])
-            error_dialog(self, _('Failed to email books'),
-                    _('Failed to email the following books:'),
-                    '%s'%errors, show=True
-                    )
-        else:
-            self.status_bar.show_message(_('Sent by email:') + ', '.join(good),
-                    5000)
-            if remove:
-                try:
-                    self.library_view.model().delete_books_by_id(remove)
-                except:
-                    # Probably the user deleted the files, in any case, failing
-                    # to delete the book is not catastrophic
-                    traceback.print_exc()
-
-
     def cover_to_thumbnail(self, data):
         ht = self.device_manager.device.THUMBNAIL_HEIGHT \
                 if self.device_manager else DevicePlugin.THUMBNAIL_HEIGHT
@@ -1037,36 +854,6 @@ class DeviceMixin(object): # {{{
         except:
             pass
 
-    def email_news(self, id):
-        opts = email_config().parse()
-        accounts = [(account, [x.strip().lower() for x in x[0].split(',')])
-                for account, x in opts.accounts.items() if x[1]]
-        sent_mails = []
-        for account, fmts in accounts:
-            files, auto = self.library_view.model().\
-                    get_preferred_formats_from_ids([id], fmts)
-            files = [f for f in files if f is not None]
-            if not files:
-                continue
-            attachment = files[0]
-            mi = self.library_view.model().db.get_metadata(id,
-                    index_is_id=True)
-            to_s = [account]
-            subjects = [_('News:')+' '+mi.title]
-            texts = [_('Attached is the')+' '+mi.title]
-            attachment_names = [ascii_filename(mi.title)+os.path.splitext(attachment)[1]]
-            attachments = [attachment]
-            jobnames = ['%s:%s'%(id, mi.title)]
-            remove = [id] if config['delete_news_from_library_on_upload']\
-                    else []
-            self.emailer.send_mails(jobnames,
-                    Dispatcher(partial(self.emails_sent, remove=remove)),
-                    attachments, to_s, subjects, texts, attachment_names)
-            sent_mails.append(to_s[0])
-        if sent_mails:
-            self.status_bar.show_message(_('Sent news to')+' '+\
-                    ', '.join(sent_mails), 3000)
-
     def sync_catalogs(self, send_ids=None, do_auto_convert=True):
         if self.device_connected:
             settings = self.device_manager.device.settings()
@@ -5,10 +5,13 @@ __license__ = 'GPL v3'
 __copyright__ = '2010, Kovid Goyal <kovid@kovidgoyal.net>'
 __docformat__ = 'restructuredtext en'
 
+import os
+
 from PyQt4.Qt import QDialog, QApplication
 
 from calibre.gui2.dialogs.add_from_isbn_ui import Ui_Dialog
 from calibre.ebooks.metadata import check_isbn
+from calibre.constants import iswindows
 
 class AddFromISBN(QDialog, Ui_Dialog):
 
@@ -16,7 +19,12 @@ class AddFromISBN(QDialog, Ui_Dialog):
         QDialog.__init__(self, parent)
         self.setupUi(self)
 
+        path = r'C:\Users\kovid\e-books\some_book.epub' if iswindows else \
+                '/Users/kovid/e-books/some_book.epub'
+        self.label.setText(unicode(self.label.text())%path)
+
         self.isbns = []
+        self.books = []
         self.paste_button.clicked.connect(self.paste)
 
     def paste(self, *args):
@@ -30,11 +38,24 @@ class AddFromISBN(QDialog, Ui_Dialog):
 
     def accept(self, *args):
         for line in unicode(self.isbn_box.toPlainText()).strip().splitlines():
-            if line:
-                isbn = check_isbn(line)
+            line = line.strip()
+            if not line:
+                continue
+            parts = line.split('>>')
+            if len(parts) > 2:
+                parts = [parts[0] + '>>'.join(parts[1:])]
+            parts = [x.strip() for x in parts]
+            if not parts[0]:
+                continue
+            isbn = check_isbn(parts[0])
             if isbn is not None:
                 isbn = isbn.upper()
                 if isbn not in self.isbns:
                     self.isbns.append(isbn)
+                    book = {'isbn': isbn, 'path': None}
+                    if len(parts) > 1 and parts[1] and \
+                            os.access(parts[1], os.R_OK) and os.path.isfile(parts[1]):
+                        book['path'] = parts[1]
+                    self.books.append(book)
         QDialog.accept(self, *args)
 
@@ -24,7 +24,10 @@
    <item row="0" column="1">
     <widget class="QLabel" name="label">
      <property name="text">
-      <string><p>Enter a list of ISBNs in the box to the left, one per line. calibre will automatically create entries for books based on the ISBN and download metadata and covers for them.<p>Any invalid ISBNs in the list will be ignored.</string>
+      <string><p>Enter a list of ISBNs in the box to the left, one per line. calibre will automatically create entries for books based on the ISBN and download metadata and covers for them.</p>
+<p>Any invalid ISBNs in the list will be ignored.</p>
+<p>You can also specify a file that will be added with each ISBN. To do this enter the full path to the file after a <code>>></code>. For example:</p>
+<p><code>9788842915232 >> %s</code></p></string>
      </property>
      <property name="wordWrap">
       <bool>true</bool>
src/calibre/gui2/email.py (new file, 333 lines)
@@ -0,0 +1,333 @@
+#!/usr/bin/env python
+# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai
+from __future__ import print_function
+
+__license__ = 'GPL v3'
+__copyright__ = '2010, Kovid Goyal <kovid@kovidgoyal.net>'
+__docformat__ = 'restructuredtext en'
+
+import os, socket, time, cStringIO
+from threading import Thread
+from Queue import Queue
+from binascii import unhexlify
+from functools import partial
+from itertools import repeat
+
+from calibre.utils.smtp import compose_mail, sendmail, extract_email_address, \
+        config as email_config
+from calibre.utils.filenames import ascii_filename
+from calibre.utils.ipc.job import BaseJob
+from calibre.ptempfile import PersistentTemporaryFile
+from calibre.customize.ui import available_input_formats, available_output_formats
+from calibre.ebooks.metadata import authors_to_string
+from calibre.constants import preferred_encoding
+from calibre.gui2 import config, Dispatcher, warning_dialog
+
+class EmailJob(BaseJob): # {{{
+
+    def __init__(self, callback, description, attachment, aname, to, subject, text, job_manager):
+        BaseJob.__init__(self, description)
+        self.exception = None
+        self.job_manager = job_manager
+        self.email_args = (attachment, aname, to, subject, text)
+        self.email_sent_callback = callback
+        self.log_path = None
+        self._log_file = cStringIO.StringIO()
+        self._log_file.write(self.description.encode('utf-8') + '\n')
+
+    @property
+    def log_file(self):
+        if self.log_path is not None:
+            return open(self.log_path, 'rb')
+        return cStringIO.StringIO(self._log_file.getvalue())
+
+    def start_work(self):
+        self.start_time = time.time()
+        self.job_manager.changed_queue.put(self)
+
+    def job_done(self):
+        self.duration = time.time() - self.start_time
+        self.percent = 1
+        # Dump log onto disk
+        lf = PersistentTemporaryFile('email_log')
+        lf.write(self._log_file.getvalue())
+        lf.close()
+        self.log_path = lf.name
+        self._log_file.close()
+        self._log_file = None
+
+        self.job_manager.changed_queue.put(self)
+
+    def log_write(self, what):
+        self._log_file.write(what)
+
+# }}}
+
+class Emailer(Thread): # {{{
+
+    MAX_RETRIES = 1
+
+    def __init__(self, job_manager):
+        Thread.__init__(self)
+        self.daemon = True
+        self.jobs = Queue()
+        self.job_manager = job_manager
+        self._run = True
+        self.calculate_rate_limit()
+
+        self.last_send_time = time.time() - self.rate_limit
+
+    def calculate_rate_limit(self):
+        self.rate_limit = 1
+        opts = email_config().parse()
+        rh = opts.relay_host
+        if rh and (
+                'gmail.com' in rh or 'live.com' in rh):
+            self.rate_limit = 301
+
+    def stop(self):
+        self._run = False
+        self.jobs.put(None)
+
+    def run(self):
+        while self._run:
+            try:
+                job = self.jobs.get()
+            except:
+                break
+            if job is None or not self._run:
+                break
+            try_count = 0
+            failed, exc = False, None
+            job.start_work()
+            if job.kill_on_start:
+                job.log_write('Aborted\n')
+                job.failed = failed
+                job.killed = True
+                job.job_done()
+                continue
+
+            while try_count <= self.MAX_RETRIES:
+                failed = False
+                if try_count > 0:
+                    job.log_write('\nRetrying in %d seconds...\n' %
+                            self.rate_limit)
+                try:
+                    self.sendmail(job)
+                    break
+                except Exception, e:
+                    if not self._run:
+                        return
+                    import traceback
+                    failed = True
+                    exc = e
+                    job.log_write('\nSending failed...\n')
+                    job.log_write(traceback.format_exc())
+
+                try_count += 1
+
+            if not self._run:
+                break
+
+            job.failed = failed
+            job.exception = exc
+            job.job_done()
+            try:
+                job.email_sent_callback(job)
+            except:
+                import traceback
+                traceback.print_exc()
+
+    def send_mails(self, jobnames, callback, attachments, to_s, subjects,
+                   texts, attachment_names):
+        for name, attachment, to, subject, text, aname in zip(jobnames,
+                attachments, to_s, subjects, texts, attachment_names):
+            description = _('Email %s to %s') % (name, to)
+            job = EmailJob(callback, description, attachment, aname, to,
+                    subject, text, self.job_manager)
+            self.job_manager.add_job(job)
+            self.jobs.put(job)
+
+    def sendmail(self, job):
+        while time.time() - self.last_send_time <= self.rate_limit:
+            time.sleep(1)
+        try:
+            opts = email_config().parse()
+            from_ = opts.from_
+            if not from_:
+                from_ = 'calibre <calibre@'+socket.getfqdn()+'>'
+            attachment, aname, to, subject, text = job.email_args
+            msg = compose_mail(from_, to, text, subject, open(attachment, 'rb'),
+                    aname)
+            efrom, eto = map(extract_email_address, (from_, to))
+            eto = [eto]
+            sendmail(msg, efrom, eto, localhost=None,
+                        verbose=1,
+                        relay=opts.relay_host,
+                        username=opts.relay_username,
+                        password=unhexlify(opts.relay_password), port=opts.relay_port,
+                        encryption=opts.encryption,
+                        debug_output=partial(print, file=job._log_file))
+        finally:
+            self.last_send_time = time.time()
+
+    def email_news(self, mi, remove, get_fmts, done):
+        opts = email_config().parse()
+        accounts = [(account, [x.strip().lower() for x in x[0].split(',')])
+                for account, x in opts.accounts.items() if x[1]]
+        sent_mails = []
+        for i, x in enumerate(accounts):
+            account, fmts = x
+            files = get_fmts(fmts)
+            files = [f for f in files if f is not None]
+            if not files:
+                continue
+            attachment = files[0]
+            to_s = [account]
+            subjects = [_('News:')+' '+mi.title]
+            texts = [
+                    _('Attached is the %s periodical downloaded by calibre.')
+                        % (mi.title,)
+                    ]
+            attachment_names = [ascii_filename(mi.title)+os.path.splitext(attachment)[1]]
+            attachments = [attachment]
+            jobnames = [mi.title]
+            do_remove = []
+            if i == len(accounts) - 1:
+                do_remove = remove
+            self.send_mails(jobnames,
+                    Dispatcher(partial(done, remove=do_remove)),
+                    attachments, to_s, subjects, texts, attachment_names)
+            sent_mails.append(to_s[0])
+        return sent_mails
+
+
+# }}}
+
+class EmailMixin(object): # {{{
+
+    def __init__(self):
+        self.emailer = Emailer(self.job_manager)
+        self.emailer.start()
+
+    def send_by_mail(self, to, fmts, delete_from_library, send_ids=None,
+            do_auto_convert=True, specific_format=None):
+        ids = [self.library_view.model().id(r) for r in self.library_view.selectionModel().selectedRows()] if send_ids is None else send_ids
+        if not ids or len(ids) == 0:
+            return
+        files, _auto_ids = self.library_view.model().get_preferred_formats_from_ids(ids,
+                                    fmts, set_metadata=True,
+                                    specific_format=specific_format,
+                                    exclude_auto=do_auto_convert)
+        if do_auto_convert:
+            nids = list(set(ids).difference(_auto_ids))
+            ids = [i for i in ids if i in nids]
+        else:
+            _auto_ids = []
+
+        full_metadata = self.library_view.model().metadata_for(ids)
+
+        bad, remove_ids, jobnames = [], [], []
+        texts, subjects, attachments, attachment_names = [], [], [], []
+        for f, mi, id in zip(files, full_metadata, ids):
+            t = mi.title
+            if not t:
+                t = _('Unknown')
+            if f is None:
+                bad.append(t)
+            else:
+                remove_ids.append(id)
+                jobnames.append(t)
+                attachments.append(f)
+                subjects.append(_('E-book:')+ ' '+t)
+                a = authors_to_string(mi.authors if mi.authors else \
+                        [_('Unknown')])
+                texts.append(_('Attached, you will find the e-book') + \
+                        '\n\n' + t + '\n\t' + _('by') + ' ' + a + '\n\n' + \
+                        _('in the %s format.') %
+                        os.path.splitext(f)[1][1:].upper())
+                prefix = ascii_filename(t+' - '+a)
+                if not isinstance(prefix, unicode):
+                    prefix = prefix.decode(preferred_encoding, 'replace')
+                attachment_names.append(prefix + os.path.splitext(f)[1])
+        remove = remove_ids if delete_from_library else []
+
+        to_s = list(repeat(to, len(attachments)))
+        if attachments:
+            self.emailer.send_mails(jobnames,
+                    Dispatcher(partial(self.email_sent, remove=remove)),
+                    attachments, to_s, subjects, texts, attachment_names)
+            self.status_bar.show_message(_('Sending email to')+' '+to, 3000)
+
+        auto = []
+        if _auto_ids != []:
+            for id in _auto_ids:
+                if specific_format == None:
+                    formats = [f.lower() for f in self.library_view.model().db.formats(id, index_is_id=True).split(',')]
+                    formats = formats if formats != None else []
+                    if list(set(formats).intersection(available_input_formats())) != [] and list(set(fmts).intersection(available_output_formats())) != []:
+                        auto.append(id)
+                    else:
+                        bad.append(self.library_view.model().db.title(id, index_is_id=True))
+                else:
+                    if specific_format in list(set(fmts).intersection(set(available_output_formats()))):
+                        auto.append(id)
+                    else:
+                        bad.append(self.library_view.model().db.title(id, index_is_id=True))
+
+        if auto != []:
+            format = specific_format if specific_format in list(set(fmts).intersection(set(available_output_formats()))) else None
+            if not format:
+                for fmt in fmts:
+                    if fmt in list(set(fmts).intersection(set(available_output_formats()))):
+                        format = fmt
+                        break
+            if format is None:
+                bad += auto
+            else:
+                autos = [self.library_view.model().db.title(id, index_is_id=True) for id in auto]
+                if self.auto_convert_question(
+                    _('Auto convert the following books before sending via '
+                        'email?'), autos):
+                    self.iactions['Convert Books'].auto_convert_mail(to, fmts, delete_from_library, auto, format)
+
+        if bad:
+            bad = '\n'.join('%s'%(i,) for i in bad)
+            d = warning_dialog(self, _('No suitable formats'),
+                    _('Could not email the following books '
+                    'as no suitable formats were found:'), bad)
+            d.exec_()
+
+    def email_sent(self, job, remove=[]):
+        if job.failed:
+            self.job_exception(job, dialog_title=_('Failed to email book'))
+            return
+
+        self.status_bar.show_message(job.description + ' ' + _('sent'),
+                    5000)
+        if remove:
+            try:
+                self.library_view.model().delete_books_by_id(remove)
+            except:
+                import traceback
+                # Probably the user deleted the files, in any case, failing
+                # to delete the book is not catastrophic
+                traceback.print_exc()
+
+    def email_news(self, id_):
+        mi = self.library_view.model().db.get_metadata(id_,
+                index_is_id=True)
+        remove = [id_] if config['delete_news_from_library_on_upload'] \
+                else []
+        def get_fmts(fmts):
+            files, auto = self.library_view.model().\
+                    get_preferred_formats_from_ids([id_], fmts)
+            return files
+        sent_mails = self.emailer.email_news(mi, remove,
+                get_fmts, self.email_sent)
+        if sent_mails:
+            self.status_bar.show_message(_('Sent news to')+' '+\
+                    ', '.join(sent_mails), 3000)
+
+# }}}
@@ -221,16 +221,27 @@ class JobManager(QAbstractTableModel):
         if job.duration is not None:
             return error_dialog(view, _('Cannot kill job'),
                     _('Job has already run')).exec_()
+        if isinstance(job, ParallelJob):
             self.server.kill_job(job)
+        else:
+            job.kill_on_start = True
 
     def kill_all_jobs(self):
         for job in self.jobs:
             if isinstance(job, DeviceJob) or job.duration is not None:
                 continue
+            if isinstance(job, ParallelJob):
                 self.server.kill_job(job)
+            else:
+                job.kill_on_start = True
 
     def terminate_all_jobs(self):
         self.server.killall()
+        for job in self.jobs:
+            if isinstance(job, DeviceJob) or job.duration is not None:
+                continue
+            if not isinstance(job, ParallelJob):
+                job.kill_on_start = True
 
 
 class ProgressBarDelegate(QAbstractItemDelegate):
@@ -170,6 +170,7 @@ class ConfigWidget(ConfigWidgetBase, Ui_Form):
         if not self.send_email_widget.set_email_settings(to_set):
             raise AbortCommit('abort')
         self.proxy['accounts'] = self._email_accounts.accounts
+
         return ConfigWidgetBase.commit(self)
 
     def make_default(self, *args):
@@ -188,6 +189,9 @@ class ConfigWidget(ConfigWidgetBase, Ui_Form):
         self._email_accounts.remove(idx)
         self.changed_signal.emit()
 
+    def refresh_gui(self, gui):
+        gui.emailer.calculate_rate_limit()
+
 
 if __name__ == '__main__':
     from PyQt4.Qt import QApplication
@@ -174,15 +174,18 @@ class TagsView(QTreeView): # {{{
 
     def show_context_menu(self, point):
         index = self.indexAt(point)
-        if not index.isValid():
-            return False
+        self.context_menu = QMenu(self)
+
+        if index.isValid():
             item = index.internalPointer()
             tag_name = ''
 
             if item.type == TagTreeItem.TAG:
                 tag_item = item
                 tag_name = item.tag.name
                 tag_id = item.tag.id
                 item = item.parent
 
             if item.type == TagTreeItem.CATEGORY:
                 category = unicode(item.name.toString())
                 key = item.category_key
@@ -190,7 +193,6 @@ class TagsView(QTreeView): # {{{
             if key not in self.db.field_metadata:
                 return True
 
-            self.context_menu = QMenu(self)
             # If the user right-clicked on an editable item, then offer
             # the possibility of renaming that item
             if tag_name and \
|
|||||||
for col in sorted(self.hidden_categories, cmp=lambda x,y: cmp(x.lower(), y.lower())):
|
for col in sorted(self.hidden_categories, cmp=lambda x,y: cmp(x.lower(), y.lower())):
|
||||||
m.addAction(col,
|
m.addAction(col,
|
||||||
partial(self.context_menu_handler, action='show', category=col))
|
partial(self.context_menu_handler, action='show', category=col))
|
||||||
self.context_menu.addAction(_('Show all categories'),
|
|
||||||
partial(self.context_menu_handler, action='defaults'))
|
|
||||||
|
|
||||||
# Offer specific editors for tags/series/publishers/saved searches
|
# Offer specific editors for tags/series/publishers/saved searches
|
||||||
self.context_menu.addSeparator()
|
self.context_menu.addSeparator()
|
||||||
@@ -242,6 +242,13 @@ class TagsView(QTreeView): # {{{
                 partial(self.context_menu_handler, action='manage_categories',
                         category=None))
 
+        if self.hidden_categories:
+            if not self.context_menu.isEmpty():
+                self.context_menu.addSeparator()
+            self.context_menu.addAction(_('Show all categories'),
+                        partial(self.context_menu_handler, action='defaults'))
+
+        if not self.context_menu.isEmpty():
             self.context_menu.popup(self.mapToGlobal(point))
         return True
 
@@ -794,7 +801,7 @@ class TagBrowserMixin(object): # {{{
         cc_label = None
         if category in db.field_metadata:
             cc_label = db.field_metadata[category]['label']
-            result = self.db.get_custom_items_with_ids(label=cc_label)
+            result = db.get_custom_items_with_ids(label=cc_label)
         else:
             result = []
         compare = (lambda x,y:cmp(x.lower(), y.lower()))
@@ -34,6 +34,7 @@ from calibre.gui2.update import UpdateMixin
 from calibre.gui2.main_window import MainWindow
 from calibre.gui2.layout import MainWindowMixin
 from calibre.gui2.device import DeviceMixin
+from calibre.gui2.email import EmailMixin
 from calibre.gui2.jobs import JobManager, JobsDialog, JobsButton
 from calibre.gui2.init import LibraryViewMixin, LayoutMixin
 from calibre.gui2.search_box import SearchBoxMixin, SavedSearchBoxMixin
@@ -88,7 +89,7 @@ class SystemTrayIcon(QSystemTrayIcon): # {{{
 
 # }}}
 
-class Main(MainWindow, MainWindowMixin, DeviceMixin, # {{{
+class Main(MainWindow, MainWindowMixin, DeviceMixin, EmailMixin, # {{{
         TagBrowserMixin, CoverFlowMixin, LibraryViewMixin, SearchBoxMixin,
         SavedSearchBoxMixin, SearchRestrictionMixin, LayoutMixin, UpdateMixin
         ):
@@ -141,6 +142,7 @@ class Main(MainWindow, MainWindowMixin, DeviceMixin, # {{{
         # }}}
 
         LayoutMixin.__init__(self)
+        EmailMixin.__init__(self)
         DeviceMixin.__init__(self)
 
         self.restriction_count_of_books_in_view = 0
|
|||||||
|
|
||||||
|
|
||||||
|
|
||||||
def job_exception(self, job):
|
def job_exception(self, job, dialog_title=_('Conversion Error')):
|
||||||
if not hasattr(self, '_modeless_dialogs'):
|
if not hasattr(self, '_modeless_dialogs'):
|
||||||
self._modeless_dialogs = []
|
self._modeless_dialogs = []
|
||||||
minz = self.is_minimized_to_tray
|
minz = self.is_minimized_to_tray
|
||||||
@@ -475,7 +477,7 @@ class Main(MainWindow, MainWindowMixin, DeviceMixin, # {{{
             except:
                 pass
         if not minz:
-            d = error_dialog(self, _('Conversion Error'),
+            d = error_dialog(self, dialog_title,
                 _('<b>Failed</b>')+': '+unicode(job.description),
                 det_msg=job.details)
             d.setModal(False)
|
|||||||
from calibre import prints, guess_type
|
from calibre import prints, guess_type
|
||||||
from calibre.gui2.viewer.keys import SHORTCUTS
|
from calibre.gui2.viewer.keys import SHORTCUTS
|
||||||
|
|
||||||
bookmarks = referencing = hyphenation = jquery = jquery_scrollTo = hyphenator = images =None
|
bookmarks = referencing = hyphenation = jquery = jquery_scrollTo = \
|
||||||
|
hyphenator = images = hyphen_pats = None
|
||||||
|
|
||||||
def load_builtin_fonts():
|
def load_builtin_fonts():
|
||||||
base = P('fonts/liberation/*.ttf')
|
base = P('fonts/liberation/*.ttf')
|
||||||
@@ -202,7 +203,8 @@ class Document(QWebPage):
         self.loaded_javascript = False
 
     def load_javascript_libraries(self):
-        global bookmarks, referencing, hyphenation, jquery, jquery_scrollTo, hyphenator, images
+        global bookmarks, referencing, hyphenation, jquery, jquery_scrollTo, \
+                hyphenator, images, hyphen_pats
         if self.loaded_javascript:
             return
         self.loaded_javascript = True
@@ -234,14 +236,20 @@ class Document(QWebPage):
             return l.lower().replace('_', '-')
         if hyphenator is None:
             hyphenator = P('viewer/hyphenate/Hyphenator.js', data=True).decode('utf-8')
-        self.javascript(hyphenator)
+        if hyphen_pats is None:
+            hyphen_pats = []
+            for x in glob.glob(P('viewer/hyphenate/patterns/*.js',
+                allow_user_override=False)):
+                with open(x, 'rb') as f:
+                    hyphen_pats.append(f.read().decode('utf-8'))
+            hyphen_pats = u'\n'.join(hyphen_pats)
+
+        self.javascript(hyphenator+hyphen_pats)
         p = P('viewer/hyphenate/patterns/%s.js'%lang_name(lang))
         if not os.path.exists(p):
             lang = default_lang
             p = P('viewer/hyphenate/patterns/%s.js'%lang_name(lang))
-        self.javascript(open(p, 'rb').read().decode('utf-8'))
-        self.loaded_lang = lang
+        self.loaded_lang = lang_name(lang)
 
 
     @pyqtSignature("")
     def animated_scroll_done(self):
@ -20,7 +20,7 @@ from calibre.gui2 import Application, ORG_NAME, APP_UID, choose_files, \
|
|||||||
info_dialog, error_dialog, open_url
|
info_dialog, error_dialog, open_url
|
||||||
from calibre.ebooks.oeb.iterator import EbookIterator
|
from calibre.ebooks.oeb.iterator import EbookIterator
|
||||||
from calibre.ebooks import DRMError
|
from calibre.ebooks import DRMError
|
||||||
from calibre.constants import islinux, isfreebsd
|
from calibre.constants import islinux, isfreebsd, isosx
|
||||||
from calibre.utils.config import Config, StringConfig, dynamic
|
from calibre.utils.config import Config, StringConfig, dynamic
|
||||||
from calibre.gui2.search_box import SearchBox2
|
from calibre.gui2.search_box import SearchBox2
|
||||||
from calibre.ebooks.metadata import MetaInformation
|
from calibre.ebooks.metadata import MetaInformation
|
||||||
@ -209,7 +209,10 @@ class EbookViewer(MainWindow, Ui_EbookViewer):
|
|||||||
self.toc.setVisible(False)
|
self.toc.setVisible(False)
|
||||||
self.action_quit = QAction(self)
|
self.action_quit = QAction(self)
|
||||||
self.addAction(self.action_quit)
|
self.addAction(self.action_quit)
|
||||||
self.action_quit.setShortcut(Qt.CTRL+Qt.Key_Q)
|
qs = [Qt.CTRL+Qt.Key_Q]
|
||||||
|
if isosx:
|
||||||
|
qs += [Qt.CTRL+Qt.Key_W]
|
||||||
|
self.action_quit.setShortcuts(qs)
|
||||||
self.connect(self.action_quit, SIGNAL('triggered(bool)'),
|
self.connect(self.action_quit, SIGNAL('triggered(bool)'),
|
||||||
lambda x:QApplication.instance().quit())
|
lambda x:QApplication.instance().quit())
|
||||||
self.action_copy.setDisabled(True)
|
self.action_copy.setDisabled(True)
|
||||||
|
@ -36,7 +36,7 @@
|
|||||||
<item row="2" column="0" colspan="3">
|
<item row="2" column="0" colspan="3">
|
||||||
<widget class="QLabel" name="label">
|
<widget class="QLabel" name="label">
|
||||||
<property name="text">
|
<property name="text">
|
||||||
<string>Choose a location for your books. When you add books to calibre, they will be copied here:</string>
|
<string><p>Choose a location for your books. When you add books to calibre, they will be copied here. Use an <b>empty folder</b> for a new calibre library:</string>
|
||||||
</property>
|
</property>
|
||||||
<property name="wordWrap">
|
<property name="wordWrap">
|
||||||
<bool>true</bool>
|
<bool>true</bool>
|
||||||
|
@ -73,7 +73,7 @@ class SendEmail(QWidget, Ui_Form):
|
|||||||
if opts.relay_password:
|
if opts.relay_password:
|
||||||
self.relay_password.setText(unhexlify(opts.relay_password))
|
self.relay_password.setText(unhexlify(opts.relay_password))
|
||||||
self.relay_password.textChanged.connect(self.changed)
|
self.relay_password.textChanged.connect(self.changed)
|
||||||
(self.relay_tls if opts.encryption == 'TLS' else self.relay_ssl).setChecked(True)
|
getattr(self, 'relay_'+opts.encryption.lower()).setChecked(True)
|
||||||
self.relay_tls.toggled.connect(self.changed)
|
self.relay_tls.toggled.connect(self.changed)
|
||||||
|
|
||||||
for x in ('gmail', 'hotmail'):
|
for x in ('gmail', 'hotmail'):
|
||||||
@ -210,7 +210,8 @@ class SendEmail(QWidget, Ui_Form):
|
|||||||
conf.set('relay_port', self.relay_port.value())
|
conf.set('relay_port', self.relay_port.value())
|
||||||
conf.set('relay_username', username if username else None)
|
conf.set('relay_username', username if username else None)
|
||||||
conf.set('relay_password', hexlify(password))
|
conf.set('relay_password', hexlify(password))
|
||||||
conf.set('encryption', 'TLS' if self.relay_tls.isChecked() else 'SSL')
|
conf.set('encryption', 'TLS' if self.relay_tls.isChecked() else 'SSL'
|
||||||
|
if self.relay_ssl.isChecked() else 'NONE')
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
@ -168,7 +168,7 @@
|
|||||||
</property>
|
</property>
|
||||||
</widget>
|
</widget>
|
||||||
</item>
|
</item>
|
||||||
<item row="4" column="2" colspan="2">
|
<item row="4" column="2">
|
||||||
<widget class="QRadioButton" name="relay_ssl">
|
<widget class="QRadioButton" name="relay_ssl">
|
||||||
<property name="toolTip">
|
<property name="toolTip">
|
||||||
<string>Use SSL encryption when connecting to the mail server.</string>
|
<string>Use SSL encryption when connecting to the mail server.</string>
|
||||||
@ -191,6 +191,16 @@
|
|||||||
</property>
|
</property>
|
||||||
</spacer>
|
</spacer>
|
||||||
</item>
|
</item>
|
||||||
|
<item row="4" column="3">
|
||||||
|
<widget class="QRadioButton" name="relay_none">
|
||||||
|
<property name="toolTip">
|
||||||
|
<string>WARNING: Using no encryption is highly insecure</string>
|
||||||
|
</property>
|
||||||
|
<property name="text">
|
||||||
|
<string>&None</string>
|
||||||
|
</property>
|
||||||
|
</widget>
|
||||||
|
</item>
|
||||||
</layout>
|
</layout>
|
||||||
</widget>
|
</widget>
|
||||||
</item>
|
</item>
|
||||||
|
@ -333,9 +333,7 @@ class LibraryDatabase2(LibraryDatabase, SchemaUpgrade, CustomColumns):
|
|||||||
self.dirtied_cache = set([x[0] for x in d])
|
self.dirtied_cache = set([x[0] for x in d])
|
||||||
|
|
||||||
self.refresh_ondevice = functools.partial(self.data.refresh_ondevice, self)
|
self.refresh_ondevice = functools.partial(self.data.refresh_ondevice, self)
|
||||||
st = time.time()
|
|
||||||
self.refresh()
|
self.refresh()
|
||||||
print 'refresh time:', time.time() - st
|
|
||||||
self.last_update_check = self.last_modified()
|
self.last_update_check = self.last_modified()
|
||||||
|
|
||||||
|
|
||||||
|
@ -5,9 +5,8 @@ __license__ = 'GPL v3'
|
|||||||
__copyright__ = '2010, Kovid Goyal <kovid@kovidgoyal.net>'
|
__copyright__ = '2010, Kovid Goyal <kovid@kovidgoyal.net>'
|
||||||
__docformat__ = 'restructuredtext en'
|
__docformat__ = 'restructuredtext en'
|
||||||
|
|
||||||
import operator, os, json
|
import operator, os, json, re
|
||||||
from binascii import hexlify, unhexlify
|
from binascii import hexlify, unhexlify
|
||||||
from urllib import quote, unquote
|
|
||||||
|
|
||||||
import cherrypy
|
import cherrypy
|
||||||
|
|
||||||
@ -21,6 +20,7 @@ from calibre.utils.magick import Image
|
|||||||
from calibre.library.comments import comments_to_html
|
from calibre.library.comments import comments_to_html
|
||||||
from calibre.library.server import custom_fields_to_display
|
from calibre.library.server import custom_fields_to_display
|
||||||
from calibre.library.field_metadata import category_icon_map
|
from calibre.library.field_metadata import category_icon_map
|
||||||
|
from calibre.library.server.utils import quote, unquote
|
||||||
|
|
||||||
def render_book_list(ids, prefix, suffix=''): # {{{
|
def render_book_list(ids, prefix, suffix=''): # {{{
|
||||||
pages = []
|
pages = []
|
||||||
@ -401,6 +401,16 @@ class BrowseServer(object):
|
|||||||
|
|
||||||
script = 'true'
|
script = 'true'
|
||||||
|
|
||||||
|
if len(items) == 1:
|
||||||
|
# Only one item in category, go directly to book list
|
||||||
|
prefix = '' if self.is_wsgi else self.opts.url_prefix
|
||||||
|
html = get_category_items(category, items,
|
||||||
|
self.search_restriction_name, datatype,
|
||||||
|
self.opts.url_prefix)
|
||||||
|
href = re.search(r'<a href="([^"]+)"', html)
|
||||||
|
if href is not None:
|
||||||
|
raise cherrypy.HTTPRedirect(prefix+href.group(1))
|
||||||
|
|
||||||
if len(items) <= self.opts.max_opds_ungrouped_items:
|
if len(items) <= self.opts.max_opds_ungrouped_items:
|
||||||
script = 'false'
|
script = 'false'
|
||||||
items = get_category_items(category, items,
|
items = get_category_items(category, items,
|
||||||
|
@ -6,10 +6,11 @@ __copyright__ = '2010, Kovid Goyal <kovid@kovidgoyal.net>'
|
|||||||
__docformat__ = 'restructuredtext en'
|
__docformat__ = 'restructuredtext en'
|
||||||
|
|
||||||
import time, sys
|
import time, sys
|
||||||
|
from urllib import quote as quote_, unquote as unquote_
|
||||||
|
|
||||||
import cherrypy
|
import cherrypy
|
||||||
|
|
||||||
from calibre import strftime as _strftime, prints
|
from calibre import strftime as _strftime, prints, isbytestring
|
||||||
from calibre.utils.date import now as nowf
|
from calibre.utils.date import now as nowf
|
||||||
from calibre.utils.config import tweaks
|
from calibre.utils.config import tweaks
|
||||||
|
|
||||||
@ -81,3 +82,14 @@ def format_tag_string(tags, sep, ignore_max=False, no_tag_count=False):
|
|||||||
return u'%s:&:%s'%(tweaks['max_content_server_tags_shown'],
|
return u'%s:&:%s'%(tweaks['max_content_server_tags_shown'],
|
||||||
', '.join(tlist)) if tlist else ''
|
', '.join(tlist)) if tlist else ''
|
||||||
|
|
||||||
|
def quote(s):
|
||||||
|
if isinstance(s, unicode):
|
||||||
|
s = s.encode('utf-8')
|
||||||
|
return quote_(s)
|
||||||
|
|
||||||
|
def unquote(s):
|
||||||
|
ans = unquote_(s)
|
||||||
|
if isbytestring(ans):
|
||||||
|
ans = ans.decode('utf-8')
|
||||||
|
return ans
|
||||||
|
|
||||||
|
@ -7,7 +7,7 @@ __docformat__ = 'restructuredtext en'
|
|||||||
Wrapper for multi-threaded access to a single sqlite database connection. Serializes
|
Wrapper for multi-threaded access to a single sqlite database connection. Serializes
|
||||||
all calls.
|
all calls.
|
||||||
'''
|
'''
|
||||||
import sqlite3 as sqlite, traceback, time, uuid
|
import sqlite3 as sqlite, traceback, time, uuid, sys, os
|
||||||
from sqlite3 import IntegrityError, OperationalError
|
from sqlite3 import IntegrityError, OperationalError
|
||||||
from threading import Thread
|
from threading import Thread
|
||||||
from Queue import Queue
|
from Queue import Queue
|
||||||
@ -19,6 +19,7 @@ from calibre.ebooks.metadata import title_sort, author_to_author_sort
|
|||||||
from calibre.utils.config import tweaks
|
from calibre.utils.config import tweaks
|
||||||
from calibre.utils.date import parse_date, isoformat
|
from calibre.utils.date import parse_date, isoformat
|
||||||
from calibre import isbytestring
|
from calibre import isbytestring
|
||||||
|
from calibre.constants import iswindows, DEBUG
|
||||||
|
|
||||||
global_lock = RLock()
|
global_lock = RLock()
|
||||||
|
|
||||||
@ -114,6 +115,22 @@ def pynocase(one, two, encoding='utf-8'):
|
|||||||
pass
|
pass
|
||||||
return cmp(one.lower(), two.lower())
|
return cmp(one.lower(), two.lower())
|
||||||
|
|
||||||
|
|
||||||
|
def load_c_extensions(conn, debug=DEBUG):
|
||||||
|
try:
|
||||||
|
conn.enable_load_extension(True)
|
||||||
|
ext_path = os.path.join(sys.extensions_location, 'sqlite_custom.'+
|
||||||
|
('pyd' if iswindows else 'so'))
|
||||||
|
conn.load_extension(ext_path)
|
||||||
|
conn.enable_load_extension(False)
|
||||||
|
return True
|
||||||
|
except Exception, e:
|
||||||
|
if debug:
|
||||||
|
print 'Failed to load high performance sqlite C extension'
|
||||||
|
print e
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
class DBThread(Thread):
|
class DBThread(Thread):
|
||||||
|
|
||||||
CLOSE = '-------close---------'
|
CLOSE = '-------close---------'
|
||||||
@ -131,9 +148,12 @@ class DBThread(Thread):
|
|||||||
def connect(self):
|
def connect(self):
|
||||||
self.conn = sqlite.connect(self.path, factory=Connection,
|
self.conn = sqlite.connect(self.path, factory=Connection,
|
||||||
detect_types=sqlite.PARSE_DECLTYPES|sqlite.PARSE_COLNAMES)
|
detect_types=sqlite.PARSE_DECLTYPES|sqlite.PARSE_COLNAMES)
|
||||||
|
self.conn.execute('pragma cache_size=5000')
|
||||||
encoding = self.conn.execute('pragma encoding').fetchone()[0]
|
encoding = self.conn.execute('pragma encoding').fetchone()[0]
|
||||||
|
c_ext_loaded = False #load_c_extensions(self.conn)
|
||||||
self.conn.row_factory = sqlite.Row if self.row_factory else lambda cursor, row : list(row)
|
self.conn.row_factory = sqlite.Row if self.row_factory else lambda cursor, row : list(row)
|
||||||
self.conn.create_aggregate('concat', 1, Concatenate)
|
self.conn.create_aggregate('concat', 1, Concatenate)
|
||||||
|
if not c_ext_loaded:
|
||||||
self.conn.create_aggregate('sortconcat', 2, SortedConcatenate)
|
self.conn.create_aggregate('sortconcat', 2, SortedConcatenate)
|
||||||
self.conn.create_aggregate('sort_concat', 2, SafeSortedConcatenate)
|
self.conn.create_aggregate('sort_concat', 2, SafeSortedConcatenate)
|
||||||
self.conn.create_collation('PYNOCASE', partial(pynocase,
|
self.conn.create_collation('PYNOCASE', partial(pynocase,
|
||||||
@ -263,3 +283,9 @@ def connect(dbpath, row_factory=None):
|
|||||||
if conn.proxy.unhandled_error[0] is not None:
|
if conn.proxy.unhandled_error[0] is not None:
|
||||||
raise DatabaseException(*conn.proxy.unhandled_error)
|
raise DatabaseException(*conn.proxy.unhandled_error)
|
||||||
return conn
|
return conn
|
||||||
|
|
||||||
|
def test():
|
||||||
|
c = sqlite.connect(':memory:')
|
||||||
|
if load_c_extensions(c, True):
|
||||||
|
print 'Loaded C extension successfully'
|
||||||
|
|
||||||
|
173
src/calibre/library/sqlite_custom.c
Normal file
@ -0,0 +1,173 @@
|
|||||||
|
#define UNICODE
|
||||||
|
#include <Python.h>
|
||||||
|
|
||||||
|
|
||||||
|
#include <stdlib.h>
|
||||||
|
|
||||||
|
#include <sqlite3ext.h>
|
||||||
|
SQLITE_EXTENSION_INIT1
|
||||||
|
|
||||||
|
#ifdef _MSC_VER
|
||||||
|
#define MYEXPORT __declspec(dllexport)
|
||||||
|
#else
|
||||||
|
#define MYEXPORT
|
||||||
|
#endif
|
||||||
|
|
||||||
|
// sortconcat {{{
|
||||||
|
|
||||||
|
typedef struct {
|
||||||
|
unsigned char *val;
|
||||||
|
int index;
|
||||||
|
int length;
|
||||||
|
} SortConcatItem;
|
||||||
|
|
||||||
|
typedef struct {
|
||||||
|
SortConcatItem **vals;
|
||||||
|
int count;
|
||||||
|
int length;
|
||||||
|
} SortConcatList;
|
||||||
|
|
||||||
|
static void sort_concat_step(sqlite3_context *context, int argc, sqlite3_value **argv) {
|
||||||
|
const unsigned char *val;
|
||||||
|
int idx, sz;
|
||||||
|
SortConcatList *list;
|
||||||
|
|
||||||
|
assert(argc == 2);
|
||||||
|
|
||||||
|
list = (SortConcatList*) sqlite3_aggregate_context(context, sizeof(*list));
|
||||||
|
if (list == NULL) return;
|
||||||
|
|
||||||
|
if (list->vals == NULL) {
|
||||||
|
list->vals = (SortConcatItem**)calloc(100, sizeof(SortConcatItem*));
|
||||||
|
if (list->vals == NULL) return;
|
||||||
|
list->length = 100;
|
||||||
|
list->count = 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (list->count == list->length) {
|
||||||
|
list->vals = (SortConcatItem**)realloc(list->vals, list->length + 100);
|
||||||
|
if (list->vals == NULL) return;
|
||||||
|
list->length = list->length + 100;
|
||||||
|
}
|
||||||
|
|
||||||
|
list->vals[list->count] = (SortConcatItem*)calloc(1, sizeof(SortConcatItem));
|
||||||
|
if (list->vals[list->count] == NULL) return;
|
||||||
|
|
||||||
|
idx = sqlite3_value_int(argv[0]);
|
||||||
|
val = sqlite3_value_text(argv[1]);
|
||||||
|
sz = sqlite3_value_bytes(argv[1]);
|
||||||
|
if (idx == 0 || val == NULL || sz == 0) {free(list->vals[list->count]); return;}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
list->vals[list->count]->val = (unsigned char*)calloc(sz, sizeof(unsigned char));
|
||||||
|
if (list->vals[list->count]->val == NULL)
|
||||||
|
{free(list->vals[list->count]); return;}
|
||||||
|
list->vals[list->count]->index = idx;
|
||||||
|
list->vals[list->count]->length = sz;
|
||||||
|
memcpy(list->vals[list->count]->val, val, sz);
|
||||||
|
list->count = list->count + 1;
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
static void sort_concat_free(SortConcatList *list) {
|
||||||
|
int i;
|
||||||
|
if (list == NULL) return;
|
||||||
|
for (i = 0; i < list->count; i++) {
|
||||||
|
free(list->vals[i]->val);
|
||||||
|
free(list->vals[i]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
static int sort_concat_cmp(const void *a_, const void *b_) {
|
||||||
|
return (*((SortConcatItem**)a_))->index - (*((SortConcatItem**)b_))->index;
|
||||||
|
}
|
||||||
|
|
||||||
|
static unsigned char* sort_concat_do_finalize(SortConcatList *list, const unsigned char join) {
|
||||||
|
unsigned char *ans, *pos;
|
||||||
|
int sz = 0, i;
|
||||||
|
|
||||||
|
for (i = 0; i < list->count; i++) {
|
||||||
|
sz += list->vals[i]->length;
|
||||||
|
}
|
||||||
|
sz += list->count;
|
||||||
|
|
||||||
|
ans = (unsigned char *) calloc(sz, sizeof(unsigned char));
|
||||||
|
if (ans == NULL) return ans;
|
||||||
|
|
||||||
|
pos = ans;
|
||||||
|
for (i = 0; i < list->count; i++) {
|
||||||
|
if (list->vals[i]->length > 0) {
|
||||||
|
memcpy(pos, list->vals[i]->val, list->vals[i]->length);
|
||||||
|
pos += list->vals[i]->length;
|
||||||
|
if (i < list->count -1) { *pos = join; pos += 1; }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return ans;
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
static void sort_concat_finalize(sqlite3_context *context) {
|
||||||
|
SortConcatList *list;
|
||||||
|
unsigned char *ans;
|
||||||
|
|
||||||
|
list = (SortConcatList*) sqlite3_aggregate_context(context, sizeof(*list));
|
||||||
|
|
||||||
|
if (list != NULL && list->vals != NULL && list->count > 0) {
|
||||||
|
qsort(list->vals, list->count, sizeof(list->vals[0]), sort_concat_cmp);
|
||||||
|
ans = sort_concat_do_finalize(list, ',');
|
||||||
|
if (ans != NULL) sqlite3_result_text(context, (char*)ans, -1, SQLITE_TRANSIENT);
|
||||||
|
free(ans);
|
||||||
|
sort_concat_free(list);
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
static void sort_concat_finalize2(sqlite3_context *context) {
|
||||||
|
SortConcatList *list;
|
||||||
|
unsigned char *ans;
|
||||||
|
|
||||||
|
list = (SortConcatList*) sqlite3_aggregate_context(context, sizeof(*list));
|
||||||
|
|
||||||
|
if (list != NULL && list->vals != NULL && list->count > 0) {
|
||||||
|
qsort(list->vals, list->count, sizeof(list->vals[0]), sort_concat_cmp);
|
||||||
|
ans = sort_concat_do_finalize(list, '|');
|
||||||
|
if (ans != NULL) sqlite3_result_text(context, (char*)ans, -1, SQLITE_TRANSIENT);
|
||||||
|
free(ans);
|
||||||
|
sort_concat_free(list);
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
// }}}
|
||||||
|
|
||||||
|
MYEXPORT int sqlite3_extension_init(
|
||||||
|
sqlite3 *db, char **pzErrMsg, const sqlite3_api_routines *pApi){
|
||||||
|
SQLITE_EXTENSION_INIT2(pApi);
|
||||||
|
sqlite3_create_function(db, "sortconcat", 2, SQLITE_UTF8, NULL, NULL, sort_concat_step, sort_concat_finalize);
|
||||||
|
sqlite3_create_function(db, "sort_concat", 2, SQLITE_UTF8, NULL, NULL, sort_concat_step, sort_concat_finalize2);
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
static PyObject *
|
||||||
|
sqlite_custom_init_funcs(PyObject *self, PyObject *args) {
|
||||||
|
Py_RETURN_NONE;
|
||||||
|
}
|
||||||
|
|
||||||
|
static PyMethodDef sqlite_custom_methods[] = {
|
||||||
|
{"init_funcs", sqlite_custom_init_funcs, METH_VARARGS,
|
||||||
|
"init_funcs()\n\nInitialize module."
|
||||||
|
},
|
||||||
|
|
||||||
|
{NULL, NULL, 0, NULL}
|
||||||
|
};
|
||||||
|
|
||||||
|
PyMODINIT_FUNC
|
||||||
|
initsqlite_custom(void) {
|
||||||
|
PyObject *m;
|
||||||
|
m = Py_InitModule3("sqlite_custom", sqlite_custom_methods,
|
||||||
|
"Implementation of custom sqlite methods in C for speed."
|
||||||
|
);
|
||||||
|
if (m == NULL) return;
|
||||||
|
}
|
@ -247,6 +247,20 @@ Also, ::
|
|||||||
|
|
||||||
must return ``CONFIG_SCSI_MULTI_LUN=y``. If you don't see either, you have to recompile your kernel with the correct settings.
|
must return ``CONFIG_SCSI_MULTI_LUN=y``. If you don't see either, you have to recompile your kernel with the correct settings.
|
||||||
|
|
||||||
|
|
||||||
|
Why does |app| not support collection on the Kindle or shelves on the Nook?
|
||||||
|
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
Neither the Kindle nor the Nook provide any way to manipulate collections over a USB connection.
|
||||||
|
If you really care about using collections, I would urge you to sell your Kindle/Nook and get a SONY.
|
||||||
|
Only SONY seems to understand that life is too short to be entering collections one by one on an
|
||||||
|
e-ink screen :)
|
||||||
|
|
||||||
|
Note that in the case of the Kindle, there is a way to manipulate collections via USB,
|
||||||
|
but it requires that the Kindle be rebooted *every time* it is disconnected from the computer, for the
|
||||||
|
changes to the collections to be recognized. As such, it is unlikely that
|
||||||
|
any |app| developers will ever feel motivated enough to support it.
|
||||||
|
|
||||||
Library Management
|
Library Management
|
||||||
------------------
|
------------------
|
||||||
|
|
||||||
|
@ -58,11 +58,15 @@ def get_mx(host, verbose=0):
|
|||||||
int(getattr(y, 'preference', sys.maxint))))
|
int(getattr(y, 'preference', sys.maxint))))
|
||||||
return [str(x.exchange) for x in answers if hasattr(x, 'exchange')]
|
return [str(x.exchange) for x in answers if hasattr(x, 'exchange')]
|
||||||
|
|
||||||
def sendmail_direct(from_, to, msg, timeout, localhost, verbose):
|
def sendmail_direct(from_, to, msg, timeout, localhost, verbose,
|
||||||
import smtplib
|
debug_output=None):
|
||||||
|
import calibre.utils.smtplib as smtplib
|
||||||
hosts = get_mx(to.split('@')[-1].strip(), verbose)
|
hosts = get_mx(to.split('@')[-1].strip(), verbose)
|
||||||
timeout=None # Non blocking sockets sometimes don't work
|
timeout=None # Non blocking sockets sometimes don't work
|
||||||
s = smtplib.SMTP(timeout=timeout, local_hostname=localhost)
|
kwargs = dict(timeout=timeout, local_hostname=localhost)
|
||||||
|
if debug_output is not None:
|
||||||
|
kwargs['debug_to'] = debug_output
|
||||||
|
s = smtplib.SMTP(**kwargs)
|
||||||
s.set_debuglevel(verbose)
|
s.set_debuglevel(verbose)
|
||||||
if not hosts:
|
if not hosts:
|
||||||
raise ValueError('No mail server found for address: %s'%to)
|
raise ValueError('No mail server found for address: %s'%to)
|
||||||
@ -79,20 +83,23 @@ def sendmail_direct(from_, to, msg, timeout, localhost, verbose):
|
|||||||
raise IOError('Failed to send mail: '+repr(last_error))
|
raise IOError('Failed to send mail: '+repr(last_error))
|
||||||
|
|
||||||
|
|
||||||
def sendmail(msg, from_, to, localhost=None, verbose=0, timeout=30,
|
def sendmail(msg, from_, to, localhost=None, verbose=0, timeout=None,
|
||||||
relay=None, username=None, password=None, encryption='TLS',
|
relay=None, username=None, password=None, encryption='TLS',
|
||||||
port=-1):
|
port=-1, debug_output=None):
|
||||||
if relay is None:
|
if relay is None:
|
||||||
for x in to:
|
for x in to:
|
||||||
return sendmail_direct(from_, x, msg, timeout, localhost, verbose)
|
return sendmail_direct(from_, x, msg, timeout, localhost, verbose)
|
||||||
import smtplib
|
import calibre.utils.smtplib as smtplib
|
||||||
cls = smtplib.SMTP if encryption == 'TLS' else smtplib.SMTP_SSL
|
cls = smtplib.SMTP_SSL if encryption == 'SSL' else smtplib.SMTP
|
||||||
timeout = None # Non-blocking sockets sometimes don't work
|
timeout = None # Non-blocking sockets sometimes don't work
|
||||||
port = int(port)
|
port = int(port)
|
||||||
s = cls(timeout=timeout, local_hostname=localhost)
|
kwargs = dict(timeout=timeout, local_hostname=localhost)
|
||||||
|
if debug_output is not None:
|
||||||
|
kwargs['debug_to'] = debug_output
|
||||||
|
s = cls(**kwargs)
|
||||||
s.set_debuglevel(verbose)
|
s.set_debuglevel(verbose)
|
||||||
if port < 0:
|
if port < 0:
|
||||||
port = 25 if encryption == 'TLS' else 465
|
port = 25 if encryption != 'SSL' else 465
|
||||||
s.connect(relay, port)
|
s.connect(relay, port)
|
||||||
if encryption == 'TLS':
|
if encryption == 'TLS':
|
||||||
s.starttls()
|
s.starttls()
|
||||||
@ -151,9 +158,9 @@ def option_parser():
|
|||||||
r('-u', '--username', help='Username for relay')
|
r('-u', '--username', help='Username for relay')
|
||||||
r('-p', '--password', help='Password for relay')
|
r('-p', '--password', help='Password for relay')
|
||||||
r('-e', '--encryption-method', default='TLS',
|
r('-e', '--encryption-method', default='TLS',
|
||||||
choices=['TLS', 'SSL'],
|
choices=['TLS', 'SSL', 'NONE'],
|
||||||
help='Encryption method to use when connecting to relay. Choices are '
|
help='Encryption method to use when connecting to relay. Choices are '
|
||||||
'TLS and SSL. Default is TLS.')
|
'TLS, SSL and NONE. Default is TLS. WARNING: Choosing NONE is highly insecure')
|
||||||
parser.add_option('-o', '--outbox', help='Path to maildir folder to store '
|
parser.add_option('-o', '--outbox', help='Path to maildir folder to store '
|
||||||
'failed email messages in.')
|
'failed email messages in.')
|
||||||
parser.add_option('-f', '--fork', default=False, action='store_true',
|
parser.add_option('-f', '--fork', default=False, action='store_true',
|
||||||
@ -224,6 +231,7 @@ def main(args=sys.argv):
|
|||||||
if opts.fork:
|
if opts.fork:
|
||||||
if os.fork() != 0:
|
if os.fork() != 0:
|
||||||
return 0
|
return 0
|
||||||
|
|
||||||
try:
|
try:
|
||||||
sendmail(msg, efrom, eto, localhost=opts.localhost, verbose=opts.verbose,
|
sendmail(msg, efrom, eto, localhost=opts.localhost, verbose=opts.verbose,
|
||||||
timeout=opts.timeout, relay=opts.relay, username=opts.username,
|
timeout=opts.timeout, relay=opts.relay, username=opts.username,
|
||||||
|
826
src/calibre/utils/smtplib.py
Executable file
@ -0,0 +1,826 @@
|
|||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
|
'''SMTP/ESMTP client class.
|
||||||
|
|
||||||
|
This should follow RFC 821 (SMTP), RFC 1869 (ESMTP), RFC 2554 (SMTP
|
||||||
|
Authentication) and RFC 2487 (Secure SMTP over TLS).
|
||||||
|
|
||||||
|
Notes:
|
||||||
|
|
||||||
|
Please remember, when doing ESMTP, that the names of the SMTP service
|
||||||
|
extensions are NOT the same thing as the option keywords for the RCPT
|
||||||
|
and MAIL commands!
|
||||||
|
|
||||||
|
Example:
|
||||||
|
|
||||||
|
>>> import smtplib
|
||||||
|
>>> s=smtplib.SMTP("localhost")
|
||||||
|
>>> print s.help()
|
||||||
|
This is Sendmail version 8.8.4
|
||||||
|
Topics:
|
||||||
|
HELO EHLO MAIL RCPT DATA
|
||||||
|
RSET NOOP QUIT HELP VRFY
|
||||||
|
EXPN VERB ETRN DSN
|
||||||
|
For more info use "HELP <topic>".
|
||||||
|
To report bugs in the implementation send email to
|
||||||
|
sendmail-bugs@sendmail.org.
|
||||||
|
For local information send email to Postmaster at your site.
|
||||||
|
End of HELP info
|
||||||
|
>>> s.putcmd("vrfy","someone@here")
|
||||||
|
>>> s.getreply()
|
||||||
|
(250, "Somebody OverHere <somebody@here.my.org>")
|
||||||
|
>>> s.quit()
|
||||||
|
'''
|
||||||
|
|
||||||
|
# Author: The Dragon De Monsyne <dragondm@integral.org>
|
||||||
|
# ESMTP support, test code and doc fixes added by
|
||||||
|
# Eric S. Raymond <esr@thyrsus.com>
|
||||||
|
# Better RFC 821 compliance (MAIL and RCPT, and CRLF in data)
|
||||||
|
# by Carey Evans <c.evans@clear.net.nz>, for picky mail servers.
|
||||||
|
# RFC 2554 (authentication) support by Gerhard Haering <gerhard@bigfoot.de>.
|
||||||
|
# Enhanced debugging support by Kovid Goyal
|
||||||
|
#
|
||||||
|
# This was modified from the Python 1.5 library HTTP lib.
|
||||||
|
|
||||||
|
import socket
|
||||||
|
import re
|
||||||
|
import email.utils
|
||||||
|
import base64
|
||||||
|
import hmac
|
||||||
|
import sys
|
||||||
|
from email.base64mime import encode as encode_base64
|
||||||
|
from functools import partial
|
||||||
|
|
||||||
|
__all__ = ["SMTPException","SMTPServerDisconnected","SMTPResponseException",
|
||||||
|
"SMTPSenderRefused","SMTPRecipientsRefused","SMTPDataError",
|
||||||
|
"SMTPConnectError","SMTPHeloError","SMTPAuthenticationError",
|
||||||
|
"quoteaddr","quotedata","SMTP"]
|
||||||
|
|
||||||
|
SMTP_PORT = 25
|
||||||
|
SMTP_SSL_PORT = 465
|
||||||
|
CRLF="\r\n"
|
||||||
|
|
||||||
|
OLDSTYLE_AUTH = re.compile(r"auth=(.*)", re.I)
|
||||||
|
|
||||||
|
# Exception classes used by this module.
|
||||||
|
class SMTPException(Exception):
|
||||||
|
"""Base class for all exceptions raised by this module."""
|
||||||
|
|
||||||
|
class SMTPServerDisconnected(SMTPException):
|
||||||
|
"""Not connected to any SMTP server.
|
||||||
|
|
||||||
|
This exception is raised when the server unexpectedly disconnects,
|
||||||
|
or when an attempt is made to use the SMTP instance before
|
||||||
|
connecting it to a server.
|
||||||
|
"""
|
||||||
|
|
||||||
|
class SMTPResponseException(SMTPException):
|
||||||
|
"""Base class for all exceptions that include an SMTP error code.
|
||||||
|
|
||||||
|
These exceptions are generated in some instances when the SMTP
|
||||||
|
server returns an error code. The error code is stored in the
|
||||||
|
`smtp_code' attribute of the error, and the `smtp_error' attribute
|
||||||
|
is set to the error message.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, code, msg):
|
||||||
|
self.smtp_code = code
|
||||||
|
self.smtp_error = msg
|
||||||
|
self.args = (code, msg)
|
||||||
|
|
||||||
|
class SMTPSenderRefused(SMTPResponseException):
|
||||||
|
"""Sender address refused.
|
||||||
|
|
||||||
|
In addition to the attributes set by on all SMTPResponseException
|
||||||
|
exceptions, this sets `sender' to the string that the SMTP refused.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, code, msg, sender):
|
||||||
|
self.smtp_code = code
|
||||||
|
self.smtp_error = msg
|
||||||
|
self.sender = sender
|
||||||
|
self.args = (code, msg, sender)
|
||||||
|
|
||||||
|
class SMTPRecipientsRefused(SMTPException):
|
||||||
|
"""All recipient addresses refused.
|
||||||
|
|
||||||
|
The errors for each recipient are accessible through the attribute
|
||||||
|
'recipients', which is a dictionary of exactly the same sort as
|
||||||
|
SMTP.sendmail() returns.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, recipients):
|
||||||
|
self.recipients = recipients
|
||||||
|
self.args = ( recipients,)
|
||||||
|
|
||||||
|
|
||||||
|
class SMTPDataError(SMTPResponseException):
|
||||||
|
"""The SMTP server didn't accept the data."""
|
||||||
|
|
||||||
|
class SMTPConnectError(SMTPResponseException):
|
||||||
|
"""Error during connection establishment."""
|
||||||
|
|
||||||
|
class SMTPHeloError(SMTPResponseException):
|
||||||
|
"""The server refused our HELO reply."""
|
||||||
|
|
||||||
|
class SMTPAuthenticationError(SMTPResponseException):
|
||||||
|
"""Authentication error.
|
||||||
|
|
||||||
|
Most probably the server didn't accept the username/password
|
||||||
|
combination provided.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def quoteaddr(addr):
|
||||||
|
"""Quote a subset of the email addresses defined by RFC 821.
|
||||||
|
|
||||||
|
Should be able to handle anything rfc822.parseaddr can handle.
|
||||||
|
"""
|
||||||
|
m = (None, None)
|
||||||
|
try:
|
||||||
|
m = email.utils.parseaddr(addr)[1]
|
||||||
|
except AttributeError:
|
||||||
|
pass
|
||||||
|
if m == (None, None): # Indicates parse failure or AttributeError
|
||||||
|
# something weird here.. punt -ddm
|
||||||
|
return "<%s>" % addr
|
||||||
|
elif m is None:
|
||||||
|
# the sender wants an empty return address
|
||||||
|
return "<>"
|
||||||
|
else:
|
||||||
|
return "<%s>" % m
|
||||||
|
|
||||||
|
def quotedata(data):
|
||||||
|
"""Quote data for email.
|
||||||
|
|
||||||
|
Double leading '.', and change Unix newline '\\n', or Mac '\\r' into
|
||||||
|
Internet CRLF end-of-line.
|
||||||
|
"""
|
||||||
|
return re.sub(r'(?m)^\.', '..',
|
||||||
|
re.sub(r'(?:\r\n|\n|\r(?!\n))', CRLF, data))
|
||||||
|
|
||||||
|
|
||||||
|
try:
|
||||||
|
import ssl
|
||||||
|
except ImportError:
|
||||||
|
_have_ssl = False
|
||||||
|
else:
|
||||||
|
class SSLFakeFile:
|
||||||
|
"""A fake file like object that really wraps a SSLObject.
|
||||||
|
|
||||||
|
It only supports what is needed in smtplib.
|
||||||
|
"""
|
||||||
|
def __init__(self, sslobj):
|
||||||
|
self.sslobj = sslobj
|
||||||
|
|
||||||
|
def readline(self):
|
||||||
|
str = ""
|
||||||
|
chr = None
|
||||||
|
while chr != "\n":
|
||||||
|
chr = self.sslobj.read(1)
|
||||||
|
if not chr: break
|
||||||
|
str += chr
|
||||||
|
return str
|
||||||
|
|
||||||
|
def close(self):
|
||||||
|
pass
|
||||||
|
|
||||||
|
_have_ssl = True
|
||||||
|
|
||||||
|
class SMTP:
|
||||||
|
"""This class manages a connection to an SMTP or ESMTP server.
|
||||||
|
SMTP Objects:
|
||||||
|
SMTP objects have the following attributes:
|
||||||
|
helo_resp
|
||||||
|
This is the message given by the server in response to the
|
||||||
|
most recent HELO command.
|
||||||
|
|
||||||
|
ehlo_resp
|
||||||
|
This is the message given by the server in response to the
|
||||||
|
most recent EHLO command. This is usually multiline.
|
||||||
|
|
||||||
|
does_esmtp
|
||||||
|
This is a True value _after you do an EHLO command_, if the
|
||||||
|
server supports ESMTP.
|
||||||
|
|
||||||
|
esmtp_features
|
||||||
|
This is a dictionary, which, if the server supports ESMTP,
|
||||||
|
will _after you do an EHLO command_, contain the names of the
|
||||||
|
SMTP service extensions this server supports, and their
|
||||||
|
parameters (if any).
|
||||||
|
|
||||||
|
Note, all extension names are mapped to lower case in the
|
||||||
|
dictionary.
|
||||||
|
|
||||||
|
See each method's docstrings for details. In general, there is a
|
||||||
|
method of the same name to perform each SMTP command. There is also a
|
||||||
|
method called 'sendmail' that will do an entire mail transaction.
|
||||||
|
"""
|
||||||
|
debuglevel = 0
|
||||||
|
file = None
|
||||||
|
helo_resp = None
|
||||||
|
ehlo_msg = "ehlo"
|
||||||
|
ehlo_resp = None
|
||||||
|
does_esmtp = 0
|
||||||
|
|
||||||
|
def __init__(self, host='', port=0, local_hostname=None,
|
||||||
|
timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
|
||||||
|
debug_to=partial(print, file=sys.stderr)):
|
||||||
|
"""Initialize a new instance.
|
||||||
|
|
||||||
|
If specified, `host' is the name of the remote host to which to
|
||||||
|
connect. If specified, `port' specifies the port to which to connect.
|
||||||
|
By default, smtplib.SMTP_PORT is used. An SMTPConnectError is raised
|
||||||
|
if the specified `host' doesn't respond correctly. If specified,
|
||||||
|
`local_hostname` is used as the FQDN of the local host. By default,
|
||||||
|
the local hostname is found using socket.getfqdn(). `debug_to`
|
||||||
|
specifies where debug output is written to. By default it is written to
|
||||||
|
sys.stderr. You should pass in a print function of your own to control
|
||||||
|
where debug output is written.
|
||||||
|
"""
|
||||||
|
self.timeout = timeout
|
||||||
|
self.debug = debug_to
|
||||||
|
self.esmtp_features = {}
|
||||||
|
self.default_port = SMTP_PORT
|
||||||
|
if host:
|
||||||
|
(code, msg) = self.connect(host, port)
|
||||||
|
if code != 220:
|
||||||
|
raise SMTPConnectError(code, msg)
|
||||||
|
if local_hostname is not None:
|
||||||
|
self.local_hostname = local_hostname
|
||||||
|
else:
|
||||||
|
# RFC 2821 says we should use the fqdn in the EHLO/HELO verb, and
|
||||||
|
# if that can't be calculated, that we should use a domain literal
|
||||||
|
# instead (essentially an encoded IP address like [A.B.C.D]).
|
||||||
|
fqdn = socket.getfqdn()
|
||||||
|
if '.' in fqdn:
|
||||||
|
self.local_hostname = fqdn
|
||||||
|
else:
|
||||||
|
# We can't find an fqdn hostname, so use a domain literal
|
||||||
|
addr = '127.0.0.1'
|
||||||
|
try:
|
||||||
|
addr = socket.gethostbyname(socket.gethostname())
|
||||||
|
except socket.gaierror:
|
||||||
|
pass
|
||||||
|
self.local_hostname = '[%s]' % addr
|
||||||
|
|
||||||
|
def set_debuglevel(self, debuglevel):
|
||||||
|
"""Set the debug output level.
|
||||||
|
|
||||||
|
A value of 0 means no debug logging. A value of 1 means all interaction
|
||||||
|
with the server is logged except that long lines are truncated to 100
|
||||||
|
characters and AUTH messages are censored. A value of 2 or higher means
|
||||||
|
the complete session is logged.
|
||||||
|
|
||||||
|
"""
|
||||||
|
self.debuglevel = debuglevel
|
||||||
|
|
||||||
|
def _get_socket(self, port, host, timeout):
|
||||||
|
# This makes it simpler for SMTP_SSL to use the SMTP connect code
|
||||||
|
# and just alter the socket connection bit.
|
||||||
|
if self.debuglevel > 0: self.debug('connect:', (host, port))
|
||||||
|
return socket.create_connection((port, host), timeout)
|
||||||
|
|
||||||
|
def connect(self, host='localhost', port = 0):
|
||||||
|
"""Connect to a host on a given port.
|
||||||
|
|
||||||
|
If the hostname ends with a colon (`:') followed by a number, and
|
||||||
|
there is no port specified, that suffix will be stripped off and the
|
||||||
|
number interpreted as the port number to use.
|
||||||
|
|
||||||
|
Note: This method is automatically invoked by __init__, if a host is
|
||||||
|
specified during instantiation.
|
||||||
|
|
||||||
|
"""
|
||||||
|
if not port and (host.find(':') == host.rfind(':')):
|
||||||
|
i = host.rfind(':')
|
||||||
|
if i >= 0:
|
||||||
|
host, port = host[:i], host[i+1:]
|
||||||
|
try: port = int(port)
|
||||||
|
except ValueError:
|
||||||
|
raise socket.error, "nonnumeric port"
|
||||||
|
if not port: port = self.default_port
|
||||||
|
if self.debuglevel > 0: self.debug('connect:', (host, port))
|
||||||
|
self.sock = self._get_socket(host, port, self.timeout)
|
||||||
|
(code, msg) = self.getreply()
|
||||||
|
if self.debuglevel > 0: self.debug("connect:", msg)
|
||||||
|
return (code, msg)
|
||||||
|
|
||||||
|
def send(self, str):
|
||||||
|
"""Send `str' to the server."""
|
||||||
|
if self.debuglevel > 0:
|
||||||
|
raw = repr(str)
|
||||||
|
if self.debuglevel < 2:
|
||||||
|
if len(raw) > 100:
|
||||||
|
raw = raw[:100] + '...'
|
||||||
|
if 'AUTH' in raw:
|
||||||
|
raw = 'AUTH <censored>'
|
||||||
|
self.debug('send:', raw)
|
||||||
|
if hasattr(self, 'sock') and self.sock:
|
||||||
|
try:
|
||||||
|
self.sock.sendall(str)
|
||||||
|
except socket.error:
|
||||||
|
self.close()
|
||||||
|
raise SMTPServerDisconnected('Server not connected')
|
||||||
|
else:
|
||||||
|
raise SMTPServerDisconnected('please run connect() first')
|
||||||
|
|
||||||
|
def putcmd(self, cmd, args=""):
|
||||||
|
"""Send a command to the server."""
|
||||||
|
if args == "":
|
||||||
|
str = '%s%s' % (cmd, CRLF)
|
||||||
|
else:
|
||||||
|
str = '%s %s%s' % (cmd, args, CRLF)
|
||||||
|
self.send(str)
|
||||||
|
|
||||||
|
def getreply(self):
|
||||||
|
"""Get a reply from the server.
|
||||||
|
|
||||||
|
Returns a tuple consisting of:
|
||||||
|
|
||||||
|
- server response code (e.g. '250', or such, if all goes well)
|
||||||
|
Note: returns -1 if it can't read response code.
|
||||||
|
|
||||||
|
- server response string corresponding to response code (multiline
|
||||||
|
responses are converted to a single, multiline string).
|
||||||
|
|
||||||
|
Raises SMTPServerDisconnected if end-of-file is reached.
|
||||||
|
"""
|
||||||
|
resp=[]
|
||||||
|
if self.file is None:
|
||||||
|
self.file = self.sock.makefile('rb')
|
||||||
|
while 1:
|
||||||
|
try:
|
||||||
|
line = self.file.readline()
|
||||||
|
except socket.error:
|
||||||
|
line = ''
|
||||||
|
if line == '':
|
||||||
|
self.close()
|
||||||
|
raise SMTPServerDisconnected("Connection unexpectedly closed")
|
||||||
|
if self.debuglevel > 0: self.debug('reply:', repr(line))
|
||||||
|
resp.append(line[4:].strip())
|
||||||
|
code=line[:3]
|
||||||
|
# Check that the error code is syntactically correct.
|
||||||
|
# Don't attempt to read a continuation line if it is broken.
|
||||||
|
try:
|
||||||
|
errcode = int(code)
|
||||||
|
except ValueError:
|
||||||
|
errcode = -1
|
||||||
|
break
|
||||||
|
# Check if multiline response.
|
||||||
|
if line[3:4]!="-":
|
||||||
|
break
|
||||||
|
|
||||||
|
errmsg = "\n".join(resp)
|
||||||
|
if self.debuglevel > 0:
|
||||||
|
self.debug('reply: retcode (%s); Msg: %s' % (errcode,errmsg))
|
||||||
|
return errcode, errmsg
|
||||||
|
|
||||||
|
def docmd(self, cmd, args=""):
|
||||||
|
"""Send a command, and return its response code."""
|
||||||
|
self.putcmd(cmd,args)
|
||||||
|
return self.getreply()
|
||||||
|
|
||||||
|
# std smtp commands
|
||||||
|
def helo(self, name=''):
|
||||||
|
"""SMTP 'helo' command.
|
||||||
|
Hostname to send for this command defaults to the FQDN of the local
|
||||||
|
host.
|
||||||
|
"""
|
||||||
|
self.putcmd("helo", name or self.local_hostname)
|
||||||
|
(code,msg)=self.getreply()
|
||||||
|
self.helo_resp=msg
|
||||||
|
return (code,msg)
|
||||||
|
|
||||||
|
def ehlo(self, name=''):
|
||||||
|
""" SMTP 'ehlo' command.
|
||||||
|
Hostname to send for this command defaults to the FQDN of the local
|
||||||
|
host.
|
||||||
|
"""
|
||||||
|
self.esmtp_features = {}
|
||||||
|
self.putcmd(self.ehlo_msg, name or self.local_hostname)
|
||||||
|
(code,msg)=self.getreply()
|
||||||
|
# According to RFC1869 some (badly written)
|
||||||
|
# MTA's will disconnect on an ehlo. Toss an exception if
|
||||||
|
# that happens -ddm
|
||||||
|
if code == -1 and len(msg) == 0:
|
||||||
|
self.close()
|
||||||
|
raise SMTPServerDisconnected("Server not connected")
|
||||||
|
self.ehlo_resp=msg
|
||||||
|
if code != 250:
|
||||||
|
return (code,msg)
|
||||||
|
self.does_esmtp=1
|
||||||
|
#parse the ehlo response -ddm
|
||||||
|
resp=self.ehlo_resp.split('\n')
|
||||||
|
del resp[0]
|
||||||
|
for each in resp:
|
||||||
|
# To be able to communicate with as many SMTP servers as possible,
|
||||||
|
# we have to take the old-style auth advertisement into account,
|
||||||
|
# because:
|
||||||
|
# 1) Else our SMTP feature parser gets confused.
|
||||||
|
# 2) There are some servers that only advertise the auth methods we
|
||||||
|
# support using the old style.
|
||||||
|
auth_match = OLDSTYLE_AUTH.match(each)
|
||||||
|
if auth_match:
|
||||||
|
# This doesn't remove duplicates, but that's no problem
|
||||||
|
self.esmtp_features["auth"] = self.esmtp_features.get("auth", "") \
|
||||||
|
+ " " + auth_match.groups(0)[0]
|
||||||
|
continue
|
||||||
|
|
||||||
|
# RFC 1869 requires a space between ehlo keyword and parameters.
|
||||||
|
# It's actually stricter, in that only spaces are allowed between
|
||||||
|
# parameters, but were not going to check for that here. Note
|
||||||
|
# that the space isn't present if there are no parameters.
|
||||||
|
m=re.match(r'(?P<feature>[A-Za-z0-9][A-Za-z0-9\-]*) ?',each)
|
||||||
|
if m:
|
||||||
|
feature=m.group("feature").lower()
|
||||||
|
params=m.string[m.end("feature"):].strip()
|
||||||
|
if feature == "auth":
|
||||||
|
self.esmtp_features[feature] = self.esmtp_features.get(feature, "") \
|
||||||
|
+ " " + params
|
||||||
|
else:
|
||||||
|
self.esmtp_features[feature]=params
|
||||||
|
return (code,msg)
|
||||||
|
|
||||||
|
def has_extn(self, opt):
|
||||||
|
"""Does the server support a given SMTP service extension?"""
|
||||||
|
return opt.lower() in self.esmtp_features
|
||||||
|
|
||||||
|
def help(self, args=''):
|
||||||
|
"""SMTP 'help' command.
|
||||||
|
Returns help text from server."""
|
||||||
|
self.putcmd("help", args)
|
||||||
|
return self.getreply()[1]
|
||||||
|
|
||||||
|
def rset(self):
|
||||||
|
"""SMTP 'rset' command -- resets session."""
|
||||||
|
return self.docmd("rset")
|
||||||
|
|
||||||
|
def noop(self):
|
||||||
|
"""SMTP 'noop' command -- doesn't do anything :>"""
|
||||||
|
return self.docmd("noop")
|
||||||
|
|
||||||
|
def mail(self,sender,options=[]):
|
||||||
|
"""SMTP 'mail' command -- begins mail xfer session."""
|
||||||
|
optionlist = ''
|
||||||
|
if options and self.does_esmtp:
|
||||||
|
optionlist = ' ' + ' '.join(options)
|
||||||
|
self.putcmd("mail", "FROM:%s%s" % (quoteaddr(sender) ,optionlist))
|
||||||
|
return self.getreply()
|
||||||
|
|
||||||
|
def rcpt(self,recip,options=[]):
|
||||||
|
"""SMTP 'rcpt' command -- indicates 1 recipient for this mail."""
|
||||||
|
optionlist = ''
|
||||||
|
if options and self.does_esmtp:
|
||||||
|
optionlist = ' ' + ' '.join(options)
|
||||||
|
self.putcmd("rcpt","TO:%s%s" % (quoteaddr(recip),optionlist))
|
||||||
|
return self.getreply()
|
||||||
|
|
||||||
|
def data(self,msg):
|
||||||
|
"""SMTP 'DATA' command -- sends message data to server.
|
||||||
|
|
||||||
|
Automatically quotes lines beginning with a period per rfc821.
|
||||||
|
Raises SMTPDataError if there is an unexpected reply to the
|
||||||
|
DATA command; the return value from this method is the final
|
||||||
|
response code received when the all data is sent.
|
||||||
|
"""
|
||||||
|
self.putcmd("data")
|
||||||
|
(code,repl)=self.getreply()
|
||||||
|
if self.debuglevel >0 : self.debug("data:", (code,repl))
|
||||||
|
if code != 354:
|
||||||
|
raise SMTPDataError(code,repl)
|
||||||
|
else:
|
||||||
|
q = quotedata(msg)
|
||||||
|
if q[-2:] != CRLF:
|
||||||
|
q = q + CRLF
|
||||||
|
q = q + "." + CRLF
|
||||||
|
self.send(q)
|
||||||
|
(code,msg)=self.getreply()
|
||||||
|
if self.debuglevel > 0 :
|
||||||
|
self.debug("data:", (code,msg))
|
||||||
|
return (code,msg)
|
||||||
|
|
||||||
|
def verify(self, address):
|
||||||
|
"""SMTP 'verify' command -- checks for address validity."""
|
||||||
|
self.putcmd("vrfy", quoteaddr(address))
|
||||||
|
return self.getreply()
|
||||||
|
# a.k.a.
|
||||||
|
vrfy=verify
|
||||||
|
|
||||||
|
def expn(self, address):
|
||||||
|
"""SMTP 'expn' command -- expands a mailing list."""
|
||||||
|
self.putcmd("expn", quoteaddr(address))
|
||||||
|
return self.getreply()
|
||||||
|
|
||||||
|
# some useful methods
|
||||||
|
|
||||||
|
def ehlo_or_helo_if_needed(self):
|
||||||
|
"""Call self.ehlo() and/or self.helo() if needed.
|
||||||
|
|
||||||
|
If there has been no previous EHLO or HELO command this session, this
|
||||||
|
method tries ESMTP EHLO first.
|
||||||
|
|
||||||
|
This method may raise the following exceptions:
|
||||||
|
|
||||||
|
SMTPHeloError The server didn't reply properly to
|
||||||
|
the helo greeting.
|
||||||
|
"""
|
||||||
|
if self.helo_resp is None and self.ehlo_resp is None:
|
||||||
|
if not (200 <= self.ehlo()[0] <= 299):
|
||||||
|
(code, resp) = self.helo()
|
||||||
|
if not (200 <= code <= 299):
|
||||||
|
raise SMTPHeloError(code, resp)
|
||||||
|
|
||||||
|
def login(self, user, password):
|
||||||
|
"""Log in on an SMTP server that requires authentication.
|
||||||
|
|
||||||
|
The arguments are:
|
||||||
|
- user: The user name to authenticate with.
|
||||||
|
- password: The password for the authentication.
|
||||||
|
|
||||||
|
If there has been no previous EHLO or HELO command this session, this
|
||||||
|
method tries ESMTP EHLO first.
|
||||||
|
|
||||||
|
This method will return normally if the authentication was successful.
|
||||||
|
|
||||||
|
This method may raise the following exceptions:
|
||||||
|
|
||||||
|
SMTPHeloError The server didn't reply properly to
|
||||||
|
the helo greeting.
|
||||||
|
SMTPAuthenticationError The server didn't accept the username/
|
||||||
|
password combination.
|
||||||
|
SMTPException No suitable authentication method was
|
||||||
|
found.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def encode_cram_md5(challenge, user, password):
|
||||||
|
challenge = base64.decodestring(challenge)
|
||||||
|
response = user + " " + hmac.HMAC(password, challenge).hexdigest()
|
||||||
|
return encode_base64(response, eol="")
|
||||||
|
|
||||||
|
def encode_plain(user, password):
|
||||||
|
return encode_base64("\0%s\0%s" % (user, password), eol="")
|
||||||
|
|
||||||
|
|
||||||
|
AUTH_PLAIN = "PLAIN"
|
||||||
|
AUTH_CRAM_MD5 = "CRAM-MD5"
|
||||||
|
AUTH_LOGIN = "LOGIN"
|
||||||
|
|
||||||
|
self.ehlo_or_helo_if_needed()
|
||||||
|
|
||||||
|
if not self.has_extn("auth"):
|
||||||
|
raise SMTPException("SMTP AUTH extension not supported by server.")
|
||||||
|
|
||||||
|
# Authentication methods the server supports:
|
||||||
|
authlist = self.esmtp_features["auth"].split()
|
||||||
|
|
||||||
|
# List of authentication methods we support: from preferred to
|
||||||
|
# less preferred methods. Except for the purpose of testing the weaker
|
||||||
|
# ones, we prefer stronger methods like CRAM-MD5:
|
||||||
|
preferred_auths = [AUTH_CRAM_MD5, AUTH_PLAIN, AUTH_LOGIN]
|
||||||
|
|
||||||
|
# Determine the authentication method we'll use
|
||||||
|
authmethod = None
|
||||||
|
for method in preferred_auths:
|
||||||
|
if method in authlist:
|
||||||
|
authmethod = method
|
||||||
|
break
|
||||||
|
|
||||||
|
if authmethod == AUTH_CRAM_MD5:
|
||||||
|
(code, resp) = self.docmd("AUTH", AUTH_CRAM_MD5)
|
||||||
|
if code == 503:
|
||||||
|
# 503 == 'Error: already authenticated'
|
||||||
|
return (code, resp)
|
||||||
|
(code, resp) = self.docmd(encode_cram_md5(resp, user, password))
|
||||||
|
elif authmethod == AUTH_PLAIN:
|
||||||
|
(code, resp) = self.docmd("AUTH",
|
||||||
|
AUTH_PLAIN + " " + encode_plain(user, password))
|
||||||
|
elif authmethod == AUTH_LOGIN:
|
||||||
|
(code, resp) = self.docmd("AUTH",
|
||||||
|
"%s %s" % (AUTH_LOGIN, encode_base64(user, eol="")))
|
||||||
|
if code != 334:
|
||||||
|
raise SMTPAuthenticationError(code, resp)
|
||||||
|
(code, resp) = self.docmd(encode_base64(password, eol=""))
|
||||||
|
elif authmethod is None:
|
||||||
|
raise SMTPException("No suitable authentication method found.")
|
||||||
|
if code not in (235, 503):
|
||||||
|
# 235 == 'Authentication successful'
|
||||||
|
# 503 == 'Error: already authenticated'
|
||||||
|
raise SMTPAuthenticationError(code, resp)
|
||||||
|
return (code, resp)
|
||||||
|
|
||||||
|
def starttls(self, keyfile = None, certfile = None):
|
||||||
|
"""Puts the connection to the SMTP server into TLS mode.
|
||||||
|
|
||||||
|
If there has been no previous EHLO or HELO command this session, this
|
||||||
|
method tries ESMTP EHLO first.
|
||||||
|
|
||||||
|
If the server supports TLS, this will encrypt the rest of the SMTP
|
||||||
|
session. If you provide the keyfile and certfile parameters,
|
||||||
|
the identity of the SMTP server and client can be checked. This,
|
||||||
|
however, depends on whether the socket module really checks the
|
||||||
|
certificates.
|
||||||
|
|
||||||
|
This method may raise the following exceptions:
|
||||||
|
|
||||||
|
SMTPHeloError The server didn't reply properly to
|
||||||
|
the helo greeting.
|
||||||
|
"""
|
||||||
|
self.ehlo_or_helo_if_needed()
|
||||||
|
if not self.has_extn("starttls"):
|
||||||
|
raise SMTPException("STARTTLS extension not supported by server.")
|
||||||
|
(resp, reply) = self.docmd("STARTTLS")
|
||||||
|
if resp == 220:
|
||||||
|
if not _have_ssl:
|
||||||
|
raise RuntimeError("No SSL support included in this Python")
|
||||||
|
self.sock = ssl.wrap_socket(self.sock, keyfile, certfile)
|
||||||
|
self.file = SSLFakeFile(self.sock)
|
||||||
|
# RFC 3207:
|
||||||
|
# The client MUST discard any knowledge obtained from
|
||||||
|
# the server, such as the list of SMTP service extensions,
|
||||||
|
# which was not obtained from the TLS negotiation itself.
|
||||||
|
self.helo_resp = None
|
||||||
|
self.ehlo_resp = None
|
||||||
|
self.esmtp_features = {}
|
||||||
|
self.does_esmtp = 0
|
||||||
|
return (resp, reply)
|
||||||
|
|
||||||
|
def sendmail(self, from_addr, to_addrs, msg, mail_options=[],
|
||||||
|
rcpt_options=[]):
|
||||||
|
"""This command performs an entire mail transaction.
|
||||||
|
|
||||||
|
The arguments are:
|
||||||
|
- from_addr : The address sending this mail.
|
||||||
|
- to_addrs : A list of addresses to send this mail to. A bare
|
||||||
|
string will be treated as a list with 1 address.
|
||||||
|
- msg : The message to send.
|
||||||
|
- mail_options : List of ESMTP options (such as 8bitmime) for the
|
||||||
|
mail command.
|
||||||
|
- rcpt_options : List of ESMTP options (such as DSN commands) for
|
||||||
|
all the rcpt commands.
|
||||||
|
|
||||||
|
If there has been no previous EHLO or HELO command this session, this
|
||||||
|
method tries ESMTP EHLO first. If the server does ESMTP, message size
|
||||||
|
and each of the specified options will be passed to it. If EHLO
|
||||||
|
fails, HELO will be tried and ESMTP options suppressed.
|
||||||
|
|
||||||
|
This method will return normally if the mail is accepted for at least
|
||||||
|
one recipient. It returns a dictionary, with one entry for each
|
||||||
|
recipient that was refused. Each entry contains a tuple of the SMTP
|
||||||
|
error code and the accompanying error message sent by the server.
|
||||||
|
|
||||||
|
This method may raise the following exceptions:
|
||||||
|
|
||||||
|
SMTPHeloError The server didn't reply properly to
|
||||||
|
the helo greeting.
|
||||||
|
SMTPRecipientsRefused The server rejected ALL recipients
|
||||||
|
(no mail was sent).
|
||||||
|
SMTPSenderRefused The server didn't accept the from_addr.
|
||||||
|
SMTPDataError The server replied with an unexpected
|
||||||
|
error code (other than a refusal of
|
||||||
|
a recipient).
|
||||||
|
|
||||||
|
Note: the connection will be open even after an exception is raised.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
|
||||||
|
>>> import smtplib
|
||||||
|
>>> s=smtplib.SMTP("localhost")
|
||||||
|
>>> tolist=["one@one.org","two@two.org","three@three.org","four@four.org"]
|
||||||
|
>>> msg = '''\\
|
||||||
|
... From: Me@my.org
|
||||||
|
... Subject: testin'...
|
||||||
|
...
|
||||||
|
... This is a test '''
|
||||||
|
>>> s.sendmail("me@my.org",tolist,msg)
|
||||||
|
{ "three@three.org" : ( 550 ,"User unknown" ) }
|
||||||
|
>>> s.quit()
|
||||||
|
|
||||||
|
In the above example, the message was accepted for delivery to three
|
||||||
|
of the four addresses, and one was rejected, with the error code
|
||||||
|
550. If all addresses are accepted, then the method will return an
|
||||||
|
empty dictionary.
|
||||||
|
|
||||||
|
"""
|
||||||
|
self.ehlo_or_helo_if_needed()
|
||||||
|
esmtp_opts = []
|
||||||
|
if self.does_esmtp:
|
||||||
|
# Hmmm? what's this? -ddm
|
||||||
|
# self.esmtp_features['7bit']=""
|
||||||
|
if self.has_extn('size'):
|
||||||
|
esmtp_opts.append("size=%d" % len(msg))
|
||||||
|
for option in mail_options:
|
||||||
|
esmtp_opts.append(option)
|
||||||
|
|
||||||
|
(code,resp) = self.mail(from_addr, esmtp_opts)
|
||||||
|
if code != 250:
|
||||||
|
self.rset()
|
||||||
|
raise SMTPSenderRefused(code, resp, from_addr)
|
||||||
|
senderrs={}
|
||||||
|
if isinstance(to_addrs, basestring):
|
||||||
|
to_addrs = [to_addrs]
|
||||||
|
for each in to_addrs:
|
||||||
|
(code,resp)=self.rcpt(each, rcpt_options)
|
||||||
|
if (code != 250) and (code != 251):
|
||||||
|
senderrs[each]=(code,resp)
|
||||||
|
if len(senderrs)==len(to_addrs):
|
||||||
|
# the server refused all our recipients
|
||||||
|
self.rset()
|
||||||
|
raise SMTPRecipientsRefused(senderrs)
|
||||||
|
(code,resp) = self.data(msg)
|
||||||
|
if code != 250:
|
||||||
|
self.rset()
|
||||||
|
raise SMTPDataError(code, resp)
|
||||||
|
#if we got here then somebody got our mail
|
||||||
|
return senderrs
|
||||||
|
|
||||||
|
|
||||||
|
def close(self):
|
||||||
|
"""Close the connection to the SMTP server."""
|
||||||
|
if self.file:
|
||||||
|
self.file.close()
|
||||||
|
self.file = None
|
||||||
|
if self.sock:
|
||||||
|
self.sock.close()
|
||||||
|
self.sock = None
|
||||||
|
|
||||||
|
|
||||||
|
def quit(self):
|
||||||
|
"""Terminate the SMTP session."""
|
||||||
|
res = self.docmd("quit")
|
||||||
|
self.close()
|
||||||
|
return res
|
||||||
|
|
||||||
|
if _have_ssl:
|
||||||
|
|
||||||
|
class SMTP_SSL(SMTP):
|
||||||
|
""" This is a subclass derived from SMTP that connects over an SSL encrypted
|
||||||
|
socket (to use this class you need a socket module that was compiled with SSL
|
||||||
|
support). If host is not specified, '' (the local host) is used. If port is
|
||||||
|
omitted, the standard SMTP-over-SSL port (465) is used. keyfile and certfile
|
||||||
|
are also optional - they can contain a PEM formatted private key and
|
||||||
|
certificate chain file for the SSL connection.
|
||||||
|
"""
|
||||||
|
def __init__(self, host='', port=0, local_hostname=None,
|
||||||
|
keyfile=None, certfile=None,
|
||||||
|
timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
|
||||||
|
debug_to=partial(print, file=sys.stderr)):
|
||||||
|
self.keyfile = keyfile
|
||||||
|
self.certfile = certfile
|
||||||
|
SMTP.__init__(self, host, port, local_hostname, timeout,
|
||||||
|
debug_to=debug_to)
|
||||||
|
self.default_port = SMTP_SSL_PORT
|
||||||
|
|
||||||
|
def _get_socket(self, host, port, timeout):
|
||||||
|
if self.debuglevel > 0: self.debug('connect:', (host, port))
|
||||||
|
new_socket = socket.create_connection((host, port), timeout)
|
||||||
|
new_socket = ssl.wrap_socket(new_socket, self.keyfile, self.certfile)
|
||||||
|
self.file = SSLFakeFile(new_socket)
|
||||||
|
return new_socket
|
||||||
|
|
||||||
|
__all__.append("SMTP_SSL")
|
||||||
|
|
||||||
|
#
|
||||||
|
# LMTP extension
|
||||||
|
#
|
||||||
|
LMTP_PORT = 2003
|
||||||
|
|
||||||
|
class LMTP(SMTP):
|
||||||
|
"""LMTP - Local Mail Transfer Protocol
|
||||||
|
|
||||||
|
The LMTP protocol, which is very similar to ESMTP, is heavily based
|
||||||
|
on the standard SMTP client. It's common to use Unix sockets for LMTP,
|
||||||
|
so our connect() method must support that as well as a regular
|
||||||
|
host:port server. To specify a Unix socket, you must use an absolute
|
||||||
|
path as the host, starting with a '/'.
|
||||||
|
|
||||||
|
Authentication is supported, using the regular SMTP mechanism. When
|
||||||
|
using a Unix socket, LMTP generally don't support or require any
|
||||||
|
authentication, but your mileage might vary."""
|
||||||
|
|
||||||
|
ehlo_msg = "lhlo"
|
||||||
|
|
||||||
|
def __init__(self, host = '', port = LMTP_PORT, local_hostname = None):
|
||||||
|
"""Initialize a new instance."""
|
||||||
|
SMTP.__init__(self, host, port, local_hostname)
|
||||||
|
|
||||||
|
def connect(self, host = 'localhost', port = 0):
|
||||||
|
"""Connect to the LMTP daemon, on either a Unix or a TCP socket."""
|
||||||
|
if host[0] != '/':
|
||||||
|
return SMTP.connect(self, host, port)
|
||||||
|
|
||||||
|
# Handle Unix-domain sockets.
|
||||||
|
try:
|
||||||
|
self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
|
||||||
|
self.sock.connect(host)
|
||||||
|
except socket.error, msg:
|
||||||
|
if self.debuglevel > 0: self.debug('connect fail:', host)
|
||||||
|
if self.sock:
|
||||||
|
self.sock.close()
|
||||||
|
self.sock = None
|
||||||
|
raise socket.error, msg
|
||||||
|
(code, msg) = self.getreply()
|
||||||
|
if self.debuglevel > 0: self.debug("connect:", msg)
|
||||||
|
return (code, msg)
|
||||||
|
|
||||||
|
|
||||||
|
|