News download: Update RSS feedparser module to latest version

Kovid Goyal 2011-03-22 22:03:13 -06:00
parent 5f9032fa25
commit 5be578a9fd
4 changed files with 381 additions and 177 deletions


@@ -75,7 +75,7 @@ if plugins is None:
                     (['usbobserver'] if isosx else []):
             try:
                 p, err = importlib.import_module(plugin), ''
-            except Exception, err:
+            except Exception as err:
                 p = None
                 err = str(err)
             plugins[plugin] = (p, err)
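
The change in this hunk recurs throughout the commit: the Python-2-only `except E, err:` spelling is replaced by `except E as err:`, which is accepted from Python 2.6 onward and is the only form Python 3 allows. A minimal sketch of the two spellings (the `risky_operation` helper is hypothetical, standing in for calls such as `importlib.import_module`):

def risky_operation():
    # Hypothetical stand-in for calls such as importlib.import_module(plugin)
    raise ValueError('something went wrong')

# Old, Python-2-only spelling (a syntax error on Python 3):
#     except ValueError, err:
# New spelling, valid on Python 2.6+ and on Python 3:
try:
    risky_operation()
except ValueError as err:
    print 'operation failed:', err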

File diff suppressed because it is too large.
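
The suppressed diff is presumably the updated feedparser module itself, which accounts for most of the commit's 381 additions and 177 deletions. For orientation, a minimal sketch of the standard feedparser API that the news download code builds on (the feed URL is a placeholder):

import feedparser

# Parse a feed given a URL, a local file path, or a raw XML string.
d = feedparser.parse('http://example.com/rss.xml')

if d.bozo:
    # bozo is set when the feed was malformed; bozo_exception holds the parser error.
    print 'Feed had problems:', d.bozo_exception

print d.feed.get('title', 'Untitled feed')
for entry in d.entries:
    print entry.get('title'), entry.get('link')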


@@ -14,7 +14,7 @@ from contextlib import nested, closing
 from calibre import browser, __appname__, iswindows, \
-                    strftime, preferred_encoding
+                    strftime, preferred_encoding, as_unicode
 from calibre.ebooks.BeautifulSoup import BeautifulSoup, NavigableString, CData, Tag
 from calibre.ebooks.metadata.opf2 import OPFCreator
 from calibre import entity_to_unicode
@@ -986,8 +986,8 @@ class BasicNewsRecipe(Recipe):
         self.cover_path = None
         try:
             cu = self.get_cover_url()
-        except Exception, err:
-            self.log.error(_('Could not download cover: %s')%str(err))
+        except Exception as err:
+            self.log.error(_('Could not download cover: %s')%as_unicode(err))
             self.log.debug(traceback.format_exc())
         else:
             if not cu:
@@ -1318,11 +1318,11 @@ class BasicNewsRecipe(Recipe):
                         oldest_article=self.oldest_article,
                         max_articles_per_feed=self.max_articles_per_feed,
                         get_article_url=self.get_article_url))
-            except Exception, err:
+            except Exception as err:
                 feed = Feed()
                 msg = 'Failed feed: %s'%(title if title else url)
                 feed.populate_from_preparsed_feed(msg, [])
-                feed.description = repr(err)
+                feed.description = as_unicode(err)
                 parsed_feeds.append(feed)
                 self.log.exception(msg)
@@ -1468,7 +1468,7 @@ class CalibrePeriodical(BasicNewsRecipe):
                 'http://news.calibre-ebook.com/subscribed_files/%s/0/temp.downloaded_recipe'
                 % self.calibre_periodicals_slug
                 ).read()
-        except Exception, e:
+        except Exception as e:
             if hasattr(e, 'getcode') and e.getcode() == 403:
                 raise DownloadDenied(
                     _('You do not have permission to download this issue.'
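
The hunks above also switch error reporting from `str(err)` and `repr(err)` to `as_unicode(err)`, newly imported from the top-level calibre package: `str(err)` either returns raw bytes that blow up later when interpolated into a unicode format string, or raises `UnicodeEncodeError` outright when the message is non-ASCII unicode, while `repr(err)` buries the message in constructor noise. A rough, hypothetical approximation of the idea (the real `as_unicode` may differ in detail):

def as_unicode_sketch(obj, enc='utf-8'):
    # Hypothetical approximation of as_unicode(); the real helper lives in the
    # calibre package and may differ in detail. Goal: always return a unicode
    # object and never raise while converting.
    if isinstance(obj, unicode):
        return obj
    try:
        # unicode(err) decodes with ASCII by default and can raise
        # UnicodeDecodeError for non-ASCII byte-string messages.
        return unicode(obj)
    except Exception:
        try:
            return str(obj).decode(enc, 'replace')
        except Exception:
            return u'<unprintable object>'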


@@ -210,7 +210,7 @@ class RecursiveFetcher(object):
                 with closing(open_func(url, timeout=self.timeout)) as f:
                     data = response(f.read()+f.read())
                     data.newurl = f.geturl()
-            except urllib2.URLError, err:
+            except urllib2.URLError as err:
                 if hasattr(err, 'code') and responses.has_key(err.code):
                     raise FetchError, responses[err.code]
                 if getattr(err, 'reason', [0])[0] == 104 or \
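
The unchanged lines in this last hunk map HTTP status codes to readable messages via `httplib.responses` and treat errno 104 (ECONNRESET) as retryable. A self-contained sketch of that pattern under the same Python 2 / urllib2 assumptions; the `FetchError` class, the `fetch` helper and its retry limit are local stand-ins, not calibre's:

import urllib2
from httplib import responses  # Python 2 stdlib: maps HTTP status codes to reason phrases

class FetchError(Exception):
    # Local stand-in; calibre defines its own FetchError elsewhere.
    pass

def fetch(url, timeout=10.0, retries=1):
    # Hypothetical helper illustrating the error handling shown above.
    try:
        f = urllib2.urlopen(url, timeout=timeout)
        try:
            return f.read()
        finally:
            f.close()
    except urllib2.URLError as err:
        # HTTP errors carry a numeric status code; translate it to a phrase.
        if hasattr(err, 'code') and err.code in responses:
            raise FetchError(responses[err.code])
        # A connection reset (errno 104) is usually transient, so retry a
        # limited number of times before giving up.
        reason = getattr(err, 'reason', None)
        if getattr(reason, 'errno', None) == 104 and retries > 0:
            return fetch(url, timeout=timeout, retries=retries - 1)
        raise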