Making code more PEP8 friendly

This commit is contained in:
Alayn Gortazar 2012-04-23 01:17:10 +02:00
parent 6185fa1552
commit 211ff892b2

View File

@ -1,7 +1,7 @@
''' '''
readitlaterlist.com readitlaterlist.com
''' '''
__license__ = 'GPL v3' __license__ = 'GPL v3'
__copyright__ = ''' __copyright__ = '''
2010, Darko Miletic <darko.miletic at gmail.com> 2010, Darko Miletic <darko.miletic at gmail.com>
2011, Przemyslaw Kryger <pkryger at gmail.com> 2011, Przemyslaw Kryger <pkryger at gmail.com>
@ -10,7 +10,7 @@ __copyright__ = '''
2012, Alayn Gortazar <zutoin at gmail dot com> 2012, Alayn Gortazar <zutoin at gmail dot com>
''' '''
from operator import itemgetter from operator import itemgetter
from contextlib import closing from contextlib import closing
from calibre.web.feeds.news import BasicNewsRecipe from calibre.web.feeds.news import BasicNewsRecipe
from calibre.ebooks.BeautifulSoup import Tag from calibre.ebooks.BeautifulSoup import Tag
@ -19,6 +19,7 @@ import json
import urllib import urllib
import urllib2 import urllib2
class Readitlater(BasicNewsRecipe): class Readitlater(BasicNewsRecipe):
title = 'Read It Later' title = 'Read It Later'
__author__ = 'Darko Miletic, Przemyslaw Kryger, Keith Callenberg, tBunnyMan, Alayn Gortazar' __author__ = 'Darko Miletic, Przemyslaw Kryger, Keith Callenberg, tBunnyMan, Alayn Gortazar'
@ -35,7 +36,7 @@ class Readitlater(BasicNewsRecipe):
needs_subscription = True needs_subscription = True
mark_as_read_after_dl = False mark_as_read_after_dl = False
enhanced_version = True enhanced_version = True
KEY = '8e0p5f19A74emL3a47goP87m69d4VF8b' KEY = '8e0p5f19A74emL3a47goP87m69d4VF8b'
API_TEXT_INDEX = 'https://text.readitlaterlist.com/' API_TEXT_INDEX = 'https://text.readitlaterlist.com/'
API_INDEX = 'https://readitlaterlist.com/' API_INDEX = 'https://readitlaterlist.com/'
@ -59,16 +60,17 @@ class Readitlater(BasicNewsRecipe):
def get_auth_params(self):
    """Return the API authentication query-string fragment.

    Always includes the apikey; appends username/password parameters
    only when the corresponding credential is set.
    """
    parts = ['apikey=' + self.KEY]
    if self.username is not None:
        parts.append('&username=' + self.username)
    if self.password is not None:
        parts.append('&password=' + self.password)
    return ''.join(parts)
def parse_index(self): def parse_index(self):
# WARNING: Pre-alpha API; I just figured out this call's params. Surprisingly it worked! :)
index = self.API_INDEX + 'v3/get?' + self.get_auth_params() index = self.API_INDEX + 'v3/get?' + self.get_auth_params()
index += '&state=queue' index += '&state=queue'
index += '&count=' + str(self.max_articles_per_feed) index += '&count=' + str(self.max_articles_per_feed)
index += '&sort=oldest' index += '&sort=oldest'
open_func = getattr(self.browser, 'open_novisit', self.browser.open) open_func = getattr(self.browser, 'open_novisit', self.browser.open)
with closing(open_func(index)) as f: with closing(open_func(index)) as f:
@ -77,10 +79,10 @@ class Readitlater(BasicNewsRecipe):
raise RuntimeError('Could not fetch index!') raise RuntimeError('Could not fetch index!')
json_obj = json.loads(results) json_obj = json.loads(results)
if len(json_obj['list']) >= self.minimum_articles: if len(json_obj['list']) >= self.minimum_articles:
for item in json_obj['list'].iteritems(): for item in json_obj['list'].iteritems():
# TODO: This URL should be built by its corresponding API call in the future. # TODO: This URL should be built by its corresponding API call in the future.
# Actually it is not possible to get the Article View potential through an API call (12/04/2012) # Actually it is not possible to get the Article View potential through an API call (12/04/2012)
if self.enhanced_version: if self.enhanced_version:
dataurl = self.INDEX + 'a/x/getArticle.php?itemId=' + item[1]['item_id'] dataurl = self.INDEX + 'a/x/getArticle.php?itemId=' + item[1]['item_id']
@ -88,16 +90,16 @@ class Readitlater(BasicNewsRecipe):
dataurl = self.API_TEXT_INDEX + 'v2/text?' + self.get_auth_params() dataurl = self.API_TEXT_INDEX + 'v2/text?' + self.get_auth_params()
dataurl += '&url=' + item[1]['url'] dataurl += '&url=' + item[1]['url']
self.articles.append({ self.articles.append({
'title':item[1]['resolved_title'], 'title': item[1]['resolved_title'],
'date':item[1]['time_added'], 'date': item[1]['time_added'],
'url':dataurl, 'url': dataurl,
'description':item[1]['item_id'], 'description': item[1]['item_id'],
'sort_id':int(item[1]['sort_id']), 'sort_id': int(item[1]['sort_id']),
'real_url':item[1]['given_url'] 'real_url': item[1]['given_url']
}) })
else: else:
raise Exception("Not enough articles in RIL! Change minimum_articles or add more.") raise Exception("Not enough articles in RIL! Change minimum_articles or add more.")
self.articles = sorted(self.articles, key=itemgetter('sort_id')) self.articles = sorted(self.articles, key=itemgetter('sort_id'))
return [('Unread', self.articles)] return [('Unread', self.articles)]
@ -108,7 +110,7 @@ class Readitlater(BasicNewsRecipe):
self.images = {} self.images = {}
for image in json_obj['article']['images']: for image in json_obj['article']['images']:
self.images[image] = json_obj['article']['images'][image]['src'] self.images[image] = json_obj['article']['images'][image]['src']
title = '<h1>{title}</h1>'.format(title=json_obj['article']['title']) title = '<h1>{title}</h1>'.format(title=json_obj['article']['title'])
link = '<p>Original: <a href="{url}">{url}</a></p>'.format(url=json_obj['article']['resolvedUrl']) link = '<p>Original: <a href="{url}">{url}</a></p>'.format(url=json_obj['article']['resolvedUrl'])
html = link + title + json_obj['article']['article'] html = link + title + json_obj['article']['article']
else: else:
@ -121,37 +123,37 @@ class Readitlater(BasicNewsRecipe):
for key, url in self.images.iteritems(): for key, url in self.images.iteritems():
imgtag = Tag(soup, 'img') imgtag = Tag(soup, 'img')
imgtag['src'] = url imgtag['src'] = url
div = soup.find('div', attrs={'id':'RIL_IMG_' + key}) div = soup.find('div', attrs={'id': 'RIL_IMG_' + key})
div.insert(0, imgtag) div.insert(0, imgtag)
return soup return soup
def cleanup(self):
    """Post-download hook: optionally mark every fetched article as read.

    Builds a human-readable JSON string from the downloaded article URLs
    and submits it to the ReadItLater SEND::READ endpoint, but only when
    mark_as_read_after_dl is enabled.
    """
    if not self.mark_as_read_after_dl:
        return
    payload = self.createMarkList(self.articles)
    self.markAsRead(payload)
def createMarkList(self, articles):
    """Build the human-readable JSON body for the SEND::READ API method.

    Maps each article's position to an object holding its original URL:
        {
         "0": {"url": "..."},
         "1": {"url": "..."}
        }

    Fix: iterate over the *articles* argument instead of always reading
    self.articles — the parameter was accepted but silently ignored,
    which would break any caller passing a different list.
    """
    urls = [article['real_url'] for article in articles]
    items = ['"%d": {"url": "%s"}' % (n, u) for n, u in enumerate(urls)]
    return '{\n %s\n}' % (',\n '.join(items),)
def markAsRead(self, markList): def markAsRead(self, markList):
url = self.API_INDEX + 'v2/send' url = self.API_INDEX + 'v2/send'
values = { values = {
'username' : self.username, 'username': self.username,
'password' : self.password, 'password': self.password,
'apikey' : self.KEY, 'apikey': self.KEY,
'read' : markList 'read': markList
} }
data = urllib.urlencode(values) data = urllib.urlencode(values)
try: try:
print 'Calling ReadItLater API...' print 'Calling ReadItLater API...'
request = urllib2.Request(url,data) request = urllib2.Request(url, data)
response = urllib2.urlopen(request) response = urllib2.urlopen(request)
the_page = response.read() the_page = response.read()
print 'response =', response.code print 'response =', response.code