Making code more PEP8 friendly

Alayn Gortazar 2012-04-23 01:17:10 +02:00
parent 6185fa1552
commit 211ff892b2

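The diff below applies PEP 8 whitespace conventions to the recipe: a space after ':' in dict literals, a space after ',' in argument and tuple lists, and, judging by the hunk line counts, an extra blank line before the class and before parse_index. A minimal before/after sketch of the spacing rules, using illustrative values rather than the recipe's own data:

# Before: no whitespace after ':' or ',' (the pep8 checker reports these as E231)
values = {'username':'someuser', 'apikey':'somekey'}
items = ['"%d": {"url": "%s"}' % (n,u) for n,u in enumerate(['http://example.com'])]

# After: PEP 8 style, with a single space after each ':' and ','
values = {'username': 'someuser', 'apikey': 'somekey'}
items = ['"%d": {"url": "%s"}' % (n, u) for n, u in enumerate(['http://example.com'])]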

@@ -1,7 +1,7 @@
 '''
 readitlaterlist.com
 '''
-__license__   = 'GPL v3'
+__license__ = 'GPL v3'
 __copyright__ = '''
 2010, Darko Miletic <darko.miletic at gmail.com>
 2011, Przemyslaw Kryger <pkryger at gmail.com>
@@ -19,6 +19,7 @@ import json
 import urllib
 import urllib2

+
 class Readitlater(BasicNewsRecipe):
     title = 'Read It Later'
     __author__ = 'Darko Miletic, Przemyslaw Kryger, Keith Callenberg, tBunnyMan, Alayn Gortazar'
@@ -65,6 +66,7 @@ class Readitlater(BasicNewsRecipe):
         return auth_params
+
     def parse_index(self):
         # WARNING: Pre-alpha API, I just figured out this calls params. Surprisingly worked! :)
         index = self.API_INDEX + 'v3/get?' + self.get_auth_params()
         index += '&state=queue'
         index += '&count=' + str(self.max_articles_per_feed)
@@ -88,12 +90,12 @@ class Readitlater(BasicNewsRecipe):
                 dataurl = self.API_TEXT_INDEX + 'v2/text?' + self.get_auth_params()
                 dataurl += '&url=' + item[1]['url']
                 self.articles.append({
-                    'title':item[1]['resolved_title'],
-                    'date':item[1]['time_added'],
-                    'url':dataurl,
-                    'description':item[1]['item_id'],
-                    'sort_id':int(item[1]['sort_id']),
-                    'real_url':item[1]['given_url']
+                    'title': item[1]['resolved_title'],
+                    'date': item[1]['time_added'],
+                    'url': dataurl,
+                    'description': item[1]['item_id'],
+                    'sort_id': int(item[1]['sort_id']),
+                    'real_url': item[1]['given_url']
                 })
         else:
             raise Exception("Not enough articles in RIL! Change minimum_articles or add more.")
@@ -121,7 +123,7 @@ class Readitlater(BasicNewsRecipe):
         for key, url in self.images.iteritems():
             imgtag = Tag(soup, 'img')
             imgtag['src'] = url
-            div = soup.find('div', attrs={'id':'RIL_IMG_' + key})
+            div = soup.find('div', attrs={'id': 'RIL_IMG_' + key})
             div.insert(0, imgtag)
         return soup
@@ -135,23 +137,23 @@ class Readitlater(BasicNewsRecipe):
         urls = []
         for article in self.articles:
             urls.append(article['real_url'])
-        items = ['"%d": {"url": "%s"}' % (n,u) for n,u in enumerate(urls)]
+        items = ['"%d": {"url": "%s"}' % (n, u) for n, u in enumerate(urls)]
         s = '{\n %s\n}' % (',\n '.join(items),)
         return s

     def markAsRead(self, markList):
         url = self.API_INDEX + 'v2/send'
         values = {
-            'username' : self.username,
-            'password' : self.password,
-            'apikey' : self.KEY,
-            'read' : markList
+            'username': self.username,
+            'password': self.password,
+            'apikey': self.KEY,
+            'read': markList
         }
         data = urllib.urlencode(values)
         try:
             print 'Calling ReadItLater API...'
-            request = urllib2.Request(url,data)
+            request = urllib2.Request(url, data)
             response = urllib2.urlopen(request)
             the_page = response.read()
             print 'response =', response.code