Fix google reader recipes

This commit is contained in:
Kovid Goyal 2010-07-14 08:57:56 -06:00
parent c56a4a5aaf
commit 9a4b661ac6
2 changed files with 70 additions and 75 deletions

View File

@ -1,37 +1,35 @@
import urllib, re, mechanize
from calibre.web.feeds.recipes import BasicNewsRecipe
from calibre import __appname__


class GoogleReader(BasicNewsRecipe):
    """Download unread starred items and unread foldered feeds from a
    Google Reader account (authenticated via Google ClientLogin)."""

    title = 'Google Reader'
    description = 'This recipe fetches from your Google Reader account unread Starred items and unread Feeds you have placed in a folder via the manage subscriptions feature.'
    needs_subscription = True
    __author__ = 'davec, rollercoaster, Starson17'
    # Atom endpoint; a tag/state id is appended per feed in get_feeds()
    base_url = 'http://www.google.com/reader/atom/'
    oldest_article = 365
    max_articles_per_feed = 250
    # n= item count; xt= excludes items already marked read
    get_options = '?n=%d&xt=user/-/state/com.google/read' % max_articles_per_feed
    use_embedded_content = True

    def get_browser(self):
        """Return a browser authorized against Google ClientLogin.

        Exchanges username/password for an Auth token and attaches it as a
        'GoogleLogin auth=...' Authorization header on a fresh opener. If no
        credentials are configured, the default calibre browser is returned
        unauthenticated.
        """
        br = BasicNewsRecipe.get_browser(self)
        if self.username is not None and self.password is not None:
            request = urllib.urlencode([('Email', self.username), ('Passwd', self.password),
                ('service', 'reader'), ('accountType', 'HOSTED_OR_GOOGLE'), ('source', __appname__)])
            response = br.open('https://www.google.com/accounts/ClientLogin', request)
            # ClientLogin responds with SID=/LSID=/Auth= lines; only Auth is needed
            auth = re.search(r'Auth=(\S*)', response.read()).group(1)
            cookies = mechanize.CookieJar()
            br = mechanize.build_opener(mechanize.HTTPCookieProcessor(cookies))
            br.addheaders = [('Authorization', 'GoogleLogin auth='+auth)]
        return br

    def get_feeds(self):
        """Build (title, url) feed tuples from the account's tag list.

        Each tag id looks like 'user/NNN/label/Name'; the final path segment
        becomes the feed title and the full id (URL-quoted) is appended to
        base_url together with get_options.
        """
        feeds = []
        soup = self.index_to_soup('http://www.google.com/reader/api/0/tag/list')
        for tag in soup.findAll(True, attrs={'name':['id']}):
            url = tag.contents[0]
            feeds.append((re.search(r'/([^/]*)$', url).group(1),
                self.base_url + urllib.quote(url.encode('utf-8')) + self.get_options))
        return feeds

View File

@ -1,38 +1,35 @@
import urllib, re, mechanize
from calibre.web.feeds.recipes import BasicNewsRecipe
from calibre import __appname__


class GoogleReaderUber(BasicNewsRecipe):
    """Download ALL unread feeds from a Google Reader account, including
    uncategorized items, by mapping 'broadcast' to the full reading-list."""

    title = 'Google Reader uber'
    description = 'Fetches all feeds from your Google Reader account including the uncategorized items.'
    needs_subscription = True
    __author__ = 'davec, rollercoaster, Starson17'
    # Atom endpoint; a tag/state id is appended per feed in get_feeds()
    base_url = 'http://www.google.com/reader/atom/'
    oldest_article = 365
    max_articles_per_feed = 250
    # n= item count; xt= excludes items already marked read
    get_options = '?n=%d&xt=user/-/state/com.google/read' % max_articles_per_feed
    use_embedded_content = True

    def get_browser(self):
        """Return a browser authorized against Google ClientLogin.

        Exchanges username/password for an Auth token and attaches it as a
        'GoogleLogin auth=...' Authorization header on a fresh opener. If no
        credentials are configured, the default calibre browser is returned
        unauthenticated.
        """
        br = BasicNewsRecipe.get_browser(self)
        if self.username is not None and self.password is not None:
            request = urllib.urlencode([('Email', self.username), ('Passwd', self.password),
                ('service', 'reader'), ('accountType', 'HOSTED_OR_GOOGLE'), ('source', __appname__)])
            response = br.open('https://www.google.com/accounts/ClientLogin', request)
            # ClientLogin responds with SID=/LSID=/Auth= lines; only Auth is needed
            auth = re.search(r'Auth=(\S*)', response.read()).group(1)
            cookies = mechanize.CookieJar()
            br = mechanize.build_opener(mechanize.HTTPCookieProcessor(cookies))
            br.addheaders = [('Authorization', 'GoogleLogin auth='+auth)]
        return br

    def get_feeds(self):
        """Build (title, url) feed tuples from the account's tag list.

        Substituting 'reading-list' for 'broadcast' in the tag id pulls the
        whole reading list (everything, including uncategorized items) instead
        of only shared items. The final path segment becomes the feed title.
        """
        feeds = []
        soup = self.index_to_soup('http://www.google.com/reader/api/0/tag/list')
        for tag in soup.findAll(True, attrs={'name':['id']}):
            url = tag.contents[0].replace('broadcast','reading-list')
            feeds.append((re.search(r'/([^/]*)$', url).group(1),
                self.base_url + urllib.quote(url.encode('utf-8')) + self.get_options))
        return feeds