Update Jacobin

Kovid Goyal 2021-01-20 09:20:42 +05:30
parent 0d2fb54475
commit f64dfddd40


@@ -11,6 +11,12 @@ www.jacobinmag.com
 from calibre.web.feeds.news import BasicNewsRecipe


+def classes(classes):
+    q = frozenset(classes.split(' '))
+    return dict(attrs={
+        'class': lambda x: x and frozenset(x.split()).intersection(q)})
+
+
 class Jacobinmag(BasicNewsRecipe):
     title = 'Jacobin'
     __author__ = 'Darko Miletic'
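
Aside: the classes() helper added in this hunk builds a keyword spec for BeautifulSoup's find()/findAll(), matching any tag that shares at least one CSS class with the query string. A minimal standalone sketch of that behaviour (illustrative only; it copies the helper verbatim from the hunk above, and assumes stock BeautifulSoup4 with made-up HTML):

from bs4 import BeautifulSoup


def classes(classes):
    q = frozenset(classes.split(' '))
    return dict(attrs={
        'class': lambda x: x and frozenset(x.split()).intersection(q)})


html = ('<article class="po__article po--lead"><h1>Kept</h1></article>'
        '<div class="sidebar">Dropped</div>')
soup = BeautifulSoup(html, 'html.parser')

# The returned dict unpacks straight into find()/findAll(), so the same spec
# that drives keep_only_tags can be exercised by hand.
print(soup.find(**classes('po__article')).h1.string)  # -> Kept
print(soup.find(**classes('missing-class')))          # -> None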
@@ -29,12 +35,11 @@ class Jacobinmag(BasicNewsRecipe):
     issue_url = None
     PREFIX = 'https://www.jacobinmag.com'
     LOGIN = 'https://auth.jacobinmag.com/mini_profile?redirect=https%3A%2F%2Fwww.jacobinmag.com%2F'
-    masthead_url = 'https://www.jacobinmag.com/wp-content/themes/boukman/images/banner/type.svg'
     extra_css = """
         body{font-family: Antwerp, 'Times New Roman', Times, serif}
         img{margin-top:1em; margin-bottom: 1em; display:block}
         .entry-dek,.entry-author{font-family: Hurme-No3, Futura, sans-serif}
     """
     conversion_options = {
         'comment': description,
@@ -44,56 +49,50 @@ class Jacobinmag(BasicNewsRecipe):
     }
     remove_tags = [
-        dict(name=['meta', 'link']),
-        dict(name='div', attrs={'class': 'entry-bottom'}),
+        dict(id=['post-header-share', 'post-print']),
+        dict(name='form'),
+        dict(name='div', attrs={'data-app': 'share_buttons'}),
     ]
-    keep_only_tags = [dict(attrs={'class': ['entry-header', 'entry-content']})]
+    keep_only_tags = [
+        classes('po__article')
+    ]

     def parse_index(self):
         ans = []
         articles = []
-        lurl = self.get_issue()
-        if lurl:
-            soup = self.index_to_soup(lurl)
-            # Find cover url
-            myimg = soup.find('img', attrs={'id': 'front-cover'})
-            if myimg:
-                self.cover_url = self.image_url_processor(None, myimg['src'])
-            # End find cover url
-            # Configure series
-            self.conversion_options.update({'series': 'Jacobin'})
-
-            # Get series title
-            feedtitle = 'Articles'
-            title = soup.find('div', attrs={'id': 'iss-title-name'})
-            if title:
-                feedtitle = self.tag_to_string(title)
-
-            # Scrape article links
-            for section in soup.findAll('div', attrs={'class': 'section-articles'}):
-                for art in section.findAll('article'):
-                    urlbase = art.find('h3', attrs={'class': 'iss-hed'})
-                    if urlbase and urlbase.a[
-                        'href'
-                    ] != 'https://www.jacobinmag.com/subscribe/':
-                        url = urlbase.a['href']
-                        title = self.tag_to_string(urlbase)
-                        desc = ''
-                        descbase = urlbase = art.find(
-                            'p', attrs={'class': 'iss-dek'}
-                        )
-                        if descbase:
-                            desc = self.tag_to_string(descbase)
-                        articles.append({
-                            'title': title,
-                            'url': url,
-                            'description': desc
-                        })
-            ans.append((feedtitle, articles))
+        soup = self.index_to_soup('https://www.jacobinmag.com/store/issues')
+        lurl = 'https://jacobinmag.com' + soup.find('a', text='View Issue')['href']
+        feedtitle = 'Articles'
+        self.log('Loading issue from', lurl)
+        soup = self.index_to_soup(lurl)
+        # Find cover url
+        di = soup.find('figure', attrs={'class': lambda x: x and '__cover' in x})
+        img = di.find('img')
+        self.cover_url = img['src']
+        # End find cover url
+        # Get series title
+        title = soup.find('h1', attrs={'class': lambda x: x and '__heading' in x})
+        feedtitle = self.tag_to_string(title)
+        # Scrape article links
+        for section in soup.findAll('div', attrs={'class': lambda x: x and '__content' in x}):
+            for art in section.findAll('article'):
+                h1 = art.find('h1')
+                a = h1.find('a')
+                title = self.tag_to_string(a)
+                url = 'https://jacobinmag.com' + a['href']
+                desc = ''
+                p = art.find('p')
+                if p:
+                    desc = self.tag_to_string(p)
+                articles.append({'title': title, 'url': url, 'description': desc})
+                self.log(title, 'at', url)
+                if desc:
+                    self.log('\t', desc)
+        if articles:
+            ans.append((feedtitle, articles))
         return ans

     def get_browser(self):
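
Aside: the reworked parse_index() above still returns the shape BasicNewsRecipe expects, a list of (feed title, article list) pairs in which every article is a dict carrying 'title', 'url' and 'description'. A tiny illustrative value (sample data only, not scraped from the site):

index = [
    ('Jacobin: Some Issue', [
        {
            'title': 'An article headline',
            'url': 'https://jacobinmag.com/2021/01/an-article',
            'description': 'One-line dek pulled from the issue page, if present.',
        },
    ]),
]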
@@ -111,11 +110,3 @@ class Jacobinmag(BasicNewsRecipe):
         if div:
             br.open(div['data-redirect'])
         return br
-
-    def get_issue(self):
-        issue = None
-        soup = self.index_to_soup(self.PREFIX)
-        mag = soup.find('li', attrs={'class': 'magazine'})
-        if mag:
-            issue = mag.a['href']
-        return issue