Update wash_post.recipe

This commit is contained in:
unkn0w7n 2023-10-09 22:32:20 +05:30
parent b22c7fddf8
commit 6d1846abe7

View File

@ -5,17 +5,13 @@ www.washingtonpost.com
''' '''
from calibre.web.feeds.news import BasicNewsRecipe from calibre.web.feeds.news import BasicNewsRecipe
from html5_parser import parse
import json
def classes(classes):
    # Build a BeautifulSoup attrs-matcher selecting tags whose class
    # attribute shares at least one name with the space-separated list given.
    wanted = frozenset(classes.split(' '))

    def overlaps(value):
        # Truthy only when the tag has a class attribute and it intersects
        # the wanted set (empty intersection / missing attribute are falsy).
        return value and frozenset(value.split()).intersection(wanted)

    return {'attrs': {'class': overlaps}}
class TheWashingtonPost(BasicNewsRecipe): class TheWashingtonPost(BasicNewsRecipe):
title = 'The Washington Post' title = 'The Washington Post'
__author__ = 'Darko Miletic' __author__ = 'Darko Miletic, unkn0wn'
description = 'Leading source for news, video and opinion on politics, business, world and national news, science, travel, entertainment and more. Our local coverage includes reporting on education, crime, weather, traffic, real estate, jobs and cars for DC, Maryland and Virginia. Offering award-winning opinion writing, entertainment information and restaurant reviews.' # noqa description = 'Leading source for news, video and opinion on politics, business, world and national news, science, travel, entertainment and more. Our local coverage includes reporting on education, crime, weather, traffic, real estate, jobs and cars for DC, Maryland and Virginia. Offering award-winning opinion writing, entertainment information and restaurant reviews.' # noqa
publisher = 'The Washington Post Company' publisher = 'The Washington Post Company'
category = 'news, politics, USA' category = 'news, politics, USA'
@ -30,16 +26,11 @@ class TheWashingtonPost(BasicNewsRecipe):
publication_type = 'newspaper' publication_type = 'newspaper'
remove_attributes = ['style', 'width', 'height'] remove_attributes = ['style', 'width', 'height']
keep_only_tags = [ extra_css = '''
dict(name=['h1', 'figure']), .img { text-align:center; font-size:small; }
dict(attrs={'data-qa': 'lede-art'}), .auth { font-weight:bold; font-size:small; }
classes('byline article-body'), .time { font-size:small; color: #202020; }
] '''
remove_tags = [
dict(name=['meta', 'link', 'svg']),
classes('inline-video author-tooltip author-image powa-wrapper'),
dict(attrs={'data-qa': ['article-body-ad', 'subscribe-promo', 'interstitial-link-wrapper']}),
]
# Official feeds: https://www.washingtonpost.com/discussions/2018/10/12/washington-post-rss-feeds/ # Official feeds: https://www.washingtonpost.com/discussions/2018/10/12/washington-post-rss-feeds/
feeds = [ feeds = [
@ -61,9 +52,36 @@ class TheWashingtonPost(BasicNewsRecipe):
(u'Commanders', u'http://feeds.washingtonpost.com/rss/sports/redskins'), (u'Commanders', u'http://feeds.washingtonpost.com/rss/sports/redskins'),
] ]
def preprocess_raw_html(self, raw, *a):
    # Rebuild the article as minimal HTML from the JSON payload the site
    # embeds in its <script id="__NEXT_DATA__"> tag, bypassing the page DOM.
    root = parse(raw)
    m = root.xpath('//script[@id="__NEXT_DATA__"]')

    data = json.loads(m[0].text)
    data = data['props']['pageProps']['globalContent']

    title = '<h1>' + data['headlines']['basic'] + '</h1>'
    # 'description' is absent on some articles; fall back to an empty
    # subhead instead of raising KeyError.
    subhead = '<h3>' + data.get('description', {}).get('basic', '') + '</h3>'

    author = ''
    if 'credits' in data:
        # publish_date looks like an ISO timestamp; the slice keeps only the
        # date part. .get() guards articles that carry credits but no date.
        author = (
            '<div><span class="auth">' + 'By '
            + ', '.join(x['name'] for x in data['credits']['by'])
            + '</span> | <span class="time">'
            + data.get('publish_date', '')[:-14] + '</span></div>'
        )

    body = ''
    for x in data['content_elements']:
        if x['type'] == 'text':
            body += '<p>' + x['content'] + '</p>'
        elif x['type'] == 'video':
            if 'promo_image' in x:
                # Videos can't be embedded; show the promo still with the
                # (optional) video description as its caption.
                body += '<p><div class="img"><img src="{}"><div>{}</div></div></p>'.format(
                    x['promo_image']['url'], x.get('description', {}).get('basic', '')
                )
        elif x['type'] == 'image':
            # Caption field is optional on image elements; default to ''.
            body += '<p><div class="img"><img src="{}"><div>{}</div></div></p>'.format(
                x['url'], x.get('credits_caption_display', '')
            )
    return '<html><body><div>' + title + subhead + author + body + '</div></body></html>'
def preprocess_html(self, soup):
    # Route every image through the WaPo resizing proxy at a 540px width,
    # so the e-book gets reasonably sized (not thumbnail or full-res) images.
    resizer = 'https://www.washingtonpost.com/wp-apps/imrs.php?src={}&w=540'
    for image in soup.findAll('img', attrs={'src': True}):
        image['src'] = resizer.format(image['src'])
    return soup