Mirror of https://github.com/kovidgoyal/calibre.git, synced 2025-07-09 03:04:10 -04:00

Commit 258a9791fe (parent e5524059f6): cleanup previous PR
@@ -268,13 +268,21 @@ class Economist(BasicNewsRecipe):
         dt = dt.strftime('%b %d, %Y')
         self.timefmt = ' [' + dt + ']'
         # get local issue cover, title
-        region = json.loads(self.index_to_soup('https://geolocation-db.com/json', raw=True))['country_code']
+        try:
+            region = json.loads(self.index_to_soup('https://geolocation-db.com/json', raw=True))['country_code']
+        except Exception:
+            region = ''
         for cov in data['image']['cover']:
             if region in cov['regionsAllowed']:
                 self.description = cov['headline']
                 self.cover_url = cov['url']['canonical'].replace('economist.com/',
                     'economist.com/cdn-cgi/image/width=960,quality=80,format=auto/')
-        self.log('Got cover:', self.cover_url, '\n', self.description)
+                break
+        else:
+            self.description = data['image']['cover'][0]['headline']
+            self.cover_url = data['image']['cover'][0]['url']['canonical'].replace('economist.com/',
+                'economist.com/cdn-cgi/image/width=960,quality=80,format=auto/')
+        self.log('Got cover:', self.cover_url, '\n', self.description)
 
         feeds_dict = defaultdict(list)
         for part in safe_dict(data, "hasPart", "parts"):
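Note: the reworked block wraps the geolocation lookup in try/except so a failed request falls back to an empty region, and relies on Python's for/else: the first cover whose regionsAllowed contains the region wins and break skips the else clause, while the else branch (which runs only when the loop finishes without breaking) falls back to the first cover in the list. A minimal standalone sketch of that selection pattern, using made-up cover data rather than the recipe's real JSON:

# Sketch of the for/else selection used above; the cover data and region
# value are illustrative, not taken from the Economist feed.
covers = [
    {'regionsAllowed': ['GB', 'EU'], 'headline': 'Europe edition', 'url': 'https://example.com/eu.jpg'},
    {'regionsAllowed': ['US', 'CA'], 'headline': 'US edition', 'url': 'https://example.com/us.jpg'},
]
region = 'US'

for cov in covers:
    if region in cov['regionsAllowed']:
        cover_url, headline = cov['url'], cov['headline']
        break  # a match was found, so the else clause below is skipped
else:
    # runs only when the loop never breaks, e.g. when region == ''
    cover_url, headline = covers[0]['url'], covers[0]['headline']

print(headline, cover_url)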
@@ -302,7 +310,7 @@ class Economist(BasicNewsRecipe):
 
     def preprocess_html(self, soup):
         for img in soup.findAll('img', src=True):
             img['src'] = img['src'].replace('economist.com/',
                 'economist.com/cdn-cgi/image/width=600,quality=80,format=auto/')
         return soup
 
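Note: both hunks rewrite economist.com image URLs to go through Cloudflare's image-resizing endpoint, whose URL form is economist.com/cdn-cgi/image/<options>/<original path>; the cover uses width=960 and article images width=600. A small sketch of that string rewrite in isolation (the helper name and sample URL are illustrative, not part of the recipe):

# Sketch of the URL rewrite applied to covers (width=960) and article
# images (width=600); mirrors the str.replace() calls in the recipe.
def resize_via_cdn(url, width):
    return url.replace(
        'economist.com/',
        'economist.com/cdn-cgi/image/width={},quality=80,format=auto/'.format(width))

print(resize_via_cdn('https://www.economist.com/img/b/cover.jpg', 960))
# -> https://www.economist.com/cdn-cgi/image/width=960,quality=80,format=auto/img/b/cover.jpg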
@@ -268,13 +268,21 @@ class Economist(BasicNewsRecipe):
         dt = dt.strftime('%b %d, %Y')
         self.timefmt = ' [' + dt + ']'
         # get local issue cover, title
-        region = json.loads(self.index_to_soup('https://geolocation-db.com/json', raw=True))['country_code']
+        try:
+            region = json.loads(self.index_to_soup('https://geolocation-db.com/json', raw=True))['country_code']
+        except Exception:
+            region = ''
         for cov in data['image']['cover']:
             if region in cov['regionsAllowed']:
                 self.description = cov['headline']
                 self.cover_url = cov['url']['canonical'].replace('economist.com/',
                     'economist.com/cdn-cgi/image/width=960,quality=80,format=auto/')
-        self.log('Got cover:', self.cover_url, '\n', self.description)
+                break
+        else:
+            self.description = data['image']['cover'][0]['headline']
+            self.cover_url = data['image']['cover'][0]['url']['canonical'].replace('economist.com/',
+                'economist.com/cdn-cgi/image/width=960,quality=80,format=auto/')
+        self.log('Got cover:', self.cover_url, '\n', self.description)
 
         feeds_dict = defaultdict(list)
         for part in safe_dict(data, "hasPart", "parts"):
@@ -302,7 +310,7 @@ class Economist(BasicNewsRecipe):
 
     def preprocess_html(self, soup):
         for img in soup.findAll('img', src=True):
             img['src'] = img['src'].replace('economist.com/',
                 'economist.com/cdn-cgi/image/width=600,quality=80,format=auto/')
         return soup
 
@@ -10,7 +10,6 @@ from urllib.parse import quote, urlencode
 from calibre import replace_entities
 from calibre.ebooks.BeautifulSoup import NavigableString, Tag
 from calibre.ptempfile import PersistentTemporaryFile
-from calibre.utils.date import parse_only_date
 from calibre.web.feeds.news import BasicNewsRecipe
 from html5_parser import parse
 from lxml import etree
@@ -270,7 +269,7 @@ class Economist(BasicNewsRecipe):
 
     def preprocess_html(self, soup):
         for img in soup.findAll('img', src=True):
             img['src'] = img['src'].replace('economist.com/',
                 'economist.com/cdn-cgi/image/width=600,quality=80,format=auto/')
         return soup
 
@@ -1,5 +1,6 @@
 from calibre.web.feeds.news import BasicNewsRecipe
 
+
 class lexfridman(BasicNewsRecipe):
     title = 'Lex Fridman Podcast'
     description = (
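Note: the only change to the Lex Fridman recipe is an added blank line before the class, which reads like a PEP 8 E302 fix (two blank lines expected before a top-level class). A hedged sketch of checking just that rule with pycodestyle, assuming it is installed; the recipe path below is an assumption for illustration:

# Illustrative check for E302 ("expected 2 blank lines") only; the file path
# is hypothetical, not necessarily the real recipe filename.
import pycodestyle

style = pycodestyle.StyleGuide(select=['E302'])
report = style.check_files(['recipes/lex_fridman_podcast.recipe'])
print(report.total_errors, 'E302 violation(s) found')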