Kovid Goyal 2021-03-27 16:11:55 +05:30
commit bdfb061ac9
2 changed files with 8 additions and 28 deletions

View File

@@ -264,20 +264,10 @@ class Economist(BasicNewsRecipe):
         return ans
 
     def economist_parse_index(self, soup):
-        img = None
-        if edition_date:
-            archive_url = "https://www.economist.com/weeklyedition/archive?year={}".format(edition_date[:4])
-            archive = self.index_to_soup(archive_url)
-            q = edition_date.replace('-', '')
-            q = '/print-covers/{}_'.format(q)
-            img = archive.find('img', srcset=lambda x: x and q in x)
-        else:
-            archive = self.index_to_soup("https://www.economist.com/weeklyedition/archive")
-            div = archive.find(attrs={'class': 'edition-teaser__image'})
-            if div is not None:
-                img = div.find('img', srcset=True)
-        if img:
-            self.cover_url = img['srcset'].split(',')[-1].split()[0]
+        script_tag = soup.select_one("script#__NEXT_DATA__")
+        if script_tag is not None:
+            data = json.loads(script_tag.string)
+            self.cover_url = data['props']['pageProps']['content']['image']['main']['url']['canonical']
             self.log('Got cover:', self.cover_url)
         feeds = []
         for section in soup.findAll(**classes('layout-weekly-edition-section')):

View File

@@ -264,20 +264,10 @@ class Economist(BasicNewsRecipe):
         return ans
 
     def economist_parse_index(self, soup):
-        img = None
-        if edition_date:
-            archive_url = "https://www.economist.com/weeklyedition/archive?year={}".format(edition_date[:4])
-            archive = self.index_to_soup(archive_url)
-            q = edition_date.replace('-', '')
-            q = '/print-covers/{}_'.format(q)
-            img = archive.find('img', srcset=lambda x: x and q in x)
-        else:
-            archive = self.index_to_soup("https://www.economist.com/weeklyedition/archive")
-            div = archive.find(attrs={'class': 'edition-teaser__image'})
-            if div is not None:
-                img = div.find('img', srcset=True)
-        if img:
-            self.cover_url = img['srcset'].split(',')[-1].split()[0]
+        script_tag = soup.select_one("script#__NEXT_DATA__")
+        if script_tag is not None:
+            data = json.loads(script_tag.string)
+            self.cover_url = data['props']['pageProps']['content']['image']['main']['url']['canonical']
             self.log('Got cover:', self.cover_url)
         feeds = []
         for section in soup.findAll(**classes('layout-weekly-edition-section')):
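
The change in both files replaces srcset scraping of the cover archive with a direct read of the page's embedded Next.js state: the weekly edition page carries its data as JSON inside a script#__NEXT_DATA__ tag, and the cover URL sits under props -> pageProps -> content -> image -> main -> url -> canonical. A minimal standalone sketch of the same lookup outside the recipe follows; the requests/BeautifulSoup usage and the weeklyedition URL are assumptions for illustration, since the recipe itself goes through BasicNewsRecipe.index_to_soup.

import json

import requests
from bs4 import BeautifulSoup

# Fetch the weekly edition index page (URL assumed for this sketch).
html = requests.get('https://www.economist.com/weeklyedition').text
soup = BeautifulSoup(html, 'html.parser')

# The cover URL now comes from the embedded Next.js data blob rather than
# an <img srcset> on the cover archive page.
script_tag = soup.select_one('script#__NEXT_DATA__')
if script_tag is not None:
    data = json.loads(script_tag.string)
    # Key path as used in the diff; it may break if the site reworks its page data.
    cover_url = data['props']['pageProps']['content']['image']['main']['url']['canonical']
    print('Got cover:', cover_url)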