Kovid Goyal 2024-11-21 09:52:33 +05:30
commit 1c55b093e5
2 changed files with 10 additions and 12 deletions

View File

@@ -217,7 +217,7 @@ class Economist(BasicNewsRecipe):
         'archive': {
             'short': 'Past Edition fails?',
             'long': 'enter yes, this will fetch content from wayback machine.',
-            'default': 'No',
+            'default': 'no',
         },
     }
@@ -303,12 +303,11 @@ class Economist(BasicNewsRecipe):
         try:
             if edition_date and isinstance(edition_date, str):
                 if not content_id:
-                    raise ValueError(edition_date, ' not found.')
+                    self.log(edition_date, ' not found, trying web edition.')
+                    self.from_archive = True
+                    return self.parse_web_index()
             raw = self.index_to_soup(url, raw=True)
-        except ValueError:
-            self.from_archive = True
-            return self.parse_web_index()
-        else:
+        except Exception:
             raise ValueError('Server is not reachable, try again after some time.')
         ans = self.economist_parse_index(raw)
         return self.economist_return_index(ans)
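
Read together, the two hunks change the past-edition failure path from raising a ValueError to logging and falling back to the web edition, and broaden the except clause so any fetch failure produces one clear error. Below is a minimal standalone sketch of the resulting control flow; the stubbed helpers and the parse_index signature are stand-ins (the real parse_index takes no arguments and the real helpers live in the recipe and BasicNewsRecipe), so this is illustrative, not calibre code.

class FallbackSketch:
    from_archive = False

    def log(self, *args):
        print(*args)

    def parse_web_index(self):
        # Stand-in for the live-site parser the recipe falls back to.
        return 'web edition index'

    def index_to_soup(self, url, raw=False):
        # Stand-in for the network fetch; here it always fails.
        raise OSError('simulated fetch failure')

    def parse_index(self, url, edition_date=None, content_id=None):
        try:
            if edition_date and isinstance(edition_date, str):
                if not content_id:
                    # Old behaviour: raise ValueError. New behaviour: log,
                    # mark the archive path and fall back to the web edition.
                    self.log(edition_date, ' not found, trying web edition.')
                    self.from_archive = True
                    return self.parse_web_index()
            raw = self.index_to_soup(url, raw=True)
        except Exception:
            # Catching Exception (not just ValueError) turns any fetch error
            # into a single readable message instead of a raw traceback.
            raise ValueError('Server is not reachable, try again after some time.')
        return raw


print(FallbackSketch().parse_index('https://example.org', '2024-11-15', content_id=None))
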

View File

@@ -217,7 +217,7 @@ class Economist(BasicNewsRecipe):
         'archive': {
             'short': 'Past Edition fails?',
             'long': 'enter yes, this will fetch content from wayback machine.',
-            'default': 'No',
+            'default': 'no',
         },
     }
@@ -303,12 +303,11 @@ class Economist(BasicNewsRecipe):
         try:
             if edition_date and isinstance(edition_date, str):
                 if not content_id:
-                    raise ValueError(edition_date, ' not found.')
+                    self.log(edition_date, ' not found, trying web edition.')
+                    self.from_archive = True
+                    return self.parse_web_index()
             raw = self.index_to_soup(url, raw=True)
-        except ValueError:
-            self.from_archive = True
-            return self.parse_web_index()
-        else:
+        except Exception:
             raise ValueError('Server is not reachable, try again after some time.')
         ans = self.economist_parse_index(raw)
         return self.economist_return_index(ans)
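
For context on the 'default': 'no' change: calibre exposes user-supplied values for recipe_specific_options to the recipe at run time, and lower-casing the default keeps the comparison uniform however the user types the value. The exact check in the Economist recipe is not part of this diff, so the helper below is a hypothetical sketch, not the recipe's code.

def wants_archive(recipe_specific_options):
    # 'recipe_specific_options' stands in for self.recipe_specific_options,
    # the dict of user-supplied option values available to the recipe.
    value = recipe_specific_options.get('archive', 'no')
    # Lower-casing makes 'No', 'no' and 'YES' behave consistently, which is
    # why a lowercase default keeps the comparison uniform.
    return str(value).lower() == 'yes'


print(wants_archive({}))                  # False: falls back to the default 'no'
print(wants_archive({'archive': 'yes'}))  # True: user opted into the wayback machine
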