From 519791f780c0ebc0ba5163fbd820ff1c5d6c4fdb Mon Sep 17 00:00:00 2001
From: unkn0w7n <51942695+unkn0w7n@users.noreply.github.com>
Date: Sun, 19 Jan 2025 17:35:52 +0530
Subject: [PATCH] update economist
---
recipes/economist.recipe | 3 ++-
recipes/economist_free.recipe | 3 ++-
recipes/hindu.recipe | 6 +++---
3 files changed, 7 insertions(+), 5 deletions(-)
diff --git a/recipes/economist.recipe b/recipes/economist.recipe
index 6326830086..b562e7e501 100644
--- a/recipes/economist.recipe
+++ b/recipes/economist.recipe
@@ -123,7 +123,8 @@ def load_article_from_web_json(raw):
data = json.loads(raw)['props']['pageProps']['cp2Content']
     body += f'<div style="color: red; font-size:small; font-weight:bold;">{data.get("flyTitle", "")}</div>'
     body += f'<h1>{data["headline"]}</h1>'
-    body += f'<div style="font-style: italic; color:#202020;">{data.get("rubric", "")}</div>'
+    if data.get("rubric"):
+        body += f'<div style="font-style: italic; color:#202020;">{data.get("rubric", "")}</div>'
try:
date = data['dateModified']
except Exception:
diff --git a/recipes/economist_free.recipe b/recipes/economist_free.recipe
index 6326830086..b562e7e501 100644
--- a/recipes/economist_free.recipe
+++ b/recipes/economist_free.recipe
@@ -123,7 +123,8 @@ def load_article_from_web_json(raw):
data = json.loads(raw)['props']['pageProps']['cp2Content']
     body += f'<div style="color: red; font-size:small; font-weight:bold;">{data.get("flyTitle", "")}</div>'
     body += f'<h1>{data["headline"]}</h1>'
-    body += f'<div style="font-style: italic; color:#202020;">{data.get("rubric", "")}</div>'
+    if data.get("rubric"):
+        body += f'<div style="font-style: italic; color:#202020;">{data.get("rubric", "")}</div>'
try:
date = data['dateModified']
except Exception:
diff --git a/recipes/hindu.recipe b/recipes/hindu.recipe
index beef09b995..c1719195f8 100644
--- a/recipes/hindu.recipe
+++ b/recipes/hindu.recipe
@@ -98,13 +98,13 @@ class TheHindu(BasicNewsRecipe):
raw = self.index_to_soup(url, raw=True)
soup = self.index_to_soup(raw)
ans = self.hindu_parse_index(soup)
- cover = soup.find(attrs={'class':'hindu-ad'})
- if cover:
- self.cover_url = cover.img['src']
if not ans:
raise ValueError(
'The Hindu Newspaper is not published Today.'
)
+ cover = soup.find(attrs={'class':'hindu-ad'})
+ if cover:
+ self.cover_url = cover.img['src']
if mag_url:
self.log('\nFetching Magazine')
soup = self.index_to_soup(mag_url)