Update science_news.recipe

Pull articles from the latest magazine issue instead of the RSS feed.
This commit is contained in:
yodha8 2022-06-20 00:29:06 -07:00 committed by GitHub
parent c9cb5fc703
commit fb3c9e0d1f
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

View File

@ -1,20 +1,18 @@
#!/usr/bin/env python
__license__ = 'GPL v3'
'''
sciencenews.org
'''
from calibre.web.feeds.news import BasicNewsRecipe
import datetime
import re
# Calibre recipe class: downloads the latest issue of Science News magazine.
class ScienceNewsIssue(BasicNewsRecipe):
title = u'Science News'
__author__ = u'Darko Miletic, Sujata Raman and Starson17'
# NOTE(review): two `description` assignments appear here (diff residue);
# at class-creation time the second assignment wins.
description = u'''Science News is an award-winning bi-weekly
newsmagazine covering the most important research in all fields of science.
This recipe downloads the last 2 weeks of articles.'''
description = u"Science News is an award-winning bi-weekly newsmagazine covering the most important research in all fields of science. This recipe downloads all the articles from the latest issue."
category = u'Science, Technology, News'
publisher = u'Society for Science & the Public'
# Issues are published every two weeks, so never look further back.
oldest_article = 14
@ -43,15 +41,55 @@ class ScienceNewsIssue(BasicNewsRecipe):
)
]
feeds = [(u"Articles", u'https://www.sciencenews.org/feed')]
def parse_index(self):
    """Build the download index from the latest issue's magazine page.

    Returns a one-section index: [("Articles", [{"url": ..., "title": ...}, ...])].
    """
    # Magazine landing pages are addressed by issue date,
    # e.g. https://www.sciencenews.org/sn-magazine/june-18-2022
    issue_date = self._get_mag_date()
    issue_url = (
        f"https://www.sciencenews.org/sn-magazine/"
        f"{issue_date:%B}-{issue_date.day}-{issue_date.year}"
    ).lower()

    soup = self.index_to_soup(issue_url)
    link_pat = re.compile("https://www.sciencenews.org/article/")

    seen = set()
    articles = []
    for anchor in soup.find_all(href=link_pat):
        title = anchor.text.strip()
        link = anchor["href"]
        # Skip image-only anchors (no title text) and duplicate links.
        if not title or link in seen:
            continue
        seen.add(link)
        articles.append({"url": link, "title": title})

    return [("Articles", articles)]
def _get_mag_date(self):
"""Return date of latest magazine issue.
It is published every 2 weeks."""
d = datetime.date(2022, 6, 18)
t = datetime.date.today()
ld = None
while d <= t:
ld = d
d += datetime.timedelta(days=14)
return ld
def get_cover_url(self):
ld = self._get_mag_date()
url = ld.strftime(
"https://www.sciencenews.org/wp-content/uploads/%Y/%m/%m%d%y_cover.jpg"
)