Mirror of https://github.com/kovidgoyal/calibre.git (synced 2025-07-08 02:34:06 -04:00)
Update science_news.recipe
Pull articles from the latest magazine issue instead of the RSS feed.
parent c9cb5fc703
commit fb3c9e0d1f
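For context on the approach: rather than reading the RSS feed, the recipe now works out the date of the most recent biweekly issue and scrapes that issue's magazine page. Below is a minimal standalone sketch of that idea, reusing the anchor date and URL pattern that appear in the diff; the helper name latest_issue_url is illustrative only and is not part of the recipe.

import datetime


def latest_issue_url(anchor=datetime.date(2022, 6, 18)):
    # Walk forward in 14-day steps from a known issue date (the anchor used
    # in the diff below) to find the most recent issue on or before today.
    today = datetime.date.today()
    latest = anchor
    d = anchor
    while d <= today:
        latest = d
        d += datetime.timedelta(days=14)
    # Issue pages look like .../sn-magazine/june-18-2022; lowercasing the
    # whole URL puts the month name into that form, as the recipe does.
    url = f"https://www.sciencenews.org/sn-magazine/{latest:%B}-{latest.day}-{latest.year}"
    return url.lower()


print(latest_issue_url())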
science_news.recipe

@@ -1,20 +1,18 @@
 #!/usr/bin/env python
 
 __license__ = 'GPL v3'
 
 '''
 sciencenews.org
 '''
 
 from calibre.web.feeds.news import BasicNewsRecipe
 import datetime
+import re
 
 class ScienceNewsIssue(BasicNewsRecipe):
     title = u'Science News'
-    __author__ = u'Darko Miletic, Sujata Raman and Starson17'
-    description = u'''Science News is an award-winning bi-weekly
-        newsmagazine covering the most important research in all fields of science.
-        This recipe downloads the last 2 weeks of articles.'''
+    description = u"Science News is an award-winning bi-weekly newsmagazine covering the most important research in all fields of science. This recipe downloads all the articles from the latest issue."
     category = u'Science, Technology, News'
     publisher = u'Society for Science & the Public'
     oldest_article = 14
@@ -43,15 +41,55 @@ class ScienceNewsIssue(BasicNewsRecipe):
         )
     ]
 
-    feeds = [(u"Articles", u'https://www.sciencenews.org/feed')]
+    def parse_index(self):
 
-    def get_cover_url(self):
-        d = datetime.date(2022, 3, 26)
+        # Get URL of latest mag page
+        ld = self._get_mag_date()
+        url = f"https://www.sciencenews.org/sn-magazine/{ld:%B}-{ld.day}-{ld.year}"
+        url = url.lower()
+
+        # Get articles
+        soup = self.index_to_soup(url)
+        re_article = re.compile("https://www.sciencenews.org/article/")
+        stories = []
+        past_urls = set()
+        for sec in soup.find_all(href=re_article):
+
+            article_url = sec["href"]
+            article_title = sec.text.strip()
+
+            # Ignore image URLs which do not have text title
+            if article_title == "":
+                continue
+
+            # Ignore if link is a duplicate
+            if article_url in past_urls:
+                continue
+
+            past_urls.add(article_url)
+            article_info = {
+                "url": article_url,
+                "title": article_title,
+            }
+            stories.append(article_info)
+
+        index = [("Articles", stories),]
+        return index
+
+    def _get_mag_date(self):
+        """Return date of latest magazine issue.
+        It is published every 2 weeks."""
+
+        d = datetime.date(2022, 6, 18)
         t = datetime.date.today()
         ld = None
         while d <= t:
             ld = d
             d += datetime.timedelta(days=14)
+        return ld
+
+    def get_cover_url(self):
+        ld = self._get_mag_date()
         url = ld.strftime(
             "https://www.sciencenews.org/wp-content/uploads/%Y/%m/%m%d%y_cover.jpg"
         )
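As a usage note: parse_index in a calibre BasicNewsRecipe returns a list of (section title, list of article dicts) pairs, each dict carrying at least a "title" and a "url". That is the shape the new code builds; a rough illustration follows, with placeholder article values that are invented for the example.

# Placeholder illustration of the structure parse_index() returns;
# the headlines and URLs here are made up, not real data.
index = [
    (
        "Articles",
        [
            {"title": "Example headline", "url": "https://www.sciencenews.org/article/example-slug"},
            {"title": "Another headline", "url": "https://www.sciencenews.org/article/another-slug"},
        ],
    ),
]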