Mirror of https://github.com/kovidgoyal/calibre.git, synced 2025-07-07 10:14:46 -04:00.
This commit: "Merge branch 'master' of https://github.com/unkn0w7n/calibre" (commit 1fb12b929e).
@ -1,40 +1,42 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
'''
|
"""
|
||||||
nautil.us
|
nautil.us
|
||||||
'''
|
"""
|
||||||
|
|
||||||
from calibre.web.feeds.news import BasicNewsRecipe, classes
|
from calibre.web.feeds.news import BasicNewsRecipe, classes
|
||||||
|
|
||||||
|
|
||||||
class Nautilus(BasicNewsRecipe):
    """Calibre news recipe for Nautilus Magazine (nautil.us)."""

    title = 'Nautilus Magazine'
    language = 'en_US'
    __author__ = 'unkn0wn'
    oldest_article = 30  # days; overridable via recipe_specific_options['days']
    max_articles_per_feed = 50
    description = (
        'Nautilus is a different kind of science magazine. Our stories take you into the depths'
        ' of science and spotlight its ripples in our lives and cultures. We believe any subject in science,'
        ' no matter how complex, can be explained with clarity and vitality.'
    )
    no_stylesheets = True
    use_embedded_content = False
    masthead_url = 'https://upload.wikimedia.org/wikipedia/commons/thumb/1/1b/Nautilus.svg/640px-Nautilus.svg.png'
    # Strip fixed dimensions so images scale to the reader's screen.
    remove_attributes = ['height', 'width']
    ignore_duplicate_articles = {'title', 'url'}
    remove_empty_feeds = True
    extra_css = """
        .article-list_item-byline{font-size:small;}
        blockquote{color:#404040; text-align:center;}
        #fig-c{font-size:small;}
        em{color:#202020;}
        .breadcrumb{color:gray; font-size:small;}
        .article-author{font-size:small;}
    """

    # User-tunable options exposed in calibre's fetch-news dialog.
    recipe_specific_options = {
        'days': {
            'short': 'Oldest article to download from this news source. In days ',
            'long': 'For example, 0.5, gives you articles from the past 12 hours',
            'default': str(oldest_article),
        }
    }
|
||||||
@ -48,8 +50,9 @@ class Nautilus(BasicNewsRecipe):
|
|||||||
|
|
||||||
remove_tags = [
|
remove_tags = [
|
||||||
classes(
|
classes(
|
||||||
'article-action-list article-bottom-newsletter_box main-post-comments-toggle-wrap main-post-comments-wrapper'
|
'article-action-list article-bottom-newsletter_box article-ad article-ad__cta '
|
||||||
' social-share supported-one article-collection_box browsi-ad'
|
'main-post-comments-toggle-wrap main-post-comments-wrapper primis-ad '
|
||||||
|
'social-share supported-one article-collection_box browsi-ad'
|
||||||
)
|
)
|
||||||
]
|
]
|
||||||
|
|
||||||
@ -80,13 +83,25 @@ class Nautilus(BasicNewsRecipe):
|
|||||||
|
|
||||||
def get_cover_url(self):
|
def get_cover_url(self):
|
||||||
soup = self.index_to_soup('https://nautil.us/shop/category/issues/')
|
soup = self.index_to_soup('https://nautil.us/shop/category/issues/')
|
||||||
a = soup.find('a', attrs={'href':lambda x: x and x.startswith('https://nautil.us/shop/issues/issue-')})
|
a = soup.find(
|
||||||
|
'a',
|
||||||
|
attrs={
|
||||||
|
'href': lambda x: x
|
||||||
|
and x.startswith('https://nautil.us/shop/issues/issue-')
|
||||||
|
},
|
||||||
|
)
|
||||||
if a:
|
if a:
|
||||||
listing_url = a['href']
|
listing_url = a['href']
|
||||||
listing_soup = self.index_to_soup(listing_url)
|
listing_soup = self.index_to_soup(listing_url)
|
||||||
listing = listing_soup.find('div', {'class': 'product'})
|
listing = listing_soup.find('div', {'class': 'product'})
|
||||||
if listing:
|
if listing:
|
||||||
imgs = listing.find_all('img', attrs={'src':lambda x: x and x.startswith('https://assets.nautil.us/sites/3/nautilus/')})
|
imgs = listing.find_all(
|
||||||
|
'img',
|
||||||
|
attrs={
|
||||||
|
'src': lambda x: x
|
||||||
|
and x.startswith('https://assets.nautil.us/sites/3/nautilus/')
|
||||||
|
},
|
||||||
|
)
|
||||||
if len(imgs) > 1:
|
if len(imgs) > 1:
|
||||||
self.cover_url = imgs[1]['src'].split('?')[0]
|
self.cover_url = imgs[1]['src'].split('?')[0]
|
||||||
return getattr(self, 'cover_url', self.cover_url)
|
return getattr(self, 'cover_url', self.cover_url)
|
||||||
@ -95,12 +110,21 @@ class Nautilus(BasicNewsRecipe):
|
|||||||
for img in soup.findAll('img', attrs={'data-src': True}):
|
for img in soup.findAll('img', attrs={'data-src': True}):
|
||||||
img['src'] = img['data-src'].split('?')[0]
|
img['src'] = img['data-src'].split('?')[0]
|
||||||
for figcaption in soup.findAll('figcaption'):
|
for figcaption in soup.findAll('figcaption'):
|
||||||
figcaption['id']='fig-c'
|
figcaption['id'] = 'fig-c'
|
||||||
for ul in soup.findAll('ul', attrs={'class':
|
for ul in soup.findAll(
|
||||||
['breadcrumb', 'article-list_item-byline', 'channel-article-author', 'article-author']}):
|
'ul',
|
||||||
|
attrs={
|
||||||
|
'class': [
|
||||||
|
'breadcrumb',
|
||||||
|
'article-list_item-byline',
|
||||||
|
'channel-article-author',
|
||||||
|
'article-author',
|
||||||
|
]
|
||||||
|
},
|
||||||
|
):
|
||||||
ul.name = 'span'
|
ul.name = 'span'
|
||||||
for li in ul.findAll('li'):
|
for li in ul.findAll('li'):
|
||||||
li.name = 'p'
|
li.name = 'p'
|
||||||
for img in soup.findAll('img', attrs={'srcset':True}):
|
for img in soup.findAll('img', attrs={'srcset': True}):
|
||||||
img['src'] = img['srcset'].split(',')[-1].split()[0]
|
img['src'] = img['srcset'].split(',')[-1].split()[0]
|
||||||
return soup
|
return soup
|
||||||
|
Loading…
x
Reference in New Issue
Block a user