Update DW group of recipes

add recipe_specific_options
unkn0w7n 2024-07-22 12:16:47 +05:30
parent 4c7255854d
commit a1a97acb07
8 changed files with 127 additions and 16 deletions
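Every recipe in this group gains the same two additions: a recipe_specific_options dict declaring a user-tunable 'days' option, and an __init__ override that converts the supplied value into oldest_article. Below is a minimal standalone sketch of that pattern as applied in each file; the class name, title and feed choice here are placeholders for illustration only, not part of this commit.

from calibre.web.feeds.news import BasicNewsRecipe


class ExampleDW(BasicNewsRecipe):
    # Placeholder recipe, used only to illustrate the pattern added in this commit.
    title = 'Example DW feed'
    oldest_article = 2  # default age limit, in days

    # Declares a per-recipe option named 'days'; calibre shows the 'short'/'long'
    # strings as help text and hands any user-supplied value back as a string.
    recipe_specific_options = {
        'days': {
            'short': 'Oldest article to download from this news source. In days ',
            'long': 'For example, 0.5, gives you articles from the past 12 hours',
            'default': str(oldest_article)
        }
    }

    def __init__(self, *args, **kwargs):
        BasicNewsRecipe.__init__(self, *args, **kwargs)
        # self.recipe_specific_options maps option names to user-chosen values;
        # override the class-level default only when a value was actually given.
        d = self.recipe_specific_options.get('days')
        if d and isinstance(d, str):
            self.oldest_article = float(d)

    feeds = [
        ('Politika', 'http://rss.dw-world.de/rdf/rss-bos-pol'),
    ]

The option surfaces wherever calibre lets you configure a news download (for example the scheduler dialog in the GUI and, depending on your calibre version, from the command line), and the recipe always receives the value as a string, hence the float() conversion.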

View File

@@ -23,13 +23,27 @@ class DeutscheWelle_bs(BasicNewsRecipe):
    keep_only_tags = [
        dict(name='article')
    ]

    recipe_specific_options = {
        'days': {
            'short': 'Oldest article to download from this news source. In days ',
            'long': 'For example, 0.5, gives you articles from the past 12 hours',
            'default': str(oldest_article)
        }
    }

    def __init__(self, *args, **kwargs):
        BasicNewsRecipe.__init__(self, *args, **kwargs)
        d = self.recipe_specific_options.get('days')
        if d and isinstance(d, str):
            self.oldest_article = float(d)

    remove_tags = [
        dict(name=['footer', 'source']),
        dict(attrs={'data-tracking-name':'sharing-icons-inline'}),
        classes('kicker advertisement vjs-wrapper')
    ]

    feeds = [
        (u'Politika', u'http://rss.dw-world.de/rdf/rss-bos-pol'),
        (u'Evropa', u'http://rss.dw-world.de/rdf/rss-bos-eu'),

View File

@@ -21,6 +21,20 @@ class DeutscheWelle(BasicNewsRecipe):
        dict(name='article')
    ]

    recipe_specific_options = {
        'days': {
            'short': 'Oldest article to download from this news source. In days ',
            'long': 'For example, 0.5, gives you articles from the past 12 hours',
            'default': str(oldest_article)
        }
    }

    def __init__(self, *args, **kwargs):
        BasicNewsRecipe.__init__(self, *args, **kwargs)
        d = self.recipe_specific_options.get('days')
        if d and isinstance(d, str):
            self.oldest_article = float(d)

    remove_tags = [
        dict(name=['footer', 'source']),
        dict(attrs={'data-tracking-name':'sharing-icons-inline'}),

View File

@@ -15,7 +15,21 @@ class DeutscheWelle_en(BasicNewsRecipe):
    remove_empty_feeds = True
    ignore_duplicate_articles = {'title', 'url'}
    remove_attributes = ['height', 'width', 'style']

    recipe_specific_options = {
        'days': {
            'short': 'Oldest article to download from this news source. In days ',
            'long': 'For example, 0.5, gives you articles from the past 12 hours',
            'default': str(oldest_article)
        }
    }

    def __init__(self, *args, **kwargs):
        BasicNewsRecipe.__init__(self, *args, **kwargs)
        d = self.recipe_specific_options.get('days')
        if d and isinstance(d, str):
            self.oldest_article = float(d)

    keep_only_tags = [
        dict(name='article')
    ]

View File

@@ -17,10 +17,24 @@ class DeutscheWelle_es(BasicNewsRecipe):
    remove_attributes = ['height', 'width', 'style']

    recipe_specific_options = {
        'days': {
            'short': 'Oldest article to download from this news source. In days ',
            'long': 'For example, 0.5, gives you articles from the past 12 hours',
            'default': str(oldest_article)
        }
    }

    def __init__(self, *args, **kwargs):
        BasicNewsRecipe.__init__(self, *args, **kwargs)
        d = self.recipe_specific_options.get('days')
        if d and isinstance(d, str):
            self.oldest_article = float(d)

    keep_only_tags = [
        dict(name='article')
    ]

    remove_tags = [
        dict(name=['footer', 'source']),
        dict(attrs={'data-tracking-name':'sharing-icons-inline'}),

@@ -40,7 +54,7 @@ class DeutscheWelle_es(BasicNewsRecipe):
        ('Conozca Alemania', 'http://rss.dw-world.de/rdf/rss-sp-con')
    ]

    def preprocess_html(self, soup):
        for img in soup.findAll('img', srcset=True):
            img['src'] = img['srcset'].split()[6]

View File

@@ -16,20 +16,34 @@ class DeutscheWelle_hr(BasicNewsRecipe):
    remove_empty_feeds = True
    masthead_url = 'http://www.dw-world.de/skins/std/channel1/pics/dw_logo1024.gif'
    remove_javascript = True
    ignore_duplicate_articles = {'title', 'url'}
    remove_attributes = ['height', 'width', 'style']

    recipe_specific_options = {
        'days': {
            'short': 'Oldest article to download from this news source. In days ',
            'long': 'For example, 0.5, gives you articles from the past 12 hours',
            'default': str(oldest_article)
        }
    }

    def __init__(self, *args, **kwargs):
        BasicNewsRecipe.__init__(self, *args, **kwargs)
        d = self.recipe_specific_options.get('days')
        if d and isinstance(d, str):
            self.oldest_article = float(d)

    keep_only_tags = [
        dict(name='article')
    ]

    remove_tags = [
        dict(name=['footer', 'source']),
        dict(attrs={'data-tracking-name':'sharing-icons-inline'}),
        classes('kicker advertisement vjs-wrapper')
    ]

    feeds = [
        (u'Svijet', u'http://rss.dw-world.de/rdf/rss-cro-svijet'),
        (u'Europa', u'http://rss.dw-world.de/rdf/rss-cro-eu'),

View File

@@ -15,12 +15,25 @@ class DeutscheWelle_pt(BasicNewsRecipe):
    publication_type = 'newsportal'
    remove_empty_feeds = True
    masthead_url = 'http://www.dw-world.de/skins/std/channel1/pics/dw_logo1024.gif'
    remove_javascript = True
    ignore_duplicate_articles = {'title', 'url'}
    remove_attributes = ['height', 'width', 'style']

    recipe_specific_options = {
        'days': {
            'short': 'Oldest article to download from this news source. In days ',
            'long': 'For example, 0.5, gives you articles from the past 12 hours',
            'default': str(oldest_article)
        }
    }

    def __init__(self, *args, **kwargs):
        BasicNewsRecipe.__init__(self, *args, **kwargs)
        d = self.recipe_specific_options.get('days')
        if d and isinstance(d, str):
            self.oldest_article = float(d)

    def preprocess_html(self, soup):
        for img in soup.findAll('img', srcset=True):
            img['src'] = img['srcset'].split()[6]

@@ -29,7 +42,7 @@ class DeutscheWelle_pt(BasicNewsRecipe):
    keep_only_tags = [
        dict(name='article')
    ]

    remove_tags = [
        dict(name=['footer', 'source']),
        dict(attrs={'data-tracking-name':'sharing-icons-inline'}),

View File

@@ -16,7 +16,21 @@ class DeutscheWelle(BasicNewsRecipe):
    remove_empty_feeds = True
    ignore_duplicate_articles = {'title', 'url'}
    remove_attributes = ['height', 'width', 'style']

    recipe_specific_options = {
        'days': {
            'short': 'Oldest article to download from this news source. In days ',
            'long': 'For example, 0.5, gives you articles from the past 12 hours',
            'default': str(oldest_article)
        }
    }

    def __init__(self, *args, **kwargs):
        BasicNewsRecipe.__init__(self, *args, **kwargs)
        d = self.recipe_specific_options.get('days')
        if d and isinstance(d, str):
            self.oldest_article = float(d)

    def preprocess_html(self, soup):
        for img in soup.findAll('img', srcset=True):
            img['src'] = img['srcset'].split()[6]

View File

@@ -18,7 +18,21 @@ class DeutscheWelle_sr(BasicNewsRecipe):
    remove_javascript = True
    ignore_duplicate_articles = {'title', 'url'}
    remove_attributes = ['height', 'width', 'style']

    recipe_specific_options = {
        'days': {
            'short': 'Oldest article to download from this news source. In days ',
            'long': 'For example, 0.5, gives you articles from the past 12 hours',
            'default': str(oldest_article)
        }
    }

    def __init__(self, *args, **kwargs):
        BasicNewsRecipe.__init__(self, *args, **kwargs)
        d = self.recipe_specific_options.get('days')
        if d and isinstance(d, str):
            self.oldest_article = float(d)

    def preprocess_html(self, soup):
        for img in soup.findAll('img', srcset=True):
            img['src'] = img['srcset'].split()[6]

@@ -27,13 +41,13 @@ class DeutscheWelle_sr(BasicNewsRecipe):
    keep_only_tags = [
        dict(name='article')
    ]

    remove_tags = [
        dict(name=['footer', 'source']),
        dict(attrs={'data-tracking-name':'sharing-icons-inline'}),
        classes('kicker advertisement vjs-wrapper')
    ]

    feeds = [
        (u'Politika', u'http://rss.dw-world.de/rdf/rss-ser-pol'),
        (u'Srbija', u'http://rss.dw-world.de/rdf/rss-ser-pol-ser'),