Mirror of https://github.com/kovidgoyal/calibre.git (synced 2025-07-09 03:04:10 -04:00)
Remove Berlin Policy Journal
parent 2f4f786688
commit fea8c7988b
@@ -1,110 +0,0 @@
#!/usr/bin/env python
# vim:fileencoding=utf-8
# License: GPLv3 Copyright: 2016, Aimylios <aimylios at gmx.de>

from __future__ import unicode_literals, division, absolute_import, print_function

'''
berlinpolicyjournal.com
'''

import re
import time
from calibre.web.feeds.news import BasicNewsRecipe


class BerlinPolicyJournal(BasicNewsRecipe):
    title = 'Berlin Policy Journal'
    __author__ = 'Aimylios'
    description = 'Articles from berlinpolicyjournal.com'
    publisher = 'Deutsche Gesellschaft für Auswärtige Politik e.V.'
    publication_type = 'magazine'
    language = 'en_DE'

    oldest_article = 50
    max_articles_per_feed = 30
    simultaneous_downloads = 5
    no_stylesheets = True
    remove_javascript = True

    conversion_options = {'smarten_punctuation': True,
                          'publisher': publisher}

    INDEX = 'http://berlinpolicyjournal.com/'
    masthead_url = INDEX + 'IP/wp-content/uploads/2015/04/logo_bpj_header.gif'

    keep_only_tags = [
        dict(name='article')
    ]

    remove_tags = [
        dict(name='div', attrs={
             'class': ['hidden', 'meta-count', 'meta-share']}),
        dict(name='span', attrs={'class': 'ava-auth'}),
        dict(name='img', attrs={'alt': re.compile('_store_120px_width$')}),
        dict(name='img', attrs={'alt': re.compile('^bpj_app_')}),
        dict(name='img', attrs={'alt': re.compile('^BPJ-Montage_')}),
        dict(name=['link', 'footer', 'br'])
    ]

    remove_attributes = ['sizes', 'width', 'height', 'align']

    extra_css = 'h1 {font-size: 1.6em; text-align: left} \
                 .entry-subtitle {font-style: italic; margin-bottom: 1em} \
                 .wp-caption {margin-top: 1em} \
                 .wp-caption-text {font-size: 0.6em; margin-top: 0em}'
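
    # parse_index() (calibre API) builds the feed list by scraping the site
    # menu instead of using RSS: the first submenu yields the magazine
    # section, the second the blogs, and each category index is walked for
    # up to two pages.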
    def parse_index(self):
        soup = self.index_to_soup(self.INDEX)
        img_div = soup.find('div', {'id': 'text-2'})
        self.cover_url = img_div.find('img', src=True)['src']
        menu = soup.find('ul', {'id': re.compile('menu-ip')})
        submenus = menu.findAll(
            'li', {'class': re.compile('item-has-children')})
        mag = submenus[0].find('li')
        mag_name = self.tag_to_string(mag.a)
        mag_url = mag.a['href']
        categories = [{'name': mag_name, 'url': mag_url, 'type': 'magazine'}]
        for blog in submenus[1].findAll('li'):
            blog_name = self.tag_to_string(blog.a)
            blog_url = blog.a['href']
            categories.append(
                {'name': blog_name, 'url': blog_url, 'type': 'blog'})
        feeds = []
        for cat in categories:
            cat['articles'] = []
            for i in ['1', '2']:
                soup = self.index_to_soup(cat['url'] + '/page/' + i)
                for div in soup.findAll('div', {'class': 'post-box-big'}):
                    timestamp = time.strptime(div.find('time')['datetime'][
                        :15], '%Y-%m-%dT%H:%M')
                    age = (time.time() - time.mktime(timestamp)) / (24 * 3600)
                    if age > self.oldest_article and cat['type'] == 'blog':
                        continue
                    article_title = self.tag_to_string(
                        div.find('h3', {'class': 'entry-title'}).a)
                    article_url = div.find(
                        'h3', {'class': 'entry-title'}).a['href']
                    article_date = type(u'')(time.strftime(
                        ' [%a, %d %b %H:%M]', timestamp))
                    article_desc = self.tag_to_string(
                        div.find('div', {'class': 'i-summary'}).p)
                    cat['articles'].append({'title': article_title,
                                            'url': article_url,
                                            'date': article_date,
                                            'description': article_desc})
                if soup.find('div', {'class': 'pagination'}) is None:
                    break
            if cat['articles']:
                feeds.append((cat['name'], cat['articles']))
        return feeds
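
    # postprocess_html() (calibre API) runs on each fetched article; here it
    # collapses the 'meta-info' block into a single "date | authors" line.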
    def postprocess_html(self, soup, first_fetch):
        # clean up formatting of author(s) and date
        div = soup.find('div', {'class': 'meta-info'})
        authors = ''
        for entry in div.findAll('span', {'class': 'entry-author'}):
            authors = authors + entry.a.span.renderContents().decode('utf-8').strip() + ', '
        date = div.find('time').renderContents().decode('utf-8').strip()
        div.replaceWith('<div>' + date + ' | ' + authors[:-2] + '<br/></div>')
        return soup
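
For context, the one non-obvious step in the removed recipe is the age filter in parse_index(): blog posts older than oldest_article days are skipped. A minimal standalone sketch of that calculation, using a hypothetical datetime value already trimmed to the form the recipe passes to strptime:

    import time

    oldest_article = 50  # days; mirrors the recipe attribute
    datetime_attr = '2016-03-01T12:30'  # hypothetical <time datetime=...> value
    timestamp = time.strptime(datetime_attr, '%Y-%m-%dT%H:%M')
    age_days = (time.time() - time.mktime(timestamp)) / (24 * 3600)
    print('skip' if age_days > oldest_article else 'keep')

Should the recipe ever be restored, it can be exercised locally with calibre's recipe test mode, e.g. ebook-convert berlin_policy_journal.recipe out.epub --test (the filename here is illustrative).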