Mirror of https://github.com/kovidgoyal/calibre.git, synced 2025-07-09 03:04:10 -04:00
Merge branch 'patch-1' of https://github.com/aareet/calibre
This commit is contained in commit 6eb871d823

recipes/caravan_magazine_hindi.recipe (new file, 103 lines added)
@@ -0,0 +1,103 @@
#!/usr/bin/env python
# vim:fileencoding=utf-8
# License: GPLv3 Copyright: 2015, Kovid Goyal <kovid at kovidgoyal.net>

import json

from mechanize import Request

from calibre.web.feeds.recipes import BasicNewsRecipe


def classes(classes):
    q = frozenset(classes.split(' '))
    return dict(attrs={
        'class': lambda x: x and frozenset(x.split()).intersection(q)})
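# Illustrative only, not part of the committed file: classes('post-title cover')
# evaluates to dict(attrs={'class': <callable>}), so, assuming `soup` is a parsed
# page, soup.find(**classes('post-title cover')) matches any tag whose class
# attribute shares at least one name with the space separated list given above.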
class CaravanMagazineHindi(BasicNewsRecipe):

    title = 'Caravan Magazine in Hindi'
    __author__ = 'Kovid Goyal, Gobelinus, Aareet Mahadevan'
    description = 'An Indian Journal of politics and culture'
    language = 'hi'
    timefmt = ' [%b, %Y]'
    encoding = 'utf-8'
    needs_subscription = 'optional'

    no_stylesheets = True

    keep_only_tags = [
        classes('post-title short-desc author-details cover'),
        dict(itemprop='articleBody'),
    ]
    remove_tags = [
        dict(name='meta'),
        dict(attrs={'class': ['share-with']}),
    ]

    def get_browser(self, *args, **kw):
        br = BasicNewsRecipe.get_browser(self, *args, **kw)
        if not self.username or not self.password:
            return br
        data = json.dumps({'email': self.username, 'name': '', 'password': self.password})
        if not isinstance(data, bytes):
            data = data.encode('utf-8')
        rq = Request(
            url='https://caravanmagazine.in/api/users/login',
            data=data,
            headers={
                'Accept': 'application/json, text/plain, */*',
                'Origin': 'https://caravanmagazine.in',
                'Referer': 'https://caravanmagazine.in/',
                'Content-type': 'application/json;charset=UTF-8',
            },
            method='POST'
        )
        res = br.open(rq).read()
        res = res.decode('utf-8')
        self.log('Login request response: {}'.format(res))
        res = json.loads(res)
        if res['code'] != 200 or res['message'] != "Login success":
            raise ValueError('Login failed, check your username and password')
        return br
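    # Illustrative only, not part of the committed file: get_browser() above treats
    # the login as successful only when the decoded JSON response looks roughly like
    #     {"code": 200, "message": "Login success", ...}
    # and raises ValueError otherwise, so bad credentials abort the download early.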
    # To parse the article TOC
    def parse_index(self):
        base_url = 'https://www.caravanmagazine.in/'
        soup = self.index_to_soup('{0}magazine'.format(base_url))

        # find current issue cover
        feeds = []
        sections = soup.find(attrs={'class': lambda x: x and 'current-magazine-issue' in x.split()}).find(
            attrs={'class': lambda x: x and 'sections' in x.split()})
        for section in sections.findAll(attrs={'class': lambda x: x and 'section' in x.split()}):
            a = section.find('a')
            section_title = self.tag_to_string(a)
            self.log('\nSection:', section_title)
            articles = []
            for article in section.findAll('article'):
                details = article.find(attrs={'class': lambda x: x and 'details' in x.split()})
                pre = details.find(attrs={'class': lambda x: x and 'pre-heading' in x.split()})
                if pre is not None:
                    pre.extract()
                a = details.find('a')
                url = base_url + a['href'].lstrip('/') + '-hindi'
                title = self.tag_to_string(a)
                desc = self.tag_to_string(details.find('div'))
                self.log('\t', title, url)
                articles.append({'title': title, 'description': desc, 'url': url})
            if articles:
                feeds.append((section_title, articles))

        return feeds
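    # Illustrative only, not part of the committed file: parse_index() above returns
    # data shaped like
    #     [(section_title, [{'title': ..., 'description': ..., 'url': ...}, ...]), ...]
    # i.e. a list of (section title, article list) pairs, which is what
    # BasicNewsRecipe expects from parse_index() instead of feed URLs.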
    def preprocess_html(self, soup):
        for div in soup.findAll(itemprop='image'):
            for img in div.findAll('img'):
                img['src'] = div['content']
        return soup


calibre_most_common_ua = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.87 Safari/537.36'
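A quick way to exercise a recipe like this locally (assuming calibre's command line tools are installed and the file is saved as caravan_magazine_hindi.recipe) is something along the lines of:

    ebook-convert caravan_magazine_hindi.recipe .epub --test -vv

and, because needs_subscription is 'optional', subscription credentials can additionally be passed with --username and --password.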