Mirror of https://github.com/kovidgoyal/calibre.git
TVXS by Hargikas

This commit is contained in:
    parent 69373bb236
    commit 0ee352b189
recipes/tvxs.recipe    (new file, 58 lines added)
@@ -0,0 +1,58 @@
+# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai
+
+from calibre.web.feeds.recipes import BasicNewsRecipe
+
+class TVXS(BasicNewsRecipe):
+    title                  = 'TVXS'
+    __author__             = 'hargikas'
+    description            = 'News from Greece'
+    max_articles_per_feed  = 100
+    oldest_article         = 100
+    publisher              = 'TVXS'
+    category               = 'news, GR'
+    language               = 'el'
+    encoding               = None
+    #conversion_options     = { 'linearize_tables': True}
+    no_stylesheets         = True
+    remove_tags_before     = dict(name='h1',attrs={'class':'print-title'})
+    remove_tags_after      = dict(name='div',attrs={'class':'field field-type-relevant-content field-field-relevant-articles'})
+    remove_attributes      = ['width', 'src', 'header', 'footer']
+
+
+    feeds = [(u'Ελλάδα', 'http://tvxs.gr/feeds/2/feed.xml'),
+        (u'Κόσμος', 'http://tvxs.gr/feeds/5/feed.xml'),
+        (u'Τοπικά Νέα', 'http://tvxs.gr/feeds/5363/feed.xml'),
+        (u'Sci Tech', 'http://tvxs.gr/feeds/26/feed.xml'),
+        (u'Αθλητικά', 'http://tvxs.gr/feeds/243/feed.xml'),
+        (u'Internet & ΜΜΕ', 'http://tvxs.gr/feeds/32/feed.xml'),
+        (u'Καλά Νέα', 'http://tvxs.gr/feeds/914/feed.xml'),
+        (u'Απόψεις', 'http://tvxs.gr/feeds/1109/feed.xml'),
+        (u'Πολιτισμός', 'http://tvxs.gr/feeds/1317/feed.xml'),
+        (u'Greenlife', 'http://tvxs.gr/feeds/3/feed.xml'),
+        (u'Ιστορία', 'http://tvxs.gr/feeds/1573/feed.xml'),
+        (u'Χιούμορ', 'http://tvxs.gr/feeds/692/feed.xml')]
+
+
+    def print_version(self, url):
+        import urllib2, urlparse, StringIO, gzip
+
+        fp = urllib2.urlopen(url)
+        data = fp.read()
+        if fp.info()['content-encoding'] == 'gzip':
+            gzip_data = StringIO.StringIO(data)
+            gzipper = gzip.GzipFile(fileobj=gzip_data)
+            data = gzipper.read()
+        fp.close()
+
+        pos_1 = data.find('<a href="/print/')
+        if pos_1 == -1:
+            return url
+        pos_2 = data.find('">', pos_1)
+        if pos_2 == -1:
+            return url
+
+        pos_1 += len('<a href="')
+        new_url = data[pos_1:pos_2]
+
+        print_url = urlparse.urljoin(url, new_url)
+        return print_url
@@ -112,6 +112,8 @@ _extra_lang_codes = {
         'zh_TW' : _('Traditional Chinese'),
         'en'    : _('English'),
         'en_AU' : _('English (Australia)'),
+        'en_JP' : _('English (Japan)'),
+        'en_DE' : _('English (Germany)'),
         'en_BG' : _('English (Bulgaria)'),
         'en_NZ' : _('English (New Zealand)'),
         'en_CA' : _('English (Canada)'),