From 9c1d7e060980954707c70087f64a41ccabb38562 Mon Sep 17 00:00:00 2001
From: Kovid Goyal
Date: Tue, 31 Aug 2010 10:02:30 -0600
Subject: [PATCH] Field and stream blog by Tony Stegall

---
 resources/images/news/fstream.png | Bin 0 -> 636 bytes
 resources/recipes/fstream.recipe  |  64 ++++++++++++++++++++++++++++++
 2 files changed, 64 insertions(+)
 create mode 100644 resources/images/news/fstream.png
 create mode 100644 resources/recipes/fstream.recipe

diff --git a/resources/images/news/fstream.png b/resources/images/news/fstream.png
new file mode 100644
index 0000000000000000000000000000000000000000..a9fc6b4aae77e4f3c9d164b4ddd8795d8965568d
GIT binary patch
literal 636
zcmeAS@N?(olHy`uVBq!ia0vp^0wB!61|;P_|4#%`Y)RhkE)4%caKYZ?lYt_f1s;*b
zK-vS0-A-oPfdtD69Mgd`SU*F|v9*U87#Np&x;TbdoL)Lvw?HaTq}6^#M~Pa~jz1a--+3$@2ApS4FiJY4N{Iy=*9liY*&%YFhvfo`$Ui>z
8wYj1A7gUd#CxPd!p0$CTD7BucbyK
;VjQhc>&M&iLs?8Uph^T&5U{A6T!4bBY^*}=lYvpd_pZ_FWBFkF8ovK#;SJvi+ZV-
zy(I4JetWv`XxOyC;B1Z8g_>n0b-kAhWzTH-@qWFGiD3kf;|7iYO~Pu2pP02(Jh8gZ
ztg!RQMSl~XUSRC0mbgZgq$HN4S|t~y0x1R~149E{0}EYa!w>^YD^n9IBU4=iGb;mw
tNjg$fP&DM`r(~v8A~YC;7#RcASs9u^Gz5NbC;@6<@O1TaS?83{1OO_o{AvII

literal 0
HcmV?d00001

diff --git a/resources/recipes/fstream.recipe b/resources/recipes/fstream.recipe
new file mode 100644
index 0000000000..f6d56042d1
--- /dev/null
+++ b/resources/recipes/fstream.recipe
@@ -0,0 +1,64 @@
+from calibre.web.feeds.news import BasicNewsRecipe
+
+class FIELDSTREAM(BasicNewsRecipe):
+    title = 'Field and Stream'
+    __author__ = 'Starson17 and Tonythebookworm'
+    description = 'Hunting and Fishing and Gun Talk'
+    language = 'en'
+    no_stylesheets = True
+    publisher = 'Starson17 and Tonythebookworm'
+    category = 'food recipes, hunting, fishing, guns'
+    use_embedded_content = False
+    no_stylesheets = True
+    oldest_article = 24
+    remove_javascript = True
+    remove_empty_feeds = True
+    masthead_url = 'http://www.fieldandstream.com/sites/all/themes/fs/logo.png'
+    cover_url = 'http://www.arrowheadflyangler.com/Portals/1/Articles/FieldStream/Field%20and%20Stream%20March%20Fishing%20Edition%20Article%20Cover.jpg'
+    # recursions = 0
+    max_articles_per_feed = 10
+    INDEX = 'http://www.fieldandstream.com'
+
+    keep_only_tags = [dict(name='div', attrs={'class':['interior-main']})
+                      ]
+    remove_tags = [dict(name='div', attrs={'id':['comments']})]
+
+    def parse_index(self):
+        feeds = []
+        for title, url in [
+            (u"Wild Chef", u"http://www.fieldandstream.com/blogs/wild-chef"),
+            (u"The Gun Nut", u"http://www.fieldandstream.com/blogs/gun-nut"),
+            (u"Whitetail 365", u"http://www.fieldandstream.com/blogs/whitetail-365"),
+            (u"Fly Talk", u"http://www.fieldandstream.com/blogs/flytalk"),
+            (u"Generation Wild", u"http://www.fieldandstream.com/blogs/generation-wild"),
+            (u"Conservationist", u"http://www.fieldandstream.com/blogs/conservationist"),
+            (u"Honest Angler", u"http://www.fieldandstream.com/blogs/honest-angler"),
+            (u"Mans Best Friend", u"http://www.fieldandstream.com/blogs/mans-best-friend"),
+
+            ]:
+            articles = self.make_links(url)
+            if articles:
+                feeds.append((title, articles))
+        return feeds
+
+    def make_links(self, url):
+        title = 'Temp'
+        current_articles = []
+        soup = self.index_to_soup(url)
+        print 'The soup is: ', soup
+        for item in soup.findAll('h2'):
+            print 'item is: ', item
+            link = item.find('a')
+            print 'the link is: ', link
+            if link:
+                url = self.INDEX + link['href']
+                title = self.tag_to_string(link)
+                print 'the title is: ', title
+                print 'the url is: ', url
+                print 'the title is: ', title
+                current_articles.append({'title': title, 'url': url, 'description':'', 'date':''}) # append all this
+        return current_articles
+
+
+
+
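
Not part of the patch itself, but for context: a minimal sketch of the value calibre expects parse_index() to return, which is what make_links() above assembles for each blog. The feed name and blog URL come from the recipe; the article title and link are hypothetical placeholders.

    # Sketch only: the shape of parse_index()'s return value.
    # Each feed is a (title, list-of-article-dicts) tuple; the dict keys
    # mirror what make_links() appends above.  The "Wild Chef" article
    # shown here is a made-up placeholder, not a real post.
    feeds = [
        (u'Wild Chef', [
            {'title': u'Example post title',
             'url': u'http://www.fieldandstream.com/blogs/wild-chef/example-post',
             'description': '',
             'date': ''},
        ]),
        # ... one such tuple per blog listed in parse_index()
    ]

Saved as fstream.recipe, the recipe can be exercised end to end with calibre's ebook-convert, for example: ebook-convert fstream.recipe .epub --test -vv (--test downloads only a couple of articles from a couple of feeds, and -vv turns on verbose logging).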