0.9.28
50
COPYRIGHT
@@ -79,13 +79,6 @@ License: GPL2+
The full text of the GPL is distributed as in
/usr/share/common-licenses/GPL-2 on Debian systems.

Files: src/pyPdf/*
Copyright: Copyright (c) 2006, Mathieu Fenniak
Copyright: Copyright (c) 2007, Ashish Kulkarni <kulkarni.ashish@gmail.com>
License: BSD
The full text of the BSD license is distributed as in
/usr/share/common-licenses/BSD on Debian systems.

Files: src/calibre/utils/lzx/*
Copyright: Copyright (C) 2002, Matthew T. Russotto
Copyright: Copyright (C) 2008, Marshall T. Vandegrift <llasram@gmail.com>
@@ -100,49 +93,6 @@ License: BSD
The full text of the BSD license is distributed as in
/usr/share/common-licenses/BSD on Debian systems.

Files: src/calibre/utils/pyparsing.py
Copyright: Copyright (c) 2003-2008, Paul T. McGuire
License: MIT
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

Files: src/calibre/utils/PythonMagickWand.py
Copyright: (c) 2007 - Achim Domma - domma@procoders.net
License: MIT
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

Files: src/calibre/utils/msdes/d3des.h:
Files: src/calibre/utils/msdes/des.c:
Copyright: Copyright (C) 1988,1989,1990,1991,1992, Richard Outerbridge
@@ -20,6 +20,82 @@
# new recipes:
# - title:

- version: 0.9.28
  date: 2013-04-26

  new features:
    - title: "Virtual Libraries: Easily partition your large calibre library into smaller 'virtual' libraries"
      type: major
      description: "A virtual library is a way to tell calibre to open only a subset of a normal library. For example, you might want to only work with books by a certain author, or books having only a certain tag. To use this feature, click the button labeled 'Virtual Library' to the left of the search bar. For details, see http://manual.calibre-ebook.com/virtual_libraries.html. This feature used to be called 'Search restriction'; the new virtual libraries are easier to use, but otherwise fulfil the same function."

    - title: "Book details panel: Allow copying of links in the book details panel by right clicking on them."
      tickets: [1171963]

    - title: "Kobo driver: Add support for the new Kobo Aura HD and firmware version 2.5.0"
      tickets: [1169571,1169968]

    - title: "Metadata download: When showing downloaded covers, allow right clicking on a cover to view a full size version."
      tickets: [1170544]

    - title: "Driver for Easy player cyber book e touch and Droid 4"
      tickets: [1171633,1170763]

    - title: "Edit ToC: Allow the size of the panels in the location view to be adjusted"

    - title: "When copying to a library by path, make it more efficient to choose between moving and copying"
      tickets: [1168231]

    - title: "When checking if a zip/rar file is a comic or contains a single ebook to be auto-extracted, ignore thumbs.db files inside the archive"

  bug fixes:
    - title: "EPUB Input: Fix handling of EPUB files that contain images with non-ascii filenames."
      tickets: [1171186]

    - title: "Device driver: Detect Laser EB720 with newer firmware."
      tickets: [1171341]

    - title: "Fix bug in the Danish translation that caused the language Ingush to be incorrectly translated as Engelsk"

    - title: "PDF Output: Fix hyperlinks not working when converting an EPUB whose individual files have names with URL unsafe characters."
      tickets: [1169795]

    - title: "Book polishing: Fix inserting a cover into an epub with no cover leading to an incorrect guide entry if the opf is not at the root of the epub."
      tickets: [1167941]

    - title: "ZIP Output: Fix links containing backslashes on Windows"
      tickets: [1169910]

    - title: "Fix polishing of AZW3 files not working on OS X."
      tickets: [1168789]

    - title: "Polishing books: Fix polishing erroring out if the book being polished has no cover"

    - title: "RTF Input: Add partial support for hyperlinks to web resources."
      tickets: [1167562]

    - title: "Fix book details panel showing incorrect info after deleting books from a connected device"
      tickets: [1172839]

  improved recipes:
    - NZZ Online
    - Baltimore Sun
    - Metro NL
    - Financial Times
    - EcoGeek
    - comics.com
    - Psychology Today
    - Science News

  new recipes:
    - title: Voice of America
      author: Krittika Goyal

    - title: Lightspeed Magazine
      author: Jose Pinto

    - title: The Feature
      author: Jose Pinto

- version: 0.9.27
  date: 2013-04-12
@@ -367,6 +367,8 @@ For example::

    date:>10daysago
    date:<=45daysago

To avoid potential problems with translated strings when using a non-English version of calibre, the strings ``_today``, ``_yesterday``, ``_thismonth``, and ``_daysago`` are always available. They are not translated.
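
For instance, the ten-day cutoff shown above can also be written with the
untranslated suffix, which should behave the same way in any interface
language (illustrative example)::

    date:>10_daysago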

You can search for books that have a format of a certain size like this::

@@ -424,6 +426,8 @@ Identifiers (e.g., isbn, doi, lccn etc) also use an extended syntax. First, note

:guilabel:`Advanced Search Dialog`

.. _saved_searches:

Saving searches
-----------------

@@ -433,6 +437,15 @@ Now you can access your saved search in the Tag Browser under "Searches". A sing

.. _config_filename_metadata:

Virtual Libraries
-------------------

A :guilabel:`Virtual Library` is a way to pretend that your |app| library has
only a few books instead of its full collection. This is an excellent way to
partition your large collection of books into smaller, manageable chunks. To
learn how to create and use virtual libraries, see the tutorial:
:ref:`virtual_libraries`.

Guessing metadata from file names
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In the :guilabel:`Add/Save` section of the configuration dialog, you can specify a regular expression that |app| will use to try and guess metadata from the names of ebook files

BIN  manual/images/virtual_library_button.png  (new image, 2.3 KiB)
BIN  manual/images/vl_by_author.png  (new image, 65 KiB)

@@ -20,4 +20,5 @@ Here you will find tutorials to get you started using |app|'s more advanced features
    creating_plugins
    typesetting_math
    catalogs
    virtual_libraries

manual/virtual_libraries.rst (new file, 89 lines)
@@ -0,0 +1,89 @@
.. include:: global.rst

.. _virtual_libraries:


Virtual Libraries
============================

In |app|, a virtual library is a way to tell |app| to open only a subset of a
normal library. For example, you might want to only work with books by a certain
author, or books having only a certain tag. Using virtual libraries is the
preferred way of partitioning your large book collection into smaller
sub-collections. It is superior to splitting up your library into multiple
smaller libraries because, when you want to search through your entire
collection, you can simply go back to the full library. There is no way to
search through multiple separate libraries simultaneously in |app|.

A virtual library is different from a simple search. A search will only restrict
the list of books shown in the book list. A virtual library does that, and in
addition it also restricts the entries shown in the :guilabel:`Tag Browser` to
the left. The Tag Browser will only show tags, authors, series, publishers, etc.
that come from the books in the virtual library. A virtual library thus behaves
as though the actual library contains only the restricted set of books.

Creating Virtual Libraries
----------------------------

.. |vlb| image:: images/virtual_library_button.png
    :class: float-left-img

|vlb| To use a virtual library, click the :guilabel:`Virtual Library` button located
to the left of the search bar and select the :guilabel:`Create Virtual Library`
option. As a first example, let's create a virtual library that shows us only
the books by a particular author. Click the :guilabel:`Authors` link as shown
in the image below, choose the author you want to use, and click OK.

.. image:: images/vl_by_author.png
    :align: center

The Create Virtual Library dialog has been filled in for you. Click OK and you
will see that a new Virtual Library has been created, and automatically
switched to, that displays only the books by the selected author. As far as
|app| is concerned, it is as if your library contains only the books by the
selected author.

You can switch back to the full library at any time by once again clicking the
:guilabel:`Virtual Library` button and selecting the entry named :guilabel:`<None>`.

Virtual Libraries are based on *searches*. You can use any search as the
basis of a virtual library; the virtual library will contain only the
books matched by that search. First, type the search you want to use
into the search bar or build a search using the :guilabel:`Tag Browser`.
When you are happy with the returned results, click the Virtual Library
button, choose Create Virtual Library and enter a name for the new virtual
library. The virtual library will then be created based on the search
you just typed in. Searches are very powerful; for examples of the kinds
of things you can do with them, see :ref:`search_interface`.
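
For example, a search like the following could be saved as a Virtual Library of
unread books by a particular author (an illustrative query only; it assumes you
mark finished books with a tag named ``read``, and any author will do)::

    authors:Asimov and not tags:"=read"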

Working with Virtual Libraries
-------------------------------------

You can edit a previously created virtual library, or remove it, by clicking the
:guilabel:`Virtual Library` button and choosing the appropriate action.

You can tell |app| that you always want to apply a particular virtual library
when the current library is opened, by going to
:guilabel:`Preferences->Behavior`.

If you use the |app| Content Server, you can have it share a virtual library
instead of the full library by going to :guilabel:`Preferences->Sharing over the net`.

You can quickly use the current search as a temporary virtual library by
clicking the :guilabel:`Virtual Library` button and choosing the
:guilabel:`*current search` entry.

Using additional restrictions
-------------------------------

You can further restrict the books shown in a Virtual Library by using
:guilabel:`Additional restrictions`. An additional restriction is a saved search
you previously created that can be applied on top of the current Virtual Library
to further restrict the books it shows. For example, say you have a Virtual
Library for books tagged as :guilabel:`Historical Fiction` and a saved search
that shows you unread books. You can then click the :guilabel:`Virtual Library`
button and choose the :guilabel:`Additional restriction` option to show only
unread Historical Fiction books. To learn about saved searches, see
:ref:`saved_searches`.
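
Continuing the example above, the saved search used as the additional
restriction could be as simple as the following (again illustrative; it assumes
finished books carry a ``read`` tag)::

    not tags:"=read"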
@ -13,14 +13,14 @@ class BaltimoreSun(BasicNewsRecipe):
|
||||
__author__ = 'Josh Hall'
|
||||
description = 'Complete local news and blogs from Baltimore'
|
||||
language = 'en'
|
||||
version = 2.1
|
||||
oldest_article = 1
|
||||
version = 2.4
|
||||
oldest_article = 1.5
|
||||
max_articles_per_feed = 100
|
||||
use_embedded_content = False
|
||||
no_stylesheets = True
|
||||
remove_javascript = True
|
||||
#auto_cleanup = True
|
||||
recursions = 1
|
||||
remove_empty_feeds= True
|
||||
recursions = 3
|
||||
|
||||
ignore_duplicate_articles = {'title'}
|
||||
keep_only_tags = [dict(name='div', attrs={'class':["story","entry-asset asset hentry"]}),
|
||||
@ -31,7 +31,7 @@ class BaltimoreSun(BasicNewsRecipe):
|
||||
match_regexps = [r'page=[0-9]+']
|
||||
|
||||
remove_tags = [{'id':["moduleArticleTools","content-bottom","rail","articleRelates module","toolSet","relatedrailcontent","div-wrapper","beta","atp-comments","footer",'gallery-subcontent','subFooter']},
|
||||
{'class':["clearfix","relatedTitle","articleRelates module","asset-footer","tools","comments","featurePromo","featurePromo fp-topjobs brownBackground","clearfix fullSpan brownBackground","curvedContent",'nextgen-share-tools','outbrainTools', 'google-ad-story-bottom']},
|
||||
{'class':["clearfix","relatedTitle","articleRelates module","asset-footer","tools","comments","featurePromo","featurePromo fp-topjobs brownBackground","clearfix fullSpan brownBackground","curvedContent",'nextgen-share-tools','nextgen-comments-container','nextgen-comments-content','outbrainTools','fb-like' 'google-ad-story-bottom']},
|
||||
dict(name='font',attrs={'id':["cr-other-headlines"]})]
|
||||
extra_css = '''
|
||||
h1{font-family:Arial,Helvetica,sans-serif; font-weight:bold;font-size:large;}
|
||||
@ -49,40 +49,39 @@ class BaltimoreSun(BasicNewsRecipe):
|
||||
'''
|
||||
feeds = [
|
||||
## News ##
|
||||
(u'Top Headlines', u'http://www.baltimoresun.com/rss2.0.xml'),
|
||||
(u'Breaking News', u'http://www.baltimoresun.com/news/breaking/rss2.0.xml'),
|
||||
(u'Top Maryland', u'http://www.baltimoresun.com/news/maryland/rss2.0.xml'),
|
||||
#(u'Anne Arundel County', u'http://www.baltimoresun.com/news/maryland/anne-arundel/rss2.0.xml'),
|
||||
(u'Baltimore City', u'http://www.baltimoresun.com/news/maryland/baltimore-city/rss2.0.xml'),
|
||||
#(u'Baltimore County', u'http://www.baltimoresun.com/news/maryland/baltimore-county/rss2.0.xml'),
|
||||
#(u'Carroll County', u'http://www.baltimoresun.com/news/maryland/carroll/rss2.0.xml'),
|
||||
#(u'Harford County', u'http://www.baltimoresun.com/news/maryland/harford/rss2.0.xml'),
|
||||
#(u'Howard County', u'http://www.baltimoresun.com/news/maryland/howard/rss2.0.xml'),
|
||||
(u'Education', u'http://www.baltimoresun.com/news/education/rss2.0.xml'),
|
||||
#(u'Obituaries', u'http://www.baltimoresun.com/news/obituaries/rss2.0.xml'),
|
||||
(u'Local Politics', u'http://www.baltimoresun.com/news/maryland/politics/rss2.0.xml'),
|
||||
(u'Weather', u'http://www.baltimoresun.com/news/weather/rss2.0.xml'),
|
||||
#(u'Traffic', u'http://www.baltimoresun.com/features/commuting/rss2.0.xml'),
|
||||
(u'Top Headlines', u'http://feeds.feedburner.com/baltimoresun/news/rss2'),
|
||||
(u'Breaking News', u'http://feeds.feedburner.com/baltimoresun/news/local/annearundel/rss2'),
|
||||
(u'Top Maryland', u'http://feeds.feedburner.com/baltimoresun/news/local/rss2'),
|
||||
#(u'Anne Arundel County', u'http://feeds.feedburner.com/baltimoresun/news/local/annearundel/rss2'),
|
||||
(u'Baltimore City', u'http://feeds.feedburner.com/baltimoresun/news/local/baltimore_city/rss20xml'),
|
||||
#(u'Baltimore County', u'http://feeds.feedburner.com/baltimoresun/news/local/baltimore_county/rss2'),
|
||||
#(u'Carroll County', u'http://feeds.feedburner.com/baltimoresun/news/local/carroll/rss2'),
|
||||
#(u'Harford County', u'http://feeds.feedburner.com/baltimoresun/news/local/harford/rss2),
|
||||
#(u'Howard County', u'http://feeds.feedburner.com/baltimoresun/news/local/howard/rss2'),
|
||||
(u'Education', u'http://feeds.feedburner.com/baltimoresun/news/education/rss2'),
|
||||
#(u'Obituaries', u'http://feeds.feedburner.com/baltimoresun/news/obituaries/rss2'),
|
||||
(u'Local Politics', u'http://feeds.feedburner.com/baltimoresun/news/local/politics/rss2'),
|
||||
(u'Weather', u'http://feeds.feedburner.com/baltimoresun/news/weather/site/rss2'),
|
||||
#(u'Traffic', u'http://feeds.feedburner.com/baltimoresun/news/traffic/rss2'),
|
||||
(u'Nation/world', u'http://feeds.feedburner.com/baltimoresun/news/nationworld/rss2'),
|
||||
(u'Weird News', u'http://www.baltimoresun.com/news/offbeat/rss2.0.xml'),
|
||||
#(u'Weird News', u'http://feeds.feedburner.com/baltsun-weirdnews'),
|
||||
|
||||
##Sports##
|
||||
(u'Top Sports', u'http://www.baltimoresun.com/sports/rss2.0.xml'),
|
||||
(u'Top Sports', u'http://feeds.feedburner.com/baltimoresun/sports/rss2'),
|
||||
(u'Orioles/Baseball', u'http://www.baltimoresun.com/sports/orioles/rss2.0.xml'),
|
||||
(u'Ravens/Football', u'http://www.baltimoresun.com/sports/ravens/rss2.0.xml'),
|
||||
#(u'Terps', u'http://www.baltimoresun.com/sports/terps/rss2.0.xml'),
|
||||
#(u'College Football', u'http://www.baltimoresun.com/sports/college/football/rss2.0.xml'),
|
||||
#(u'Lacrosse', u'http://www.baltimoresun.com/sports/college/lacrosse/rss2.0.xml'),
|
||||
#(u'Horse Racing', u'http://www.baltimoresun.com/sports/horse-racing/rss2.0.xml'),
|
||||
#(u'Golf', u'http://www.baltimoresun.com/sports/golf/rss2.0.xml'),
|
||||
#(u'NBA', u'http://www.baltimoresun.com/sports/nba/rss2.0.xml'),
|
||||
#(u'High School', u'http://www.baltimoresun.com/sports/high-school/rss2.0.xml'),
|
||||
#(u'Outdoors', u'http://www.baltimoresun.com/sports/outdoors/rss2.0.xml'),
|
||||
|
||||
(u'Ravens/Football', u'http://feeds.feedburner.com/baltimoresun/sports/football/rss2'),
|
||||
#(u'Terps', u''http://feeds.feedburner.com/baltimoresun/sports/terps/rss2'),
|
||||
#(u'College Football', u''feed://feeds.feedburner.com/baltimoresun/sports/college/football/rss2'),
|
||||
#(u'Lacrosse', u'http://feeds.feedburner.com/baltimoresun/sports/college/lacrosse/rss2'),
|
||||
#(u'Horse Racing', u'http://feeds.feedburner.com/baltimoresun/sports/horseracing/rss2'),
|
||||
#(u'Golf', u'http://feeds.feedburner.com/baltimoresun/sports/golf/rss2'),
|
||||
#(u'NBA', u'http://feeds.feedburner.com/baltimoresun/sports/basketball/rss2'),
|
||||
#(u'High School', u'http://feeds.feedburner.com/baltimoresun/sports/highschool/rss2'),
|
||||
#(u'Outdoors', u'http://feeds.feedburner.com/baltimoresun/sports/outdoors/rss2'),
|
||||
|
||||
## Entertainment ##
|
||||
(u'Celebrity News', u'http://www.baltimoresun.com/entertainment/celebrities/rss2.0.xml'),
|
||||
(u'Arts & Theater', u'http://www.baltimoresun.com/entertainment/arts/rss2.0.xml'),
|
||||
(u'Celebrity News', u'http://baltimore.feedsportal.com/c/34255/f/623042/index.rss'),
|
||||
(u'Arts & Theater', u'http://feeds.feedburner.com/baltimoresun/entertainment/galleriesmuseums/rss2'),
|
||||
(u'Movies', u'http://www.baltimoresun.com/entertainment/movies/rss2.0.xml'),
|
||||
(u'Music & Nightlife', u'http://www.baltimoresun.com/entertainment/music/rss2.0.xml'),
|
||||
(u'Restaurants & Food', u'http://www.baltimoresun.com/entertainment/dining/rss2.0.xml'),
|
||||
@ -92,7 +91,6 @@ class BaltimoreSun(BasicNewsRecipe):
|
||||
(u'Health&Wellness', u'http://www.baltimoresun.com/health/rss2.0.xml'),
|
||||
(u'Home & Garden', u'http://www.baltimoresun.com/features/home-garden/rss2.0.xml'),
|
||||
(u'Living Green', u'http://www.baltimoresun.com/features/green/rss2.0.xml'),
|
||||
(u'Parenting', u'http://www.baltimoresun.com/features/parenting/rss2.0.xml'),
|
||||
(u'Fashion', u'http://www.baltimoresun.com/features/fashion/rss2.0.xml'),
|
||||
(u'Travel', u'http://www.baltimoresun.com/travel/rss2.0.xml'),
|
||||
#(u'Faith', u'http://www.baltimoresun.com/features/faith/rss2.0.xml'),
|
||||
@ -100,17 +98,17 @@ class BaltimoreSun(BasicNewsRecipe):
|
||||
## Business ##
|
||||
(u'Top Business', u'http://www.baltimoresun.com/business/rss2.0.xml'),
|
||||
(u'Technology', u'http://www.baltimoresun.com/business/technology/rss2.0.xml'),
|
||||
(u'Personal finance', u'http://www.baltimoresun.com/business/money/rss2.0.xml'),
|
||||
(u'Personal finance', u'http://baltimore.feedsportal.com/c/34255/f/623057/index.rss'),
|
||||
(u'Real Estate', u'http://www.baltimoresun.com/classified/realestate/rss2.0.xml'),
|
||||
(u'Jobs', u'http://www.baltimoresun.com/classified/jobs/rss2.0.xml'),
|
||||
(u'DIY', u'http://www.baltimoresun.com/features/do-it-yourself/rss2.0.xml'),
|
||||
(u'Consumer Safety', u'http://www.baltimoresun.com/business/consumer-safety/rss2.0.xml'),
|
||||
(u'Jobs', u'http://baltimore.feedsportal.com/c/34255/f/623059/index.rss'),
|
||||
#(u'DIY', u'http://baltimore.feedsportal.com/c/34255/f/623060/index.rss'),
|
||||
#(u'Consumer Safety', u'http://baltimore.feedsportal.com/c/34255/f/623061/index.rss'),
|
||||
(u'Investing', u'http://www.baltimoresun.com/business/money/rss2.0.xml'),
|
||||
|
||||
## Opinion##
|
||||
(u'Sun Editorials', u'http://www.baltimoresun.com/news/opinion/editorial/rss2.0.xml'),
|
||||
(u'Op/Ed', u'http://www.baltimoresun.com/news/opinion/oped/rss2.0.xml'),
|
||||
(u'Readers Respond', u'http://www.baltimoresun.com/news/opinion/readersrespond/'),
|
||||
(u'Readers Respond', u'http://baltimore.feedsportal.com/c/34255/f/623065/index.rss'),
|
||||
|
||||
## Columnists ##
|
||||
(u'Kevin Cowherd', u'http://www.baltimoresun.com/sports/bal-columnist-cowherd,0,6829726.columnist-rss2.0.xml'),
|
||||
@ -138,30 +136,26 @@ class BaltimoreSun(BasicNewsRecipe):
|
||||
(u'The Real Estate Wonk', u'http://www.baltimoresun.com/business/real-estate/wonk/rss2.0.xml'),
|
||||
|
||||
## Entertainment Blogs ##
|
||||
(u'Clef Notes & Drama Queens', 'http://weblogs.baltimoresun.com/entertainment/classicalmusic/index.xml'),
|
||||
(u'Baltimore Diner', u'http://baltimore.feedsportal.com/c/34255/f/623088/index.rss'),
|
||||
(u'ArtSmash', 'http://www.baltimoresun.com/entertainment/arts/artsmash/rss2.0.xml'),
|
||||
(u'Baltimore Diner', u'http://baltimore.feedsportal.com/c/34255/f/623088/index.rss'),
|
||||
(u'Midnight Sun', u'http://www.baltimoresun.com/entertainment/music/midnight-sun-blog/rss2.0.xml'),
|
||||
(u'Read Street', u'http://www.baltimoresun.com/features/books/read-street/rss2.0.xml'),
|
||||
(u'Z on TV', u'http://www.baltimoresun.com/entertainment/tv/z-on-tv-blog/rss2.0.xml'),
|
||||
|
||||
### Life Blogs ##
|
||||
## Life Blogs ##
|
||||
#(u'BMore Green', u'http://weblogs.baltimoresun.com/features/green/index.xml'),
|
||||
#(u'Baltimore Insider',u'http://www.baltimoresun.com/features/baltimore-insider-blog/rss2.0.xml'),
|
||||
#(u'Homefront', u'http://www.baltimoresun.com/features/parenting/homefront/rss2.0.xml'),
|
||||
#(u'Picture of Health', u'http://www.baltimoresun.com/health/blog/rss2.0.xml'),
|
||||
#(u'Unleashed', u'http://weblogs.baltimoresun.com/features/mutts/blog/index.xml'),
|
||||
(u'Baltimore Insider',u'http://www.baltimoresun.com/features/baltimore-insider-blog/rss2.0.xml'),
|
||||
(u'Picture of Health', u'http://www.baltimoresun.com/health/blog/rss2.0.xml'),
|
||||
#(u'Unleashed', u'http://weblogs.baltimoresun.com/features/mutts/blog/index.xml'),
|
||||
|
||||
## b the site blogs ##
|
||||
(u'Game Cache', u'http://www.baltimoresun.com/entertainment/bthesite/game-cache/rss2.0.xml'),
|
||||
(u'TV Lust', u'http://www.baltimoresun.com/entertainment/bthesite/tv-lust/rss2.0.xml'),
|
||||
(u'TV Lust', u'http://baltimore.feedsportal.com/c/34255/f/623096/index.rss'),
|
||||
|
||||
## Sports Blogs ##
|
||||
(u'Baltimore Sports Blitz', u'http://baltimore.feedsportal.com/c/34255/f/623097/index.rss'),
|
||||
#(u'Faceoff', u'http://weblogs.baltimoresun.com/sports/lacrosse/blog/index.xml'),
|
||||
#(u'MMA Stomping Grounds', u'http://weblogs.baltimoresun.com/sports/mma/blog/index.xml'),
|
||||
## (u'Lacrosse Insider',u'http://www.baltimoresun.com/sports/lacrosse-blog/rss2.0.xml'),
|
||||
(u'Orioles Insider', u'http://baltimore.feedsportal.com/c/34255/f/623100/index.rss'),
|
||||
(u'Ravens Insider', u'http://www.baltimoresun.com/sports/ravens/ravens-insider/rss2.0.xml'),
|
||||
#(u'Recruiting Report', u'http://weblogs.baltimoresun.com/sports/college/recruiting/index.xml'),
|
||||
#(u'Ring Posts', u'http://weblogs.baltimoresun.com/sports/wrestling/blog/index.xml'),
|
||||
(u'The Schmuck Stops Here', u'http://www.baltimoresun.com/sports/schmuck-blog/rss2.0.xml'),
|
||||
#(u'Tracking the Terps', u'http://weblogs.baltimoresun.com/sports/college/maryland_terps/blog/index.xml'),
|
||||
@ -169,7 +163,6 @@ class BaltimoreSun(BasicNewsRecipe):
|
||||
]
|
||||
|
||||
|
||||
|
||||
def get_article_url(self, article):
|
||||
ans = None
|
||||
try:
|
||||
@ -190,6 +183,8 @@ class BaltimoreSun(BasicNewsRecipe):
|
||||
url = a.get('href')
|
||||
if url:
|
||||
return self.index_to_soup(url, raw=True)
|
||||
def print_version(self, url):
|
||||
return self.browser.open_novisit(url).geturl()
|
||||
|
||||
def postprocess_html(self, soup, first_fetch):
|
||||
# Remove the navigation bar. It was kept until now to be able to follow
|
||||
|
@@ -11,22 +11,22 @@ from calibre.web.feeds.news import BasicNewsRecipe
class EcoGeek(BasicNewsRecipe):
    title = 'EcoGeek'
    __author__ = 'Darko Miletic'
    description = 'EcoGeek - Technology for the Environment Blog Feed'
    description = 'EcoGeek - Technology for the Environment Blog Feed'
    publisher = 'EcoGeek'
    language = 'en'

    category = 'news, ecology, blog'
    oldest_article = 7
    oldest_article = 30
    max_articles_per_feed = 100
    no_stylesheets = True
    use_embedded_content = True

    html2lrf_options = [
                          '--comment', description
                        , '--category', category
                        , '--publisher', publisher
                        ]

    html2epub_options = 'publisher="' + publisher + '"\ncomments="' + description + '"\ntags="' + category + '"'

    html2epub_options = 'publisher="' + publisher + '"\ncomments="' + description + '"\ntags="' + category + '"'
    feeds = [(u'Posts', u'http://feeds2.feedburner.com/EcoGeek')]

@@ -1,7 +1,7 @@
__license__ = 'GPL v3'
__copyright__ = '2010-2012, Darko Miletic <darko.miletic at gmail.com>'
__copyright__ = '2010-2013, Darko Miletic <darko.miletic at gmail.com>'
'''
www.ft.com/uk-edition
www.ft.com/intl/uk-edition
'''

import datetime
@@ -29,7 +29,7 @@ class FinancialTimes(BasicNewsRecipe):
    masthead_url = 'http://im.media.ft.com/m/img/masthead_main.jpg'
    LOGIN = 'https://registration.ft.com/registration/barrier/login'
    LOGIN2 = 'http://media.ft.com/h/subs3.html'
    INDEX = 'http://www.ft.com/uk-edition'
    INDEX = 'http://www.ft.com/intl/uk-edition'
    PREFIX = 'http://www.ft.com'

    conversion_options = {
@@ -1,20 +1,21 @@
__license__ = 'GPL v3'
__copyright__ = '2013, Darko Miletic <darko.miletic at gmail.com>'
__copyright__ = '2010-2013, Darko Miletic <darko.miletic at gmail.com>'
'''
http://www.ft.com/intl/us-edition
www.ft.com/intl/international-edition
'''

import datetime
from calibre.ptempfile import PersistentTemporaryFile
from calibre import strftime
from calibre.web.feeds.news import BasicNewsRecipe
from collections import OrderedDict

class FinancialTimes(BasicNewsRecipe):
    title = 'Financial Times (US) printed edition'
    title = 'Financial Times (International) printed edition'
    __author__ = 'Darko Miletic'
    description = "The Financial Times (FT) is one of the world's leading business news and information organisations, recognised internationally for its authority, integrity and accuracy."
    publisher = 'The Financial Times Ltd.'
    category = 'news, finances, politics, UK, World'
    category = 'news, finances, politics, World'
    oldest_article = 2
    language = 'en'
    max_articles_per_feed = 250
@@ -28,7 +29,7 @@ class FinancialTimes(BasicNewsRecipe):
    masthead_url = 'http://im.media.ft.com/m/img/masthead_main.jpg'
    LOGIN = 'https://registration.ft.com/registration/barrier/login'
    LOGIN2 = 'http://media.ft.com/h/subs3.html'
    INDEX = 'http://www.ft.com/intl/us-edition'
    INDEX = 'http://www.ft.com/intl/international-edition'
    PREFIX = 'http://www.ft.com'

    conversion_options = {
@@ -93,7 +94,7 @@ class FinancialTimes(BasicNewsRecipe):
            try:
                urlverified = self.browser.open_novisit(url).geturl() # resolve redirect.
            except:
                continue
                continue
            title = self.tag_to_string(item)
            date = strftime(self.timefmt)
            articles.append({
@@ -105,29 +106,30 @@ class FinancialTimes(BasicNewsRecipe):
        return articles

    def parse_index(self):
        feeds = []
        feeds = OrderedDict()
        soup = self.index_to_soup(self.INDEX)
        dates= self.tag_to_string(soup.find('div', attrs={'class':'btm-links'}).find('div'))
        self.timefmt = ' [%s]'%dates
        wide = soup.find('div',attrs={'class':'wide'})
        if not wide:
            return feeds
        allsections = wide.findAll(attrs={'class':lambda x: x and 'footwell' in x.split()})
        if not allsections:
            return feeds
        count = 0
        for item in allsections:
            count = count + 1
            if self.test and count > 2:
                return feeds
            fitem = item.h3
            if not fitem:
                fitem = item.h4
            ftitle = self.tag_to_string(fitem)
            self.report_progress(0, _('Fetching feed')+' %s...'%(ftitle))
            feedarts = self.get_artlinks(item.ul)
            feeds.append((ftitle,feedarts))
        return feeds
        #dates= self.tag_to_string(soup.find('div', attrs={'class':'btm-links'}).find('div'))
        #self.timefmt = ' [%s]'%dates
        section_title = 'Untitled'

        for column in soup.findAll('div', attrs = {'class':'feedBoxes clearfix'}):
            for section in column. findAll('div', attrs = {'class':'feedBox'}):
                sectiontitle=self.tag_to_string(section.find('h4'))
                if '...' not in sectiontitle: section_title=sectiontitle
                for article in section.ul.findAll('li'):
                    articles = []
                    title=self.tag_to_string(article.a)
                    url=article.a['href']
                    articles.append({'title':title, 'url':url, 'description':'', 'date':''})

                    if articles:
                        if section_title not in feeds:
                            feeds[section_title] = []
                        feeds[section_title] += articles

        ans = [(key, val) for key, val in feeds.iteritems()]
        return ans

    def preprocess_html(self, soup):
        items = ['promo-box','promo-title',
@@ -174,9 +176,6 @@ class FinancialTimes(BasicNewsRecipe):
            count += 1
        tfile = PersistentTemporaryFile('_fa.html')
        tfile.write(html)
        tfile.close()
        tfile.close()
        self.temp_files.append(tfile)
        return tfile.name

    def cleanup(self):
        self.browser.open('https://registration.ft.com/registration/login/logout?location=')
@ -1,90 +0,0 @@
|
||||
import re
|
||||
from calibre.web.feeds.news import BasicNewsRecipe
|
||||
|
||||
class GiveMeSomethingToRead(BasicNewsRecipe):
|
||||
title = u'Give Me Something To Read'
|
||||
description = 'Curation / aggregation of articles on diverse topics'
|
||||
language = 'en'
|
||||
__author__ = 'barty on mobileread.com forum'
|
||||
max_articles_per_feed = 100
|
||||
no_stylesheets = False
|
||||
timefmt = ' [%a, %d %b, %Y]'
|
||||
oldest_article = 365
|
||||
auto_cleanup = True
|
||||
INDEX = 'http://givemesomethingtoread.com'
|
||||
CATEGORIES = [
|
||||
# comment out categories you don't want
|
||||
# (user friendly name, system name, max number of articles to load)
|
||||
('The Arts','arts',25),
|
||||
('Science','science',30),
|
||||
('Technology','technology',30),
|
||||
('Politics','politics',20),
|
||||
('Media','media',30),
|
||||
('Crime','crime',15),
|
||||
('Other articles','',10)
|
||||
]
|
||||
|
||||
def parse_index(self):
|
||||
self.cover_url = 'http://thegretchenshow.files.wordpress.com/2009/12/well-read-cat-small.jpg'
|
||||
feeds = []
|
||||
seen_urls = set([])
|
||||
regex = re.compile( r'http://(www\.)?([^/:]+)', re.I)
|
||||
|
||||
for category in self.CATEGORIES:
|
||||
|
||||
(cat_name, tag, max_articles) = category
|
||||
|
||||
tagurl = '' if tag=='' else '/tagged/'+tag
|
||||
self.log('Reading category:', cat_name)
|
||||
|
||||
articles = []
|
||||
pageno = 1
|
||||
|
||||
while len(articles) < max_articles and pageno < 100:
|
||||
|
||||
page = "%s%s/page/%d" % (self.INDEX, tagurl, pageno) if pageno > 1 else self.INDEX + tagurl
|
||||
pageno += 1
|
||||
|
||||
self.log('\tReading page:', page)
|
||||
try:
|
||||
soup = self.index_to_soup(page)
|
||||
except:
|
||||
break
|
||||
|
||||
headers = soup.findAll('h2')
|
||||
if len(headers) == .0:
|
||||
break
|
||||
|
||||
for header in headers:
|
||||
atag = header.find('a')
|
||||
url = atag['href']
|
||||
# skip promotionals and duplicate
|
||||
if url.startswith('http://givemesomethingtoread') or url.startswith('/') or url in seen_urls:
|
||||
continue
|
||||
seen_urls.add(url)
|
||||
title = self.tag_to_string(header)
|
||||
self.log('\tFound article:', title)
|
||||
#self.log('\t', url)
|
||||
desc = header.parent.find('blockquote')
|
||||
desc = self.tag_to_string(desc) if desc else ''
|
||||
m = regex.match( url)
|
||||
if m:
|
||||
desc = "[%s] %s" % (m.group(2), desc)
|
||||
#self.log('\t', desc)
|
||||
date = ''
|
||||
p = header.parent.previousSibling
|
||||
# navigate up to find h3, which contains the date
|
||||
while p:
|
||||
if hasattr(p,'name') and p.name == 'h3':
|
||||
date = self.tag_to_string(p)
|
||||
break
|
||||
p = p.previousSibling
|
||||
articles.append({'title':title,'url':url,'description':desc,'date':date})
|
||||
if len(articles) >= max_articles:
|
||||
break
|
||||
|
||||
if articles:
|
||||
feeds.append((cat_name, articles))
|
||||
|
||||
return feeds
|
||||
|
@ -1,448 +1,229 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
__license__ = 'GPL v3'
|
||||
__copyright__ = 'Copyright 2010 Starson17'
|
||||
'''
|
||||
www.gocomics.com
|
||||
'''
|
||||
from calibre.web.feeds.news import BasicNewsRecipe
|
||||
import mechanize, re
|
||||
|
||||
class GoComics(BasicNewsRecipe):
|
||||
title = 'GoComics'
|
||||
|
||||
class Comics(BasicNewsRecipe):
|
||||
title = 'Comics.com'
|
||||
__author__ = 'Starson17'
|
||||
__version__ = '1.06'
|
||||
__date__ = '07 June 2011'
|
||||
description = u'200+ Comics - Customize for more days/comics: Defaults to 7 days, 25 comics - 20 general, 5 editorial.'
|
||||
category = 'news, comics'
|
||||
description = 'Comics from comics.com. You should customize this recipe to fetch only the comics you are interested in'
|
||||
language = 'en'
|
||||
use_embedded_content= False
|
||||
no_stylesheets = True
|
||||
oldest_article = 24
|
||||
remove_javascript = True
|
||||
cover_url = 'http://paulbuckley14059.files.wordpress.com/2008/06/calvin-and-hobbes.jpg'
|
||||
remove_attributes = ['style']
|
||||
|
||||
####### USER PREFERENCES - COMICS, IMAGE SIZE AND NUMBER OF COMICS TO RETRIEVE ########
|
||||
# num_comics_to_get - I've tried up to 99 on Calvin&Hobbes
|
||||
cover_url = 'http://www.bsb.lib.tx.us/images/comics.com.gif'
|
||||
recursions = 0
|
||||
max_articles_per_feed = 10
|
||||
num_comics_to_get = 7
|
||||
# comic_size 300 is small, 600 is medium, 900 is large, 1500 is extra-large
|
||||
comic_size = 900
|
||||
# CHOOSE COMIC STRIPS BELOW - REMOVE COMMENT '# ' FROM IN FRONT OF DESIRED STRIPS
|
||||
# Please do not overload their servers by selecting all comics and 1000 strips from each!
|
||||
simultaneous_downloads = 1
|
||||
# delay = 3
|
||||
|
||||
conversion_options = {'linearize_tables' : True
|
||||
, 'comment' : description
|
||||
, 'tags' : category
|
||||
, 'language' : language
|
||||
}
|
||||
|
||||
keep_only_tags = [dict(name='div', attrs={'class':['feature','banner']}),
|
||||
keep_only_tags = [dict(name='h1'),
|
||||
dict(name='p', attrs={'class':'feature_item'})
|
||||
]
|
||||
|
||||
remove_tags = [dict(name='a', attrs={'class':['beginning','prev','cal','next','newest']}),
|
||||
dict(name='div', attrs={'class':['tag-wrapper']}),
|
||||
dict(name='a', attrs={'href':re.compile(r'.*mutable_[0-9]+', re.IGNORECASE)}),
|
||||
dict(name='img', attrs={'src':re.compile(r'.*mutable_[0-9]+', re.IGNORECASE)}),
|
||||
dict(name='ul', attrs={'class':['share-nav','feature-nav']}),
|
||||
]
|
||||
|
||||
def get_browser(self):
|
||||
br = BasicNewsRecipe.get_browser(self)
|
||||
cookies = mechanize.CookieJar()
|
||||
br = mechanize.build_opener(mechanize.HTTPCookieProcessor(cookies))
|
||||
br.addheaders = [('Referer','http://www.gocomics.com/')]
|
||||
return br
|
||||
|
||||
def parse_index(self):
|
||||
feeds = []
|
||||
for title, url in [
|
||||
(u"2 Cows and a Chicken", u"http://www.gocomics.com/2cowsandachicken"),
|
||||
#(u"9 Chickweed Lane", u"http://www.gocomics.com/9chickweedlane"),
|
||||
(u"9 to 5", u"http://www.gocomics.com/9to5"),
|
||||
#(u"Adam At Home", u"http://www.gocomics.com/adamathome"),
|
||||
(u"Agnes", u"http://www.gocomics.com/agnes"),
|
||||
#(u"Alley Oop", u"http://www.gocomics.com/alleyoop"),
|
||||
#(u"Andy Capp", u"http://www.gocomics.com/andycapp"),
|
||||
#(u"Animal Crackers", u"http://www.gocomics.com/animalcrackers"),
|
||||
#(u"Annie", u"http://www.gocomics.com/annie"),
|
||||
#(u"Arlo & Janis", u"http://www.gocomics.com/arloandjanis"),
|
||||
#(u"Ask Shagg", u"http://www.gocomics.com/askshagg"),
|
||||
(u"B.C.", u"http://www.gocomics.com/bc"),
|
||||
#(u"Back in the Day", u"http://www.gocomics.com/backintheday"),
|
||||
#(u"Bad Reporter", u"http://www.gocomics.com/badreporter"),
|
||||
#(u"Baldo", u"http://www.gocomics.com/baldo"),
|
||||
#(u"Ballard Street", u"http://www.gocomics.com/ballardstreet"),
|
||||
#(u"Barkeater Lake", u"http://www.gocomics.com/barkeaterlake"),
|
||||
#(u"Basic Instructions", u"http://www.gocomics.com/basicinstructions"),
|
||||
#(u"Ben", u"http://www.gocomics.com/ben"),
|
||||
#(u"Betty", u"http://www.gocomics.com/betty"),
|
||||
#(u"Bewley", u"http://www.gocomics.com/bewley"),
|
||||
#(u"Big Nate", u"http://www.gocomics.com/bignate"),
|
||||
#(u"Big Top", u"http://www.gocomics.com/bigtop"),
|
||||
#(u"Biographic", u"http://www.gocomics.com/biographic"),
|
||||
#(u"Birdbrains", u"http://www.gocomics.com/birdbrains"),
|
||||
#(u"Bleeker: The Rechargeable Dog", u"http://www.gocomics.com/bleeker"),
|
||||
#(u"Bliss", u"http://www.gocomics.com/bliss"),
|
||||
(u"Bloom County", u"http://www.gocomics.com/bloomcounty"),
|
||||
#(u"Bo Nanas", u"http://www.gocomics.com/bonanas"),
|
||||
#(u"Bob the Squirrel", u"http://www.gocomics.com/bobthesquirrel"),
|
||||
#(u"Boomerangs", u"http://www.gocomics.com/boomerangs"),
|
||||
#(u"Bottomliners", u"http://www.gocomics.com/bottomliners"),
|
||||
#(u"Bound and Gagged", u"http://www.gocomics.com/boundandgagged"),
|
||||
#(u"Brainwaves", u"http://www.gocomics.com/brainwaves"),
|
||||
#(u"Brenda Starr", u"http://www.gocomics.com/brendastarr"),
|
||||
#(u"Brevity", u"http://www.gocomics.com/brevity"),
|
||||
#(u"Brewster Rockit", u"http://www.gocomics.com/brewsterrockit"),
|
||||
#(u"Broom Hilda", u"http://www.gocomics.com/broomhilda"),
|
||||
(u"Calvin and Hobbes", u"http://www.gocomics.com/calvinandhobbes"),
|
||||
#(u"Candorville", u"http://www.gocomics.com/candorville"),
|
||||
#(u"Cathy", u"http://www.gocomics.com/cathy"),
|
||||
#(u"C'est la Vie", u"http://www.gocomics.com/cestlavie"),
|
||||
#(u"Cheap Thrills", u"http://www.gocomics.com/cheapthrills"),
|
||||
#(u"Chuckle Bros", u"http://www.gocomics.com/chucklebros"),
|
||||
#(u"Citizen Dog", u"http://www.gocomics.com/citizendog"),
|
||||
#(u"Cleats", u"http://www.gocomics.com/cleats"),
|
||||
#(u"Close to Home", u"http://www.gocomics.com/closetohome"),
|
||||
#(u"Committed", u"http://www.gocomics.com/committed"),
|
||||
#(u"Compu-toon", u"http://www.gocomics.com/compu-toon"),
|
||||
#(u"Cornered", u"http://www.gocomics.com/cornered"),
|
||||
#(u"Cow & Boy", u"http://www.gocomics.com/cow&boy"),
|
||||
#(u"Cul de Sac", u"http://www.gocomics.com/culdesac"),
|
||||
#(u"Daddy's Home", u"http://www.gocomics.com/daddyshome"),
|
||||
#(u"Deep Cover", u"http://www.gocomics.com/deepcover"),
|
||||
#(u"Dick Tracy", u"http://www.gocomics.com/dicktracy"),
|
||||
(u"Dog Eat Doug", u"http://www.gocomics.com/dogeatdoug"),
|
||||
#(u"Domestic Abuse", u"http://www.gocomics.com/domesticabuse"),
|
||||
(u"Doodles", u"http://www.gocomics.com/doodles"),
|
||||
(u"Doonesbury", u"http://www.gocomics.com/doonesbury"),
|
||||
#(u"Drabble", u"http://www.gocomics.com/drabble"),
|
||||
#(u"Eek!", u"http://www.gocomics.com/eek"),
|
||||
#(u"F Minus", u"http://www.gocomics.com/fminus"),
|
||||
#(u"Family Tree", u"http://www.gocomics.com/familytree"),
|
||||
#(u"Farcus", u"http://www.gocomics.com/farcus"),
|
||||
(u"Fat Cats Classics", u"http://www.gocomics.com/fatcatsclassics"),
|
||||
#(u"Ferd'nand", u"http://www.gocomics.com/ferdnand"),
|
||||
#(u"Flight Deck", u"http://www.gocomics.com/flightdeck"),
|
||||
(u"Flo and Friends", u"http://www.gocomics.com/floandfriends"),
|
||||
#(u"For Better or For Worse", u"http://www.gocomics.com/forbetterorforworse"),
|
||||
#(u"For Heaven's Sake", u"http://www.gocomics.com/forheavenssake"),
|
||||
#(u"Fort Knox", u"http://www.gocomics.com/fortknox"),
|
||||
#(u"FoxTrot Classics", u"http://www.gocomics.com/foxtrotclassics"),
|
||||
(u"FoxTrot", u"http://www.gocomics.com/foxtrot"),
|
||||
#(u"Frank & Ernest", u"http://www.gocomics.com/frankandernest"),
|
||||
#(u"Frazz", u"http://www.gocomics.com/frazz"),
|
||||
#(u"Fred Basset", u"http://www.gocomics.com/fredbasset"),
|
||||
#(u"Free Range", u"http://www.gocomics.com/freerange"),
|
||||
#(u"Frog Applause", u"http://www.gocomics.com/frogapplause"),
|
||||
#(u"Garfield Minus Garfield", u"http://www.gocomics.com/garfieldminusgarfield"),
|
||||
(u"Garfield", u"http://www.gocomics.com/garfield"),
|
||||
#(u"Gasoline Alley", u"http://www.gocomics.com/gasolinealley"),
|
||||
#(u"Geech Classics", u"http://www.gocomics.com/geechclassics"),
|
||||
#(u"Get Fuzzy", u"http://www.gocomics.com/getfuzzy"),
|
||||
#(u"Gil Thorp", u"http://www.gocomics.com/gilthorp"),
|
||||
#(u"Ginger Meggs", u"http://www.gocomics.com/gingermeggs"),
|
||||
#(u"Girls & Sports", u"http://www.gocomics.com/girlsandsports"),
|
||||
#(u"Graffiti", u"http://www.gocomics.com/graffiti"),
|
||||
#(u"Grand Avenue", u"http://www.gocomics.com/grandavenue"),
|
||||
#(u"Haiku Ewe", u"http://www.gocomics.com/haikuewe"),
|
||||
#(u"Heart of the City", u"http://www.gocomics.com/heartofthecity"),
|
||||
(u"Heathcliff", u"http://www.gocomics.com/heathcliff"),
|
||||
#(u"Herb and Jamaal", u"http://www.gocomics.com/herbandjamaal"),
|
||||
#(u"Herman", u"http://www.gocomics.com/herman"),
|
||||
#(u"Home and Away", u"http://www.gocomics.com/homeandaway"),
|
||||
#(u"Housebroken", u"http://www.gocomics.com/housebroken"),
|
||||
#(u"Hubert and Abby", u"http://www.gocomics.com/hubertandabby"),
|
||||
#(u"Imagine This", u"http://www.gocomics.com/imaginethis"),
|
||||
#(u"In the Bleachers", u"http://www.gocomics.com/inthebleachers"),
|
||||
#(u"In the Sticks", u"http://www.gocomics.com/inthesticks"),
|
||||
#(u"Ink Pen", u"http://www.gocomics.com/inkpen"),
|
||||
#(u"It's All About You", u"http://www.gocomics.com/itsallaboutyou"),
|
||||
#(u"Jane's World", u"http://www.gocomics.com/janesworld"),
|
||||
#(u"Joe Vanilla", u"http://www.gocomics.com/joevanilla"),
|
||||
#(u"Jump Start", u"http://www.gocomics.com/jumpstart"),
|
||||
#(u"Kit 'N' Carlyle", u"http://www.gocomics.com/kitandcarlyle"),
|
||||
#(u"La Cucaracha", u"http://www.gocomics.com/lacucaracha"),
|
||||
#(u"Last Kiss", u"http://www.gocomics.com/lastkiss"),
|
||||
#(u"Legend of Bill", u"http://www.gocomics.com/legendofbill"),
|
||||
#(u"Liberty Meadows", u"http://www.gocomics.com/libertymeadows"),
|
||||
#(u"Li'l Abner Classics", u"http://www.gocomics.com/lilabnerclassics"),
|
||||
#(u"Lio", u"http://www.gocomics.com/lio"),
|
||||
#(u"Little Dog Lost", u"http://www.gocomics.com/littledoglost"),
|
||||
#(u"Little Otto", u"http://www.gocomics.com/littleotto"),
|
||||
#(u"Lola", u"http://www.gocomics.com/lola"),
|
||||
#(u"Loose Parts", u"http://www.gocomics.com/looseparts"),
|
||||
#(u"Love Is...", u"http://www.gocomics.com/loveis"),
|
||||
#(u"Luann", u"http://www.gocomics.com/luann"),
|
||||
#(u"Maintaining", u"http://www.gocomics.com/maintaining"),
|
||||
(u"Marmaduke", u"http://www.gocomics.com/marmaduke"),
|
||||
#(u"Meg! Classics", u"http://www.gocomics.com/megclassics"),
|
||||
#(u"Middle-Aged White Guy", u"http://www.gocomics.com/middleagedwhiteguy"),
|
||||
#(u"Minimum Security", u"http://www.gocomics.com/minimumsecurity"),
|
||||
#(u"Moderately Confused", u"http://www.gocomics.com/moderatelyconfused"),
|
||||
(u"Momma", u"http://www.gocomics.com/momma"),
|
||||
#(u"Monty", u"http://www.gocomics.com/monty"),
|
||||
#(u"Motley Classics", u"http://www.gocomics.com/motleyclassics"),
|
||||
(u"Mutt & Jeff", u"http://www.gocomics.com/muttandjeff"),
|
||||
#(u"Mythtickle", u"http://www.gocomics.com/mythtickle"),
|
||||
#(u"Nancy", u"http://www.gocomics.com/nancy"),
|
||||
#(u"Natural Selection", u"http://www.gocomics.com/naturalselection"),
|
||||
#(u"Nest Heads", u"http://www.gocomics.com/nestheads"),
|
||||
#(u"NEUROTICA", u"http://www.gocomics.com/neurotica"),
|
||||
#(u"New Adventures of Queen Victoria", u"http://www.gocomics.com/thenewadventuresofqueenvictoria"),
|
||||
#(u"Non Sequitur", u"http://www.gocomics.com/nonsequitur"),
|
||||
#(u"Off The Mark", u"http://www.gocomics.com/offthemark"),
|
||||
#(u"On A Claire Day", u"http://www.gocomics.com/onaclaireday"),
|
||||
#(u"One Big Happy Classics", u"http://www.gocomics.com/onebighappyclassics"),
|
||||
#(u"One Big Happy", u"http://www.gocomics.com/onebighappy"),
|
||||
#(u"Out of the Gene Pool Re-Runs", u"http://www.gocomics.com/outofthegenepool"),
|
||||
#(u"Over the Hedge", u"http://www.gocomics.com/overthehedge"),
|
||||
#(u"Overboard", u"http://www.gocomics.com/overboard"),
|
||||
#(u"PC and Pixel", u"http://www.gocomics.com/pcandpixel"),
|
||||
(u"Peanuts", u"http://www.gocomics.com/peanuts"),
|
||||
#(u"Pearls Before Swine", u"http://www.gocomics.com/pearlsbeforeswine"),
|
||||
#(u"Pibgorn Sketches", u"http://www.gocomics.com/pibgornsketches"),
|
||||
#(u"Pibgorn", u"http://www.gocomics.com/pibgorn"),
|
||||
(u"Pickles", u"http://www.gocomics.com/pickles"),
|
||||
#(u"Pinkerton", u"http://www.gocomics.com/pinkerton"),
|
||||
#(u"Pluggers", u"http://www.gocomics.com/pluggers"),
|
||||
#(u"Pooch Cafe", u"http://www.gocomics.com/poochcafe"),
|
||||
#(u"PreTeena", u"http://www.gocomics.com/preteena"),
|
||||
#(u"Prickly City", u"http://www.gocomics.com/pricklycity"),
|
||||
#(u"Rabbits Against Magic", u"http://www.gocomics.com/rabbitsagainstmagic"),
|
||||
#(u"Raising Duncan Classics", u"http://www.gocomics.com/raisingduncanclassics"),
|
||||
#(u"Real Life Adventures", u"http://www.gocomics.com/reallifeadventures"),
|
||||
#(u"Reality Check", u"http://www.gocomics.com/realitycheck"),
|
||||
#(u"Red and Rover", u"http://www.gocomics.com/redandrover"),
|
||||
#(u"Red Meat", u"http://www.gocomics.com/redmeat"),
|
||||
#(u"Reynolds Unwrapped", u"http://www.gocomics.com/reynoldsunwrapped"),
|
||||
#(u"Rip Haywire", u"http://www.gocomics.com/riphaywire"),
|
||||
#(u"Ripley's Believe It or Not!", u"http://www.gocomics.com/ripleysbelieveitornot"),
|
||||
#(u"Ronaldinho Gaucho", u"http://www.gocomics.com/ronaldinhogaucho"),
|
||||
#(u"Rose Is Rose", u"http://www.gocomics.com/roseisrose"),
|
||||
#(u"Rubes", u"http://www.gocomics.com/rubes"),
|
||||
#(u"Rudy Park", u"http://www.gocomics.com/rudypark"),
|
||||
#(u"Scary Gary", u"http://www.gocomics.com/scarygary"),
|
||||
#(u"Shirley and Son Classics", u"http://www.gocomics.com/shirleyandsonclassics"),
|
||||
#(u"Shoe", u"http://www.gocomics.com/shoe"),
|
||||
#(u"Shoecabbage", u"http://www.gocomics.com/shoecabbage"),
|
||||
#(u"Skin Horse", u"http://www.gocomics.com/skinhorse"),
|
||||
#(u"Slowpoke", u"http://www.gocomics.com/slowpoke"),
|
||||
#(u"Soup To Nutz", u"http://www.gocomics.com/souptonutz"),
|
||||
#(u"Speed Bump", u"http://www.gocomics.com/speedbump"),
|
||||
#(u"Spot The Frog", u"http://www.gocomics.com/spotthefrog"),
|
||||
#(u"State of the Union", u"http://www.gocomics.com/stateoftheunion"),
|
||||
#(u"Stone Soup", u"http://www.gocomics.com/stonesoup"),
|
||||
#(u"Strange Brew", u"http://www.gocomics.com/strangebrew"),
|
||||
#(u"Sylvia", u"http://www.gocomics.com/sylvia"),
|
||||
#(u"Tank McNamara", u"http://www.gocomics.com/tankmcnamara"),
|
||||
#(u"Tarzan Classics", u"http://www.gocomics.com/tarzanclassics"),
|
||||
#(u"That's Life", u"http://www.gocomics.com/thatslife"),
|
||||
#(u"The Academia Waltz", u"http://www.gocomics.com/academiawaltz"),
|
||||
#(u"The Argyle Sweater", u"http://www.gocomics.com/theargylesweater"),
|
||||
#(u"The Barn", u"http://www.gocomics.com/thebarn"),
|
||||
#(u"The Boiling Point", u"http://www.gocomics.com/theboilingpoint"),
|
||||
#(u"The Boondocks", u"http://www.gocomics.com/boondocks"),
|
||||
#(u"The Born Loser", u"http://www.gocomics.com/thebornloser"),
|
||||
#(u"The Buckets", u"http://www.gocomics.com/thebuckets"),
|
||||
#(u"The City", u"http://www.gocomics.com/thecity"),
|
||||
#(u"The Dinette Set", u"http://www.gocomics.com/dinetteset"),
|
||||
#(u"The Doozies", u"http://www.gocomics.com/thedoozies"),
|
||||
#(u"The Duplex", u"http://www.gocomics.com/duplex"),
|
||||
#(u"The Elderberries", u"http://www.gocomics.com/theelderberries"),
|
||||
#(u"The Flying McCoys", u"http://www.gocomics.com/theflyingmccoys"),
|
||||
#(u"The Fusco Brothers", u"http://www.gocomics.com/thefuscobrothers"),
|
||||
#(u"The Grizzwells", u"http://www.gocomics.com/thegrizzwells"),
|
||||
#(u"The Humble Stumble", u"http://www.gocomics.com/thehumblestumble"),
|
||||
#(u"The Knight Life", u"http://www.gocomics.com/theknightlife"),
|
||||
#(u"The Meaning of Lila", u"http://www.gocomics.com/meaningoflila"),
|
||||
#(u"The Middletons", u"http://www.gocomics.com/themiddletons"),
|
||||
#(u"The Norm", u"http://www.gocomics.com/thenorm"),
|
||||
#(u"The Other Coast", u"http://www.gocomics.com/theothercoast"),
|
||||
#(u"The Quigmans", u"http://www.gocomics.com/thequigmans"),
|
||||
#(u"The Sunshine Club", u"http://www.gocomics.com/thesunshineclub"),
|
||||
#(u"Tiny Sepuk", u"http://www.gocomics.com/tinysepuk"),
|
||||
#(u"TOBY", u"http://www.gocomics.com/toby"),
|
||||
#(u"Tom the Dancing Bug", u"http://www.gocomics.com/tomthedancingbug"),
|
||||
#(u"Too Much Coffee Man", u"http://www.gocomics.com/toomuchcoffeeman"),
|
||||
#(u"Unstrange Phenomena", u"http://www.gocomics.com/unstrangephenomena"),
|
||||
#(u"W.T. Duck", u"http://www.gocomics.com/wtduck"),
|
||||
#(u"Watch Your Head", u"http://www.gocomics.com/watchyourhead"),
|
||||
#(u"Wee Pals", u"http://www.gocomics.com/weepals"),
|
||||
#(u"Winnie the Pooh", u"http://www.gocomics.com/winniethepooh"),
|
||||
#(u"Wizard of Id", u"http://www.gocomics.com/wizardofid"),
|
||||
#(u"Working Daze", u"http://www.gocomics.com/workingdaze"),
|
||||
#(u"Working It Out", u"http://www.gocomics.com/workingitout"),
|
||||
#(u"Yenny", u"http://www.gocomics.com/yenny"),
|
||||
#(u"Zack Hill", u"http://www.gocomics.com/zackhill"),
|
||||
(u"Ziggy", u"http://www.gocomics.com/ziggy"),
|
||||
#
|
||||
######## EDITORIAL CARTOONS #####################
|
||||
(u"Adam Zyglis", u"http://www.gocomics.com/adamzyglis"),
|
||||
#(u"Andy Singer", u"http://www.gocomics.com/andysinger"),
|
||||
#(u"Ben Sargent",u"http://www.gocomics.com/bensargent"),
|
||||
#(u"Bill Day", u"http://www.gocomics.com/billday"),
|
||||
#(u"Bill Schorr", u"http://www.gocomics.com/billschorr"),
|
||||
#(u"Bob Englehart", u"http://www.gocomics.com/bobenglehart"),
|
||||
(u"Bob Gorrell",u"http://www.gocomics.com/bobgorrell"),
|
||||
#(u"Brian Fairrington", u"http://www.gocomics.com/brianfairrington"),
|
||||
#(u"Bruce Beattie", u"http://www.gocomics.com/brucebeattie"),
|
||||
#(u"Cam Cardow", u"http://www.gocomics.com/camcardow"),
|
||||
#(u"Chan Lowe",u"http://www.gocomics.com/chanlowe"),
|
||||
#(u"Chip Bok",u"http://www.gocomics.com/chipbok"),
|
||||
#(u"Chris Britt",u"http://www.gocomics.com/chrisbritt"),
|
||||
#(u"Chuck Asay",u"http://www.gocomics.com/chuckasay"),
|
||||
#(u"Clay Bennett",u"http://www.gocomics.com/claybennett"),
|
||||
#(u"Clay Jones",u"http://www.gocomics.com/clayjones"),
|
||||
#(u"Dan Wasserman",u"http://www.gocomics.com/danwasserman"),
|
||||
#(u"Dana Summers",u"http://www.gocomics.com/danasummers"),
|
||||
#(u"Daryl Cagle", u"http://www.gocomics.com/darylcagle"),
|
||||
#(u"David Fitzsimmons", u"http://www.gocomics.com/davidfitzsimmons"),
|
||||
(u"Dick Locher",u"http://www.gocomics.com/dicklocher"),
|
||||
#(u"Don Wright",u"http://www.gocomics.com/donwright"),
|
||||
#(u"Donna Barstow",u"http://www.gocomics.com/donnabarstow"),
|
||||
#(u"Drew Litton", u"http://www.gocomics.com/drewlitton"),
|
||||
#(u"Drew Sheneman",u"http://www.gocomics.com/drewsheneman"),
|
||||
#(u"Ed Stein", u"http://www.gocomics.com/edstein"),
|
||||
#(u"Eric Allie", u"http://www.gocomics.com/ericallie"),
|
||||
#(u"Gary Markstein", u"http://www.gocomics.com/garymarkstein"),
|
||||
#(u"Gary McCoy", u"http://www.gocomics.com/garymccoy"),
|
||||
#(u"Gary Varvel", u"http://www.gocomics.com/garyvarvel"),
|
||||
#(u"Glenn McCoy",u"http://www.gocomics.com/glennmccoy"),
|
||||
#(u"Henry Payne", u"http://www.gocomics.com/henrypayne"),
|
||||
#(u"Jack Ohman",u"http://www.gocomics.com/jackohman"),
|
||||
#(u"JD Crowe", u"http://www.gocomics.com/jdcrowe"),
|
||||
#(u"Jeff Danziger",u"http://www.gocomics.com/jeffdanziger"),
|
||||
#(u"Jeff Parker", u"http://www.gocomics.com/jeffparker"),
|
||||
#(u"Jeff Stahler", u"http://www.gocomics.com/jeffstahler"),
|
||||
#(u"Jerry Holbert", u"http://www.gocomics.com/jerryholbert"),
|
||||
#(u"Jim Morin",u"http://www.gocomics.com/jimmorin"),
|
||||
#(u"Joel Pett",u"http://www.gocomics.com/joelpett"),
|
||||
#(u"John Cole", u"http://www.gocomics.com/johncole"),
|
||||
#(u"John Darkow", u"http://www.gocomics.com/johndarkow"),
|
||||
#(u"John Deering",u"http://www.gocomics.com/johndeering"),
|
||||
#(u"John Sherffius", u"http://www.gocomics.com/johnsherffius"),
|
||||
#(u"Ken Catalino",u"http://www.gocomics.com/kencatalino"),
|
||||
#(u"Kerry Waghorn",u"http://www.gocomics.com/facesinthenews"),
|
||||
#(u"Kevin Kallaugher",u"http://www.gocomics.com/kevinkallaugher"),
|
||||
#(u"Lalo Alcaraz",u"http://www.gocomics.com/laloalcaraz"),
|
||||
#(u"Larry Wright", u"http://www.gocomics.com/larrywright"),
|
||||
#(u"Lisa Benson", u"http://www.gocomics.com/lisabenson"),
|
||||
#(u"Marshall Ramsey", u"http://www.gocomics.com/marshallramsey"),
|
||||
#(u"Matt Bors", u"http://www.gocomics.com/mattbors"),
|
||||
#(u"Matt Davies",u"http://www.gocomics.com/mattdavies"),
|
||||
#(u"Michael Ramirez", u"http://www.gocomics.com/michaelramirez"),
|
||||
#(u"Mike Keefe", u"http://www.gocomics.com/mikekeefe"),
|
||||
#(u"Mike Luckovich", u"http://www.gocomics.com/mikeluckovich"),
|
||||
#(u"MIke Thompson", u"http://www.gocomics.com/mikethompson"),
|
||||
#(u"Monte Wolverton", u"http://www.gocomics.com/montewolverton"),
|
||||
#(u"Mr. Fish", u"http://www.gocomics.com/mrfish"),
|
||||
#(u"Nate Beeler", u"http://www.gocomics.com/natebeeler"),
|
||||
#(u"Nick Anderson", u"http://www.gocomics.com/nickanderson"),
|
||||
#(u"Pat Bagley", u"http://www.gocomics.com/patbagley"),
|
||||
#(u"Pat Oliphant",u"http://www.gocomics.com/patoliphant"),
|
||||
#(u"Paul Conrad",u"http://www.gocomics.com/paulconrad"),
|
||||
#(u"Paul Szep", u"http://www.gocomics.com/paulszep"),
|
||||
#(u"RJ Matson", u"http://www.gocomics.com/rjmatson"),
|
||||
#(u"Rob Rogers", u"http://www.gocomics.com/robrogers"),
|
||||
#(u"Robert Ariail", u"http://www.gocomics.com/robertariail"),
|
||||
#(u"Scott Stantis", u"http://www.gocomics.com/scottstantis"),
|
||||
#(u"Signe Wilkinson", u"http://www.gocomics.com/signewilkinson"),
|
||||
#(u"Small World",u"http://www.gocomics.com/smallworld"),
|
||||
#(u"Steve Benson", u"http://www.gocomics.com/stevebenson"),
|
||||
#(u"Steve Breen", u"http://www.gocomics.com/stevebreen"),
|
||||
#(u"Steve Kelley", u"http://www.gocomics.com/stevekelley"),
|
||||
#(u"Steve Sack", u"http://www.gocomics.com/stevesack"),
|
||||
#(u"Stuart Carlson",u"http://www.gocomics.com/stuartcarlson"),
|
||||
#(u"Ted Rall",u"http://www.gocomics.com/tedrall"),
|
||||
#(u"(Th)ink", u"http://www.gocomics.com/think"),
|
||||
#(u"Tom Toles",u"http://www.gocomics.com/tomtoles"),
|
||||
(u"Tony Auth",u"http://www.gocomics.com/tonyauth"),
|
||||
#(u"Views of the World",u"http://www.gocomics.com/viewsoftheworld"),
|
||||
#(u"ViewsAfrica",u"http://www.gocomics.com/viewsafrica"),
|
||||
#(u"ViewsAmerica",u"http://www.gocomics.com/viewsamerica"),
|
||||
#(u"ViewsAsia",u"http://www.gocomics.com/viewsasia"),
|
||||
#(u"ViewsBusiness",u"http://www.gocomics.com/viewsbusiness"),
|
||||
#(u"ViewsEurope",u"http://www.gocomics.com/viewseurope"),
|
||||
#(u"ViewsLatinAmerica",u"http://www.gocomics.com/viewslatinamerica"),
|
||||
#(u"ViewsMidEast",u"http://www.gocomics.com/viewsmideast"),
|
||||
(u"Walt Handelsman",u"http://www.gocomics.com/walthandelsman"),
|
||||
#(u"Wayne Stayskal",u"http://www.gocomics.com/waynestayskal"),
|
||||
#(u"Wit of the World",u"http://www.gocomics.com/witoftheworld"),
|
||||
]:
|
||||
print 'Working on: ', title
|
||||
("9 Chickweed Lane", "http://gocomics.com/9_chickweed_lane"),
|
||||
("Agnes", "http://gocomics.com/agnes"),
|
||||
("Alley Oop", "http://gocomics.com/alley_oop"),
|
||||
("Andy Capp", "http://gocomics.com/andy_capp"),
|
||||
("Arlo & Janis", "http://gocomics.com/arlo&janis"),
|
||||
("B.C.", "http://gocomics.com/bc"),
|
||||
("Ballard Street", "http://gocomics.com/ballard_street"),
|
||||
# ("Ben", "http://comics.com/ben"),
|
||||
# ("Betty", "http://comics.com/betty"),
|
||||
# ("Big Nate", "http://comics.com/big_nate"),
|
||||
# ("Brevity", "http://comics.com/brevity"),
|
||||
# ("Candorville", "http://comics.com/candorville"),
|
||||
# ("Cheap Thrills", "http://comics.com/cheap_thrills"),
|
||||
# ("Committed", "http://comics.com/committed"),
|
||||
# ("Cow & Boy", "http://comics.com/cow&boy"),
|
||||
# ("Daddy's Home", "http://comics.com/daddys_home"),
|
||||
# ("Dog eat Doug", "http://comics.com/dog_eat_doug"),
|
||||
# ("Drabble", "http://comics.com/drabble"),
|
||||
# ("F Minus", "http://comics.com/f_minus"),
|
||||
# ("Family Tree", "http://comics.com/family_tree"),
|
||||
# ("Farcus", "http://comics.com/farcus"),
|
||||
# ("Fat Cats Classics", "http://comics.com/fat_cats_classics"),
|
||||
# ("Ferd'nand", "http://comics.com/ferdnand"),
|
||||
# ("Flight Deck", "http://comics.com/flight_deck"),
|
||||
# ("Flo & Friends", "http://comics.com/flo&friends"),
|
||||
# ("Fort Knox", "http://comics.com/fort_knox"),
|
||||
# ("Frank & Ernest", "http://comics.com/frank&ernest"),
|
||||
# ("Frazz", "http://comics.com/frazz"),
|
||||
# ("Free Range", "http://comics.com/free_range"),
|
||||
# ("Geech Classics", "http://comics.com/geech_classics"),
|
||||
# ("Get Fuzzy", "http://comics.com/get_fuzzy"),
|
||||
# ("Girls & Sports", "http://comics.com/girls&sports"),
|
||||
# ("Graffiti", "http://comics.com/graffiti"),
|
||||
# ("Grand Avenue", "http://comics.com/grand_avenue"),
|
||||
# ("Heathcliff", "http://comics.com/heathcliff"),
|
||||
# "Heathcliff, a street-smart and mischievous cat with many adventures."
|
||||
# ("Herb and Jamaal", "http://comics.com/herb_and_jamaal"),
|
||||
# ("Herman", "http://comics.com/herman"),
|
||||
# ("Home and Away", "http://comics.com/home_and_away"),
|
||||
# ("It's All About You", "http://comics.com/its_all_about_you"),
|
||||
# ("Jane's World", "http://comics.com/janes_world"),
|
||||
# ("Jump Start", "http://comics.com/jump_start"),
|
||||
# ("Kit 'N' Carlyle", "http://comics.com/kit_n_carlyle"),
|
||||
# ("Li'l Abner Classics", "http://comics.com/lil_abner_classics"),
|
||||
# ("Liberty Meadows", "http://comics.com/liberty_meadows"),
|
||||
# ("Little Dog Lost", "http://comics.com/little_dog_lost"),
|
||||
# ("Lola", "http://comics.com/lola"),
|
||||
# ("Luann", "http://comics.com/luann"),
|
||||
# ("Marmaduke", "http://comics.com/marmaduke"),
|
||||
# ("Meg! Classics", "http://comics.com/meg_classics"),
|
||||
# ("Minimum Security", "http://comics.com/minimum_security"),
|
||||
# ("Moderately Confused", "http://comics.com/moderately_confused"),
|
||||
# ("Momma", "http://comics.com/momma"),
|
||||
# ("Monty", "http://comics.com/monty"),
|
||||
# ("Motley Classics", "http://comics.com/motley_classics"),
|
||||
# ("Nancy", "http://comics.com/nancy"),
|
||||
# ("Natural Selection", "http://comics.com/natural_selection"),
|
||||
# ("Nest Heads", "http://comics.com/nest_heads"),
|
||||
# ("Off The Mark", "http://comics.com/off_the_mark"),
|
||||
# ("On a Claire Day", "http://comics.com/on_a_claire_day"),
|
||||
# ("One Big Happy Classics", "http://comics.com/one_big_happy_classics"),
|
||||
# ("Over the Hedge", "http://comics.com/over_the_hedge"),
|
||||
# ("PC and Pixel", "http://comics.com/pc_and_pixel"),
|
||||
# ("Peanuts", "http://comics.com/peanuts"),
|
||||
# ("Pearls Before Swine", "http://comics.com/pearls_before_swine"),
|
||||
# ("Pickles", "http://comics.com/pickles"),
|
||||
# ("Prickly City", "http://comics.com/prickly_city"),
|
||||
# ("Raising Duncan Classics", "http://comics.com/raising_duncan_classics"),
|
||||
# ("Reality Check", "http://comics.com/reality_check"),
|
||||
# ("Red & Rover", "http://comics.com/red&rover"),
|
||||
# ("Rip Haywire", "http://comics.com/rip_haywire"),
|
||||
# ("Ripley's Believe It or Not!", "http://comics.com/ripleys_believe_it_or_not"),
|
||||
# ("Rose Is Rose", "http://comics.com/rose_is_rose"),
|
||||
# ("Rubes", "http://comics.com/rubes"),
|
||||
# ("Rudy Park", "http://comics.com/rudy_park"),
|
||||
# ("Scary Gary", "http://comics.com/scary_gary"),
|
||||
# ("Shirley and Son Classics", "http://comics.com/shirley_and_son_classics"),
|
||||
# ("Soup To Nutz", "http://comics.com/soup_to_nutz"),
|
||||
# ("Speed Bump", "http://comics.com/speed_bump"),
|
||||
# ("Spot The Frog", "http://comics.com/spot_the_frog"),
|
||||
# ("State of the Union", "http://comics.com/state_of_the_union"),
|
||||
# ("Strange Brew", "http://comics.com/strange_brew"),
|
||||
# ("Tarzan Classics", "http://comics.com/tarzan_classics"),
|
||||
# ("That's Life", "http://comics.com/thats_life"),
|
||||
# ("The Barn", "http://comics.com/the_barn"),
|
||||
# ("The Born Loser", "http://comics.com/the_born_loser"),
|
||||
# ("The Buckets", "http://comics.com/the_buckets"),
|
||||
# ("The Dinette Set", "http://comics.com/the_dinette_set"),
|
||||
# ("The Grizzwells", "http://comics.com/the_grizzwells"),
|
||||
# ("The Humble Stumble", "http://comics.com/the_humble_stumble"),
|
||||
# ("The Knight Life", "http://comics.com/the_knight_life"),
|
||||
# ("The Meaning of Lila", "http://comics.com/the_meaning_of_lila"),
|
||||
# ("The Other Coast", "http://comics.com/the_other_coast"),
|
||||
# ("The Sunshine Club", "http://comics.com/the_sunshine_club"),
|
||||
# ("Unstrange Phenomena", "http://comics.com/unstrange_phenomena"),
|
||||
# ("Watch Your Head", "http://comics.com/watch_your_head"),
|
||||
# ("Wizard of Id", "http://comics.com/wizard_of_id"),
|
||||
# ("Working Daze", "http://comics.com/working_daze"),
|
||||
# ("Working It Out", "http://comics.com/working_it_out"),
|
||||
# ("Zack Hill", "http://comics.com/zack_hill"),
|
||||
# ("(Th)ink", "http://comics.com/think"),
|
||||
# "Tackling the political and social issues impacting communities of color."
|
||||
# ("Adam Zyglis", "http://comics.com/adam_zyglis"),
|
||||
# "Known for his excellent caricatures, as well as independent and incisive imagery. "
|
||||
# ("Andy Singer", "http://comics.com/andy_singer"),
|
||||
# ("Bill Day", "http://comics.com/bill_day"),
|
||||
# "Powerful images on sensitive issues."
|
||||
# ("Bill Schorr", "http://comics.com/bill_schorr"),
|
||||
# ("Bob Englehart", "http://comics.com/bob_englehart"),
|
||||
# ("Brian Fairrington", "http://comics.com/brian_fairrington"),
|
||||
# ("Bruce Beattie", "http://comics.com/bruce_beattie"),
|
||||
# ("Cam Cardow", "http://comics.com/cam_cardow"),
|
||||
# ("Chip Bok", "http://comics.com/chip_bok"),
|
||||
# ("Chris Britt", "http://comics.com/chris_britt"),
|
||||
# ("Chuck Asay", "http://comics.com/chuck_asay"),
|
||||
# ("Clay Bennett", "http://comics.com/clay_bennett"),
|
||||
# ("Daryl Cagle", "http://comics.com/daryl_cagle"),
|
||||
# ("David Fitzsimmons", "http://comics.com/david_fitzsimmons"),
|
||||
# "David Fitzsimmons is a new editorial cartoons on comics.com. He is also a staff writer and editorial cartoonist for the Arizona Daily Star. "
|
||||
# ("Drew Litton", "http://comics.com/drew_litton"),
|
||||
# "Drew Litton is an artist who is probably best known for his sports cartoons. He received the National Cartoonist Society Sports Cartoon Award for 1993. "
|
||||
# ("Ed Stein", "http://comics.com/ed_stein"),
|
||||
# "Winner of the Fischetti Award in 2006 and the Scripps Howard National Journalism Award, 1999, Ed Stein has been the editorial cartoonist for the Rocky Mountain News since 1978. "
|
||||
# ("Eric Allie", "http://comics.com/eric_allie"),
|
||||
# "Eric Allie is an editorial cartoonist with the Pioneer Press and CNS News. "
|
||||
# ("Gary Markstein", "http://comics.com/gary_markstein"),
|
||||
# ("Gary McCoy", "http://comics.com/gary_mccoy"),
|
||||
# "Gary McCoy is known for his editorial cartoons, humor and inane ramblings. He is a 2 time nominee for Best Magazine Cartoonist of the Year by the National Cartoonists Society. He resides in Belleville, IL. "
|
||||
# ("Gary Varvel", "http://comics.com/gary_varvel"),
|
||||
# ("Henry Payne", "http://comics.com/henry_payne"),
|
||||
# ("JD Crowe", "http://comics.com/jd_crowe"),
|
||||
# ("Jeff Parker", "http://comics.com/jeff_parker"),
|
||||
# ("Jeff Stahler", "http://comics.com/jeff_stahler"),
|
||||
# ("Jerry Holbert", "http://comics.com/jerry_holbert"),
|
||||
# ("John Cole", "http://comics.com/john_cole"),
|
||||
# ("John Darkow", "http://comics.com/john_darkow"),
|
||||
# "John Darkow is a contributing editorial cartoonist for the Humor Times as well as editoiral cartoonist for the Columbia Daily Tribune, Missouri"
|
||||
# ("John Sherffius", "http://comics.com/john_sherffius"),
|
||||
# ("Larry Wright", "http://comics.com/larry_wright"),
|
||||
# ("Lisa Benson", "http://comics.com/lisa_benson"),
|
||||
# ("Marshall Ramsey", "http://comics.com/marshall_ramsey"),
|
||||
# ("Matt Bors", "http://comics.com/matt_bors"),
|
||||
# ("Michael Ramirez", "http://comics.com/michael_ramirez"),
|
||||
# ("Mike Keefe", "http://comics.com/mike_keefe"),
|
||||
# ("Mike Luckovich", "http://comics.com/mike_luckovich"),
|
||||
# ("MIke Thompson", "http://comics.com/mike_thompson"),
|
||||
# ("Monte Wolverton", "http://comics.com/monte_wolverton"),
|
||||
# "Unique mix of perspectives"
|
||||
# ("Mr. Fish", "http://comics.com/mr_fish"),
|
||||
# "Side effects may include swelling"
|
||||
# ("Nate Beeler", "http://comics.com/nate_beeler"),
|
||||
# "Middle America meets the Beltway."
|
||||
# ("Nick Anderson", "http://comics.com/nick_anderson"),
|
||||
# ("Pat Bagley", "http://comics.com/pat_bagley"),
|
||||
# "Unfair and Totally Unbalanced."
|
||||
# ("Paul Szep", "http://comics.com/paul_szep"),
|
||||
# ("RJ Matson", "http://comics.com/rj_matson"),
|
||||
# "Power cartoons from NYC and Capitol Hill"
|
||||
# ("Rob Rogers", "http://comics.com/rob_rogers"),
|
||||
# "Humorous slant on current events"
|
||||
# ("Robert Ariail", "http://comics.com/robert_ariail"),
|
||||
# "Clever and unpredictable"
|
||||
# ("Scott Stantis", "http://comics.com/scott_stantis"),
|
||||
# ("Signe Wilkinson", "http://comics.com/signe_wilkinson"),
|
||||
# ("Steve Benson", "http://comics.com/steve_benson"),
|
||||
# ("Steve Breen", "http://comics.com/steve_breen"),
|
||||
# ("Steve Kelley", "http://comics.com/steve_kelley"),
|
||||
# ("Steve Sack", "http://comics.com/steve_sack"),
|
||||
]:
|
||||
            articles = self.make_links(url)
            if articles:
                feeds.append((title, articles))
        return feeds

    def make_links(self, url):
        title = 'Temp'
        soup = self.index_to_soup(url)
        # print 'soup: ', soup
        title = ''
        current_articles = []
        pages = range(1, self.num_comics_to_get+1)
        for page in pages:
            page_soup = self.index_to_soup(url)
            if page_soup:
                try:
                    strip_title = page_soup.find(name='div', attrs={'class':'top'}).h1.a.string
                except:
                    strip_title = 'Error - no Title found'
                try:
                    date_title = page_soup.find('ul', attrs={'class': 'feature-nav'}).li.string
                    if not date_title:
                        date_title = page_soup.find('ul', attrs={'class': 'feature-nav'}).li.string
                except:
                    date_title = 'Error - no Date found'
                title = strip_title + ' - ' + date_title
                for i in range(2):
                    try:
                        strip_url_date = page_soup.find(name='div', attrs={'class':'top'}).h1.a['href']
                        break #success - this is normal exit
                    except:
                        strip_url_date = None
                        continue #try to get strip_url_date again
                for i in range(2):
                    try:
                        prev_strip_url_date = page_soup.find('a', attrs={'class': 'prev'})['href']
                        break #success - this is normal exit
                    except:
                        prev_strip_url_date = None
                        continue #try to get prev_strip_url_date again
                if strip_url_date:
                    page_url = 'http://www.gocomics.com' + strip_url_date
                else:
                    continue
                if prev_strip_url_date:
                    prev_page_url = 'http://www.gocomics.com' + prev_strip_url_date
                else:
                    continue
        from datetime import datetime, timedelta
        now = datetime.now()
        dates = [(now-timedelta(days=d)).strftime('%Y/%m/%d') for d in range(self.num_comics_to_get)]

        for page in dates:
            page_url = url + '/' + str(page)
            print(page_url)
            soup = self.index_to_soup(page_url)
            if soup:
                strip_tag = self.tag_to_string(soup.find('a'))
                if strip_tag:
                    print 'strip_tag: ', strip_tag
                    title = strip_tag
                    print 'title: ', title
                    current_articles.append({'title': title, 'url': page_url, 'description':'', 'date':''})
            url = prev_page_url
        current_articles.reverse()
        return current_articles

    def preprocess_html(self, soup):
        if soup.title:
            title_string = soup.title.string.strip()
            _cd = title_string.split(',',1)[1]
            comic_date = ' '.join(_cd.split(' ', 4)[0:-1])
            if soup.h1.span:
                artist = soup.h1.span.string
                soup.h1.span.string.replaceWith(comic_date + artist)
        feature_item = soup.find('p',attrs={'class':'feature_item'})
        if feature_item.a:
            a_tag = feature_item.a
            a_href = a_tag["href"]
            img_tag = a_tag.img
            img_tag["src"] = a_href
            img_tag["width"] = self.comic_size
            img_tag["height"] = None
        return self.adeify_images(soup)

    extra_css = '''
        h1{font-family:Arial,Helvetica,sans-serif; font-weight:bold;font-size:large;}
        h2{font-family:Arial,Helvetica,sans-serif; font-weight:normal;font-size:small;}
        img {max-width:100%; min-width:100%;}
        p{font-family:Arial,Helvetica,sans-serif;font-size:small;}
        body{font-family:Helvetica,Arial,sans-serif;font-size:small;}
        '''

    '''
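For readers skimming this diff: the recipe above drives everything through one loop, passing each (title, url) pair from the comic list to make_links() and collecting non-empty results into feeds. The following is only a stripped-down sketch of that pattern; the class name, the two-entry comic list and the placeholder num_comics_to_get value are invented for illustration, while the method names, URL scheme and article-dict shape come from the code above:

    from calibre.web.feeds.news import BasicNewsRecipe

    class GoComicsSketch(BasicNewsRecipe):   # illustrative subclass, not part of the diff
        title = u'GoComics sketch'
        num_comics_to_get = 7                # placeholder value

        def parse_index(self):
            feeds = []
            for comic_title, url in [
                (u'Ziggy', u'http://www.gocomics.com/ziggy'),
                (u'Tony Auth', u'http://www.gocomics.com/tonyauth'),
            ]:
                articles = self.make_links(url)        # list of article dicts
                if articles:
                    feeds.append((comic_title, articles))
            return feeds

        def make_links(self, url):
            # mirrors the date-based walk in the real recipe: one page per day
            from datetime import datetime, timedelta
            now = datetime.now()
            dates = [(now - timedelta(days=d)).strftime('%Y/%m/%d')
                     for d in range(self.num_comics_to_get)]
            return [{'title': d, 'url': url + '/' + d, 'description': '', 'date': ''}
                    for d in dates]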
11
recipes/lightspeed_magazine.recipe
Normal file
@ -0,0 +1,11 @@
from calibre.web.feeds.news import BasicNewsRecipe

class AdvancedUserRecipe1366025923(BasicNewsRecipe):
    title = u'Lightspeed Magazine'
    language = 'en'
    __author__ = 'Jose Pinto'
    oldest_article = 31
    max_articles_per_feed = 100
    auto_cleanup = True
    use_embedded_content = False
    feeds = [(u'Lastest Stories', u'http://www.lightspeedmagazine.com/rss-2/')]
@ -36,6 +36,9 @@ from BeautifulSoup import BeautifulSoup
|
||||
    Changed order of regex to speedup proces
    Version 1.9.3 23-05-2012
    Updated Cover image
    Version 1.9.4 19-04-2013
    Added regex filter for mailto
    Updated for new layout of metro-site
'''

class AdvancedUserRecipe1306097511(BasicNewsRecipe):
@ -43,7 +46,7 @@ class AdvancedUserRecipe1306097511(BasicNewsRecipe):
    oldest_article = 1.2
    max_articles_per_feed = 25
    __author__ = u'DrMerry'
    description = u'Metro Nederland'
    description = u'Metro Nederland v1.9.4 2013-04-19'
    language = u'nl'
    simultaneous_downloads = 5
    masthead_url = 'http://blog.metronieuws.nl/wp-content/themes/metro/images/header.gif'
@ -68,13 +71,17 @@ class AdvancedUserRecipe1306097511(BasicNewsRecipe):
        #(re.compile('(</?)h2', re.DOTALL|re.IGNORECASE),lambda match:'\1em')
        ]

    remove_tags_before= dict(id='date')
    remove_tags_after = [dict(name='div', attrs={'class':['column-1-3','gallery-text']})]#id='share-and-byline')]
    remove_tags_before= dict(id='subwrapper')
    remove_tags_after = dict(name='div', attrs={'class':['body-area','article-main-area']})
        #name='div', attrs={'class':['subwrapper']})]
        #'column-1-3','gallery-text']})]#id='share-and-byline')]

    filter_regexps = [r'mailto:.*']

    remove_tags = [
        dict(name=['iframe','script','noscript','style']),
        dict(name='div', attrs={'class':['column-4-5','column-1-5','ad-msg','col-179 ','col-373 ','clear','ad','navigation',re.compile('share-tools(-top)?'),'tools','metroCommentFormWrap','article-tools-below-title','related-links','padding-top-15',re.compile('^promo.*?$'),'teaser-component',re.compile('fb(-comments|_iframe_widget)'),'promos','header-links','promo-2']}),
        dict(id=['column-1-5-bottom','column-4-5',re.compile('^ad(\d+|adcomp.*?)?$'),'adadcomp-4','margin-5','sidebar',re.compile('^article-\d'),'comments','gallery-1']),
        dict(name='div', attrs={'class':['aside clearfix','aside clearfix middle-col-line','comments','share-tools','article-right-column','column-4-5','column-1-5','ad-msg','col-179 ','col-373 ','clear','ad','navigation',re.compile('share-tools(-top)?'),'tools','metroCommentFormWrap','article-tools-below-title','related-links','padding-top-15',re.compile('^promo.*?$'),'teaser-component',re.compile('fb(-comments|_iframe_widget)'),'promos','header-links','promo-2']}),
        dict(id=['article-2','googleads','column-1-5-bottom','column-4-5',re.compile('^ad(\d+|adcomp.*?)?$'),'adadcomp-4','margin-5','sidebar',re.compile('^article-\d'),'comments','gallery-1','sharez_container','ts-container','topshares','ts-title']),
        dict(name='a', attrs={'name':'comments'}),
        #dict(name='div', attrs={'data-href'}),
        dict(name='img', attrs={'class':'top-line','title':'volledig scherm'}),
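The remove_tags entries above are plain class/id filters that calibre applies to the downloaded page. As a rough illustration only (the HTML string is invented, and the snippet uses the same old BeautifulSoup 3 API this recipe already imports), a single entry behaves approximately like this:

    from BeautifulSoup import BeautifulSoup

    html = '<div class="ad-msg">advert</div><div class="article-main-area">story</div>'
    soup = BeautifulSoup(html)
    # one remove_tags-style dict expressed as a findAll() call
    for tag in soup.findAll(name='div', attrs={'class': ['ad-msg', 'navigation']}):
        tag.extract()          # calibre drops matching tags in much the same way
    print soup                 # -> <div class="article-main-area">story</div>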
@ -42,7 +42,6 @@ class Nzz(BasicNewsRecipe):
|
||||
soup = self.index_to_soup(baseref)
|
||||
|
||||
articles = {}
|
||||
key = None
|
||||
ans = []
|
||||
|
||||
issuelist = soup.find(id="issueSelectorList")
|
||||
@ -52,27 +51,25 @@ class Nzz(BasicNewsRecipe):
|
||||
section = f.string
|
||||
sectionref = baseref + f['href']
|
||||
|
||||
# print "section is "+section +" and ref is "+sectionref
|
||||
ans.append(section)
|
||||
|
||||
articlesoup = self.index_to_soup(sectionref)
|
||||
|
||||
articlesoup = articlesoup.findAll('article','article')
|
||||
for a in articlesoup:
|
||||
artlink = a.find('a')
|
||||
artlink = a.find('a')
|
||||
|
||||
arthref = baseref + artlink['href']
|
||||
arthead = a.find('h2')
|
||||
artcaption = arthead.string
|
||||
arthref = baseref + artlink['href']
|
||||
arthead = a.find('h2')
|
||||
artcaption = arthead.string
|
||||
|
||||
pubdate = strftime('%a, %d %b')
|
||||
pubdate = strftime('%a, %d %b')
|
||||
|
||||
if not artcaption is None:
|
||||
# print " found article named "+artcaption+" at "+arthref
|
||||
if not articles.has_key(section):
|
||||
articles[section] = []
|
||||
articles[section].append(
|
||||
dict(title=artcaption, url=arthref, date=pubdate, description='', content=''))
|
||||
if not artcaption is None:
|
||||
if not articles.has_key(section):
|
||||
articles[section] = []
|
||||
articles[section].append(
|
||||
dict(title=artcaption, url=arthref, date=pubdate, description='', content=''))
|
||||
|
||||
ans = [(key, articles[key]) for key in ans if articles.has_key(key)]
|
||||
return ans
|
||||
@ -80,10 +77,10 @@ class Nzz(BasicNewsRecipe):
|
||||
def get_browser(self):
|
||||
br = BasicNewsRecipe.get_browser(self)
|
||||
if self.username is not None and self.password is not None:
|
||||
br.open('https://webpaper.nzz.ch/login')
|
||||
br.open('https://cas.nzz.ch/cas/login')
|
||||
br.select_form(nr=0)
|
||||
br['_username'] = self.username
|
||||
br['_password'] = self.password
|
||||
br['username'] = self.username
|
||||
br['password'] = self.password
|
||||
br.submit()
|
||||
return br
|
||||
|
||||
|
@ -11,7 +11,8 @@ class PsychologyToday(BasicNewsRecipe):
|
||||
language = 'en'
|
||||
category = 'news'
|
||||
encoding = 'UTF-8'
|
||||
keep_only_tags = [dict(attrs={'class':['print-title', 'print-submitted', 'print-content', 'print-footer', 'print-source_url', 'print-links']})]
|
||||
auto_cleanup = True
|
||||
#keep_only_tags = [dict(attrs={'class':['print-title', 'print-submitted', 'print-content', 'print-footer', 'print-source_url', 'print-links']})]
|
||||
no_javascript = True
|
||||
no_stylesheets = True
|
||||
|
||||
@ -31,50 +32,32 @@ class PsychologyToday(BasicNewsRecipe):
|
||||
self.timefmt = u' [%s]'%date
|
||||
|
||||
articles = []
|
||||
for post in div.findAll('div', attrs={'class':'collections-node-feature-info'}):
|
||||
for post in div.findAll('div', attrs={'class':'collections-node-feature collection-node-even'}):
|
||||
title = self.tag_to_string(post.find('h2'))
|
||||
author_item=post.find('div', attrs={'class':'collection-node-byline'})
|
||||
author = re.sub(r'.*by\s',"",self.tag_to_string(author_item).strip())
|
||||
title = title + u' (%s)'%author
|
||||
article_page= self.index_to_soup('http://www.psychologytoday.com'+post.find('a', href=True)['href'])
|
||||
print_page=article_page.find('li', attrs={'class':'print_html first'})
|
||||
url='http://www.psychologytoday.com'+print_page.find('a',href=True)['href']
|
||||
url= 'http://www.psychologytoday.com'+post.find('a', href=True)['href']
|
||||
#print_page=article_page.find('li', attrs={'class':'print_html first'})
|
||||
#url='http://www.psychologytoday.com'+print_page.find('a',href=True)['href']
|
||||
desc = self.tag_to_string(post.find('div', attrs={'class':'collection-node-description'})).strip()
|
||||
self.log('Found article:', title)
|
||||
self.log('\t', url)
|
||||
self.log('\t', desc)
|
||||
articles.append({'title':title, 'url':url, 'date':'','description':desc})
|
||||
for post in div.findAll('div', attrs={'class':'collections-node-feature collection-node-odd'}):
|
||||
title = self.tag_to_string(post.find('h2'))
|
||||
author_item=post.find('div', attrs={'class':'collection-node-byline'})
|
||||
author = re.sub(r'.*by\s',"",self.tag_to_string(author_item).strip())
|
||||
title = title + u' (%s)'%author
|
||||
url= 'http://www.psychologytoday.com'+post.find('a', href=True)['href']
|
||||
#print_page=article_page.find('li', attrs={'class':'print_html first'})
|
||||
#url='http://www.psychologytoday.com'+print_page.find('a',href=True)['href']
|
||||
desc = self.tag_to_string(post.find('div', attrs={'class':'collection-node-description'})).strip()
|
||||
self.log('Found article:', title)
|
||||
self.log('\t', url)
|
||||
self.log('\t', desc)
|
||||
articles.append({'title':title, 'url':url, 'date':'','description':desc})
|
||||
|
||||
for post in div.findAll('div', attrs={'class':'collections-node-thumbnail-info'}):
|
||||
title = self.tag_to_string(post.find('h2'))
|
||||
author_item=post.find('div', attrs={'class':'collection-node-byline'})
|
||||
article_page= self.index_to_soup('http://www.psychologytoday.com'+post.find('a', href=True)['href'])
|
||||
print_page=article_page.find('li', attrs={'class':'print_html first'})
|
||||
description = post.find('div', attrs={'class':'collection-node-description'})
|
||||
author = re.sub(r'.*by\s',"",self.tag_to_string(description.nextSibling).strip())
|
||||
desc = self.tag_to_string(description).strip()
|
||||
url='http://www.psychologytoday.com'+print_page.find('a',href=True)['href']
|
||||
title = title + u' (%s)'%author
|
||||
self.log('Found article:', title)
|
||||
self.log('\t', url)
|
||||
self.log('\t', desc)
|
||||
articles.append({'title':title, 'url':url, 'date':'','description':desc})
|
||||
|
||||
for post in div.findAll('li', attrs={'class':['collection-item-list-odd','collection-item-list-even']}):
|
||||
title = self.tag_to_string(post.find('h2'))
|
||||
author_item=post.find('div', attrs={'class':'collection-node-byline'})
|
||||
author = re.sub(r'.*by\s',"",self.tag_to_string(author_item).strip())
|
||||
title = title + u' (%s)'%author
|
||||
article_page= self.index_to_soup('http://www.psychologytoday.com'+post.find('a', href=True)['href'])
|
||||
print_page=article_page.find('li', attrs={'class':'print_html first'})
|
||||
if print_page is not None:
|
||||
url='http://www.psychologytoday.com'+print_page.find('a',href=True)['href']
|
||||
desc = self.tag_to_string(post.find('div', attrs={'class':'collection-node-description'})).strip()
|
||||
self.log('Found article:', title)
|
||||
self.log('\t', url)
|
||||
self.log('\t', desc)
|
||||
articles.append({'title':title, 'url':url, 'date':'','description':desc})
|
||||
|
||||
return [('Current Issue', articles)]
|
||||
|
||||
|
||||
|
@ -50,6 +50,10 @@ class ScienceNewsIssue(BasicNewsRecipe):
|
||||
        dict(name='ul', attrs={'id':'toc'})
        ]

    remove_tags= [ dict(name='a', attrs={'class':'enlarge print-no'}),
                   dict(name='a', attrs={'rel':'shadowbox'})
                 ]

    feeds = [(u"Science News Current Issues", u'http://www.sciencenews.org/view/feed/type/edition/name/issues.rss')]

    match_regexps = [
@ -57,6 +61,12 @@ class ScienceNewsIssue(BasicNewsRecipe):
        r'www.sciencenews.org/view/generic/id'
    ]

    def image_url_processor(self, baseurl, url):
        x = url.split('/')
        if x[4] == u'scale':
            url = u'http://www.sciencenews.org/view/download/id/' + x[6] + u'/name/' + x[-1]
        return url

    def get_cover_url(self):
        cover_url = None
        index = 'http://www.sciencenews.org/view/home'
@ -64,7 +74,6 @@ class ScienceNewsIssue(BasicNewsRecipe):
        link_item = soup.find(name = 'img',alt = "issue")
        if link_item:
            cover_url = 'http://www.sciencenews.org' + link_item['src'] + '.jpg'

        return cover_url

    def preprocess_html(self, soup):
11
recipes/the_feature.recipe
Normal file
@ -0,0 +1,11 @@
|
||||
from calibre.web.feeds.news import BasicNewsRecipe

class AdvancedUserRecipe1365777047(BasicNewsRecipe):
    title = u'The Feature'
    __author__ = 'Jose Pinto'
    language = 'en'
    oldest_article = 30
    max_articles_per_feed = 100
    auto_cleanup = True
    use_embedded_content = False
    feeds = [(u'Latest', u'http://thefeature.net/rss/links')]
27
recipes/voice_of_america.recipe
Normal file
@ -0,0 +1,27 @@
|
||||
from calibre.web.feeds.news import BasicNewsRecipe

class HindustanTimes(BasicNewsRecipe):
    title = u'Voice of America'
    language = 'en'
    __author__ = 'Krittika Goyal'
    oldest_article = 15 #days
    max_articles_per_feed = 25
    #encoding = 'cp1252'
    use_embedded_content = False

    no_stylesheets = True
    auto_cleanup = True


    feeds = [
        ('All Zones',
         'http://learningenglish.voanews.com/rss/?count=20'),
        ('World',
         'http://learningenglish.voanews.com/rss/?count=20&zoneid=957'),
        ('USA',
         'http://learningenglish.voanews.com/rss/?count=20&zoneid=958'),
        ('Health',
         'http://learningenglish.voanews.com/rss/?count=20&zoneid=955'),

    ]
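A quick way to sanity-check one of the zoneid feeds above before running the full recipe is to parse it directly; this is only an illustrative snippet using the feedparser module that ships with calibre, with the URL copied from the list above:

    import feedparser

    d = feedparser.parse('http://learningenglish.voanews.com/rss/?count=20&zoneid=957')
    for entry in d.entries[:5]:
        print entry.title, entry.link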
@ -448,8 +448,15 @@
|
||||
    <xsl:template match = "rtf:field[@type='hyperlink']">
        <xsl:element name ="a">
            <xsl:attribute name = "href">
                <xsl:if test = "not(contains(@link, '/'))">#</xsl:if>
                <xsl:value-of select = "@link"/>
                <xsl:choose>
                    <xsl:when test="@argument">
                        <xsl:value-of select="@argument"/>
                    </xsl:when>
                    <xsl:otherwise>
                        <xsl:if test = "not(contains(@link, '/'))">#</xsl:if>
                        <xsl:value-of select = "@link"/>
                    </xsl:otherwise>
                </xsl:choose>
            </xsl:attribute>
            <xsl:apply-templates/>
        </xsl:element>
@ -38,7 +38,7 @@ class Check(Command):
|
||||
            if cache.get(y, 0) == mtime:
                continue
            if (f.endswith('.py') and f not in (
                    'feedparser.py', 'pyparsing.py', 'markdown.py') and
                    'feedparser.py', 'markdown.py') and
                    'prs500/driver.py' not in y):
                yield y, mtime
            if f.endswith('.coffee'):
@ -48,7 +48,7 @@ binary_includes = [
|
||||
'/usr/lib/libpng14.so.14',
|
||||
'/usr/lib/libexslt.so.0',
|
||||
# Ensure that libimobiledevice is compiled against openssl, not gnutls
|
||||
'/usr/lib/libimobiledevice.so.3',
|
||||
'/usr/lib/libimobiledevice.so.4',
|
||||
'/usr/lib/libusbmuxd.so.2',
|
||||
'/usr/lib/libplist.so.1',
|
||||
MAGICK_PREFIX+'/lib/libMagickWand.so.5',
|
||||
@ -112,7 +112,6 @@ class LinuxFreeze(Command):
|
||||
else:
|
||||
ffi = glob.glob('/usr/lib/libffi.so.?')[-1]
|
||||
|
||||
|
||||
for x in binary_includes + [stdcpp, ffi]:
|
||||
dest = self.bin_dir if '/bin/' in x else self.lib_dir
|
||||
shutil.copy2(x, dest)
|
||||
@ -226,7 +225,6 @@ class LinuxFreeze(Command):
|
||||
except:
|
||||
self.warn('Failed to byte-compile', y)
|
||||
|
||||
|
||||
def run_builder(self, cmd, verbose=True):
|
||||
p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE)
|
||||
@ -256,7 +254,6 @@ class LinuxFreeze(Command):
|
||||
self.info('Archive %s created: %.2f MB'%(dist,
|
||||
os.stat(dist).st_size/(1024.**2)))
|
||||
|
||||
|
||||
def build_launchers(self):
|
||||
self.obj_dir = self.j(self.src_root, 'build', 'launcher')
|
||||
if not os.path.exists(self.obj_dir):
|
||||
@ -268,7 +265,8 @@ class LinuxFreeze(Command):
|
||||
cflags = '-fno-strict-aliasing -W -Wall -c -O2 -pipe -DPYTHON_VER="python%s"'%self.py_ver
|
||||
cflags = cflags.split() + ['-I/usr/include/python'+self.py_ver]
|
||||
for src, obj in zip(sources, objects):
|
||||
if not self.newer(obj, headers+[src, __file__]): continue
|
||||
if not self.newer(obj, headers+[src, __file__]):
|
||||
continue
|
||||
cmd = ['gcc'] + cflags + ['-fPIC', '-o', obj, src]
|
||||
self.run_builder(cmd)
|
||||
|
||||
@ -330,8 +328,7 @@ class LinuxFreeze(Command):
|
||||
|
||||
self.run_builder(cmd, verbose=False)
|
||||
|
||||
|
||||
def create_site_py(self): # {{{
|
||||
def create_site_py(self): # {{{
|
||||
with open(self.j(self.py_dir, 'site.py'), 'wb') as f:
|
||||
f.write(textwrap.dedent('''\
|
||||
import sys
|
||||
|
@ -37,7 +37,6 @@ class OSX32_Freeze(Command):
|
||||
action='store_true',
|
||||
help='Only build launchers')
|
||||
|
||||
|
||||
def run(self, opts):
|
||||
global info, warn
|
||||
info, warn = self.info, self.warn
|
||||
@ -332,7 +331,7 @@ class Py2App(object):
|
||||
def create_plist(self):
|
||||
from calibre.ebooks import BOOK_EXTENSIONS
|
||||
env = dict(**ENV)
|
||||
env['CALIBRE_LAUNCHED_FROM_BUNDLE']='1';
|
||||
env['CALIBRE_LAUNCHED_FROM_BUNDLE']='1'
|
||||
docs = [{'CFBundleTypeName':'E-book',
|
||||
'CFBundleTypeExtensions':list(BOOK_EXTENSIONS),
|
||||
'CFBundleTypeRole':'Viewer',
|
||||
@ -395,12 +394,11 @@ class Py2App(object):
|
||||
self.install_dylib(os.path.join(SW, 'lib', 'libpng12.0.dylib'))
|
||||
self.install_dylib(os.path.join(SW, 'lib', 'libpng.3.dylib'))
|
||||
|
||||
|
||||
@flush
|
||||
def add_fontconfig(self):
|
||||
info('\nAdding fontconfig')
|
||||
for x in ('fontconfig.1', 'freetype.6', 'expat.1',
|
||||
'plist.1', 'usbmuxd.2', 'imobiledevice.3'):
|
||||
'plist.1', 'usbmuxd.2', 'imobiledevice.4'):
|
||||
src = os.path.join(SW, 'lib', 'lib'+x+'.dylib')
|
||||
self.install_dylib(src)
|
||||
dst = os.path.join(self.resources_dir, 'fonts')
|
||||
@ -568,7 +566,7 @@ class Py2App(object):
|
||||
|
||||
@flush
|
||||
def compile_py_modules(self):
|
||||
info( '\nCompiling Python modules')
|
||||
info('\nCompiling Python modules')
|
||||
base = join(self.resources_dir, 'Python')
|
||||
for x in os.walk(base):
|
||||
root = x[0]
|
||||
@ -584,7 +582,7 @@ class Py2App(object):
|
||||
|
||||
@flush
|
||||
def create_console_app(self):
|
||||
info( '\nCreating console.app')
|
||||
info('\nCreating console.app')
|
||||
cc_dir = os.path.join(self.contents_dir, 'console.app', 'Contents')
|
||||
os.makedirs(cc_dir)
|
||||
for x in os.listdir(self.contents_dir):
|
||||
@ -607,7 +605,6 @@ class Py2App(object):
|
||||
shutil.copy2(join(base, 'site.py'), join(self.resources_dir, 'Python',
|
||||
'lib', 'python'+self.version_info))
|
||||
|
||||
|
||||
@flush
|
||||
def makedmg(self, d, volname,
|
||||
destdir='dist',
|
||||
@ -630,7 +627,7 @@ class Py2App(object):
|
||||
'-volname', volname, '-format', format, dmg])
|
||||
shutil.rmtree(tdir)
|
||||
if internet_enable:
|
||||
subprocess.check_call(['/usr/bin/hdiutil', 'internet-enable', '-yes', dmg])
|
||||
subprocess.check_call(['/usr/bin/hdiutil', 'internet-enable', '-yes', dmg])
|
||||
size = os.stat(dmg).st_size/(1024*1024.)
|
||||
info('\nInstaller size: %.2fMB\n'%size)
|
||||
return dmg
|
||||
|
352
setup/installer/windows/libimobiledevice_notes.rst
Normal file
@ -0,0 +1,352 @@
|
||||
Notes on building libiMobileDevice for Windows
=========================================================

1. Get source files, set up VS project
2. Build libcnary
3. Build libgen
4. Build libplist
5. Build libusbmuxd
6. Build libimobiledevice
7. Exporting libimobiledevice entry points
8. Finished

Get source files, set up VS project
|
||||
-------------------------------------
|
||||
|
||||
Starting with source downloaded from https://github.com/storoj/libimobiledevice-win32
|
||||
|
||||
Now create a new directory in which we will work::
|
||||
mkdir imobiledevice
|
||||
cp -r libcnary libgen vendors/include libimobiledevice libplist libusbmuxd imobiledevice
|
||||
cd imobiledevice
|
||||
rm `find . -name '*.props'`
|
||||
rm `find . -name *.vcxproj*`
|
||||
rm `find . -name *.txt`
|
||||
cd ..
|
||||
mv imobiledevice ~/sw/private/
|
||||
|
||||
In include/unistd.h, comment out line 11::
|
||||
|
||||
// #include <getopt.h> /* getopt from: http://www.pwilson.net/sample.html.
|
||||
|
||||
Create a new VS 2008 Project
|
||||
- File|New|Project…
|
||||
- Visual C++: Win32
|
||||
- Template: Win32Project
|
||||
- Name: imobiledevice
|
||||
- Location: Choose ~/sw/private
|
||||
- Solution: (Uncheck the create directory for solution checkbox)
|
||||
- Click OK
|
||||
- Next screen, select Application Settings tab
|
||||
- Application type: DLL
|
||||
- Additional options: Empty project
|
||||
- Click Finish
|
||||
|
||||
In the tool bar Solution Configurations dropdown, select Release.
|
||||
In the tool bar Solution Platforms dropdown, select Win32.
|
||||
(For 64 bit choose new configuration and create x64 with properties copied from
|
||||
win32).
|
||||
|
||||
|
||||
Build libcnary
|
||||
-------------------------
|
||||
|
||||
In VS Solution Explorer, right-click Solution 'imobiledevice', then click
|
||||
Add|New Project.
|
||||
- Name: libcnary
|
||||
- Location: Add \imobiledevice to the end of the default location
|
||||
- Visual C++: Win32, Template: Win32 Project
|
||||
- Click OK
|
||||
- Application Settings: Static library (not using precompiled headers)
|
||||
- Click Finish
|
||||
|
||||
In VS Solution Explorer, select the libcnary project, Project->Show All files.
|
||||
- Right-click the include folder, select 'Include In Project'.
|
||||
- Select all the .c files, right click, select 'Include In Project'
|
||||
- Select all the .c files, right click -> Properties -> C/C++ -> Advanced -> Compile as C++ code
|
||||
- Properties|Configuration Properties|C/C++:
|
||||
General|Additional Include Directories:
|
||||
"$(ProjectDir)\include"
|
||||
- If 64bits, then Right click->Properties->Configuration Manager change
|
||||
Win32 to x64 for the libcnary project and check the Build checkbox
|
||||
- Right-click libcnary, Build. Should build with 0 errors, 0 warnings.
|
||||
|
||||
|
||||
Build libplist
|
||||
---------------------
|
||||
|
||||
In VS Solution Explorer, right-click Solution 'imobiledevice', then click
|
||||
Add|New Project.
|
||||
- Name: libplist
|
||||
- Visual C++: Win32, Template: Win32 Project
|
||||
- Location: Add \imobiledevice to the end of the default location
|
||||
- Click OK
|
||||
- Application Settings: DLL (Empty project)
|
||||
- Click Finish
|
||||
|
||||
In VS Solution Explorer, select the libplist project, then click the 'Show all files'
|
||||
button.
|
||||
- Right-click the include folder, select Include In Project
|
||||
- Right-click the src folder, select Include In Project
|
||||
- Set 7 C files to compile as C++
|
||||
Advanced|Compile As: Compile as C++ Code (/TP)
|
||||
base64.c, bplist.c, bytearray.c, hashtable.c, plist.c, ptarray.c, xplist.c
|
||||
- Properties|Configuration Properties|C/C++:
|
||||
General|Additional Include Directories:
|
||||
$(ProjectDir)\include
|
||||
$(SolutionDir)\include
|
||||
$(SolutionDir)\libcnary\include
|
||||
$SW\include\libxml2 (if it exists)
|
||||
$SW\include (make sure this is last in the list)
|
||||
- Properties|C/C++|Preprocessor
|
||||
Preprocessor Definitions: Add the following items
|
||||
__STDC_FORMAT_MACROS
|
||||
plist_EXPORTS
|
||||
- Properties -> Linker -> General -> Additional Library directories: ~/sw/lib (for libxml2.lib)
|
||||
- Properties -> Linker -> Input -> Additional Dependencies: libxml2.lib
|
||||
- Project Dependencies:
|
||||
Depends on: libcnary
|
||||
- If 64bits, then Right click->Properties->Configuration Manager change
|
||||
Win32 to x64 for the libcnary project and check the Build checkbox
|
||||
- Right-click libplist, Build. Should build with 0 errors (there will be
|
||||
warnings about datatype conversion for the 64 bit build)
|
||||
|
||||
Build libusbmuxd
|
||||
----------------------
|
||||
|
||||
In VS Solution Explorer, right-click Solution 'imobiledevice', then click
|
||||
Add|New Project.
|
||||
- Name: libusbmuxd
|
||||
- Visual C++: Win32, Template: Win32 Project
|
||||
- Location: Add \imobiledevice to the end of the default location
|
||||
- Click OK
|
||||
- Application Settings: DLL (Empty project)
|
||||
- Click Finish
|
||||
|
||||
In VS Solution Explorer, select the libusbmuxd project, then click the 'Show all files'
|
||||
button.
|
||||
- Select all 7 files, right-click, Include In Project.
|
||||
- Set 3 C files to compile as C++
|
||||
Advanced|Compile As: Compile as C++ Code (/TP)
|
||||
libusbmuxd.c, sock_stuff.c, utils.c
|
||||
- Properties|Configuration Properties|C/C++:
|
||||
General|Additional Include Directories:
|
||||
$(SolutionDir)\include
|
||||
$(SolutionDir)\libplist\include
|
||||
- Properties|Linker|Input|Additional Dependencies:
|
||||
ws2_32.lib
|
||||
- Properties|C/C++|Preprocessor
|
||||
Preprocessor Definitions: add 'HAVE_PLIST'
|
||||
- Project Dependencies:
|
||||
Depends on: libplist
|
||||
- Edit sock_stuff.c #227:
|
||||
fprintf(stderr, "%s: gethostbyname returned NULL address!\n",
|
||||
__FUNCTION__);
|
||||
- Edit libusbmuxd\usbmuxd.h, insert at #26:
|
||||
#ifdef LIBUSBMUXD_EXPORTS
|
||||
# define LIBUSBMUXD_API __declspec( dllexport )
|
||||
#else
|
||||
# define LIBUSBMUXD_API __declspec( dllimport )
|
||||
#endif
|
||||
Then, at each function, insert LIBUSBMUXD_API ahead of declaration:
|
||||
usbmuxd_subscribe
|
||||
usbmuxd_unsubscribe
|
||||
usbmuxd_get_device_list
|
||||
usbmuxd_device_list_free
|
||||
usbmuxd_get_device_by_udid
|
||||
usbmuxd_connect
|
||||
usbmuxd_disconnect
|
||||
usbmuxd_send
|
||||
usbmuxd_recv_timeout
|
||||
usbmuxd_recv
|
||||
usbmuxd_set_use_inotify
|
||||
usbmuxd_set_debug_level
|
||||
|
||||
- If 64bits, then Right click->Properties->Configuration Manager change
|
||||
Win32 to x64 for the libcnary project and check the Build checkbox
|
||||
- Right-click libusbmuxd, Build. Should build with 0 errors, 10 or 14 warnings
|
||||
|
||||
Build libgen
|
||||
-----------------------
|
||||
|
||||
In VS Solution Explorer, right-click Solution 'imobiledevice', then click
|
||||
Add|New Project.
|
||||
- Name: libgen
|
||||
- Visual C++: Win32, Template: Win32 Project
|
||||
- Location: Add \imobiledevice to the end of the default location
|
||||
- Click OK
|
||||
- Application Settings: Static library (not using precompiled headers)
|
||||
- Click Finish
|
||||
|
||||
In VS Solution Explorer, select the libgen project, then click the 'Show all files'
|
||||
button.
|
||||
- Select libgen.cpp and libgen.h, right click, select 'Include In Project'
|
||||
- Open libgen.cpp, comment out line 5::
|
||||
// #include <fileapi.h>
|
||||
(This is a Windows 8 include file, not needed to build in Win 7)
|
||||
- If 64bits, then Right click->Properties->Configuration Manager change
|
||||
Win32 to x64 for the libcnary project and check the Build checkbox
|
||||
- Right-click libgen, Build. Should build with 0 errors, 0 warnings.
|
||||
|
||||
Build libimobiledevice
|
||||
----------------------------
|
||||
|
||||
In VS Solution Explorer, right-click Solution 'imobiledevice', then click
|
||||
Add|New Project.
|
||||
- Name: libimobiledevice
|
||||
- Visual C++: Win32, Template: Win32 Project
|
||||
- Location: Add \imobiledevice to the end of the default location
|
||||
- Click OK
|
||||
- Application Settings: DLL (Empty project)
|
||||
- Click Finish
|
||||
|
||||
- Right-click the include folder, select Include In Project
|
||||
- Right-click the src folder, select Include In Project
|
||||
- Set .c files to compile as C++
|
||||
Advanced|Compile As: Compile as C++ Code (/TP)
|
||||
- Properties|Configuration Properties|C/C++:
|
||||
General|Additional Include Directories:
|
||||
$(ProjectDir)\include
|
||||
$(SolutionDir)\include
|
||||
$(SolutionDir)\libplist\include
|
||||
$(SolutionDir)\libgen
|
||||
$(SolutionDir)\libusbmuxd
|
||||
$SW\private\openssl\include
|
||||
- Properties -> Linker -> General -> Additional library directories:
|
||||
$SW\private\openssl\lib
|
||||
$(OutDir)
|
||||
- Properties|Linker|Input|Additional Dependencies:
|
||||
libeay32.lib
|
||||
ssleay32.lib
|
||||
libplist.lib
|
||||
libgen.lib
|
||||
libusbmuxd.lib
|
||||
ws2_32.lib
|
||||
- Properties|C/C++|Preprocessor
|
||||
Preprocessor Definitions:
|
||||
ASN1_STATIC
|
||||
HAVE_OPENSSL
|
||||
__LITTLE_ENDIAN__
|
||||
_LIB
|
||||
- Project Dependencies:
|
||||
libcnary
|
||||
libgen
|
||||
libplist
|
||||
libusbmuxd
|
||||
- Edit afc.c #35:
|
||||
Comment out lines 35-37 (Synchapi.h is a Windows 8 include file)
|
||||
- Edit userprofile.c and add at line 25:
|
||||
#include <Windows.h>
|
||||
- Edit libimobiledevice\include\libimobiledevice\afc.h
|
||||
At #26, insert
|
||||
#define AFC_API __declspec( dllexport )
|
||||
Then, at each function, insert AFC_API ahead of declaration
|
||||
afc_client_new
|
||||
afc_client_free
|
||||
afc_get_device_info
|
||||
afc_read_directory
|
||||
afc_get_file_info
|
||||
afc_file_open
|
||||
afc_file_close
|
||||
afc_file_lock
|
||||
afc_file_read
|
||||
afc_file_write
|
||||
afc_file_seek
|
||||
afc_file_tell
|
||||
afc_file_truncate
|
||||
afc_remove_path
|
||||
afc_rename_path
|
||||
afc_make_directory
|
||||
afc_truncate
|
||||
afc_make_link
|
||||
afc_set_file_time
|
||||
afc_get_device_info_key
|
||||
|
||||
- Edit libimobiledevice\include\libimobiledevice\housearrest.h
|
||||
At #26, insert
|
||||
#define HOUSE_ARREST_API __declspec( dllexport )
|
||||
Then, at each function, insert HOUSE_ARREST_API ahead of declaration
|
||||
house_arrest_client_new
|
||||
house_arrest_client_free
|
||||
house_arrest_send_request
|
||||
house_arrest_send_command
|
||||
house_arrest_get_result
|
||||
afc_client_new_from_house_arrest_client
|
||||
|
||||
- Edit libimobiledevice\include\libimobiledevice\installation_proxy.h
|
||||
At #26, insert
|
||||
#define INSTALLATION_PROXY_API __declspec( dllexport )
|
||||
Then, at each function, insert INSTALLATION_PROXY_API ahead of declaration
|
||||
instproxy_client_new
|
||||
instproxy_client_free
|
||||
instproxy_browse
|
||||
instproxy_install
|
||||
instproxy_upgrade
|
||||
instproxy_uninstall
|
||||
instproxy_lookup_archives
|
||||
instproxy_archive
|
||||
instproxy_restore
|
||||
instproxy_remove_archive
|
||||
instproxy_client_options_new
|
||||
instproxy_client_options_add
|
||||
instproxy_client_options_free
|
||||
|
||||
- Edit libimobiledevice\include\libimobiledevice\libimobiledevice.h
|
||||
At #26, insert
|
||||
#define LIBIMOBILEDEVICE_API __declspec( dllexport )
|
||||
Then, at each function, insert LIBIMOBILEDEVICE_API ahead of declaration
|
||||
idevice_set_debug_level
|
||||
idevice_event_subscribe
|
||||
idevice_event_unsubscribe
|
||||
idevice_get_device_list
|
||||
idevice_device_list_free
|
||||
idevice_new
|
||||
idevice_free
|
||||
idevice_connect
|
||||
idevice_disconnect
|
||||
idevice_connection_send
|
||||
idevice_connection_receive_timeout
|
||||
idevice_connection_receive
|
||||
idevice_get_handle
|
||||
idevice_get_udid
|
||||
|
||||
- Edit libimobiledevice\include\libimobiledevice\lockdown.h
|
||||
At #27, insert
|
||||
#define LOCKDOWN_API __declspec( dllexport )
|
||||
Then, at each function, insert LOCKDOWN_API ahead of declaration
|
||||
lockdownd_client_new
|
||||
lockdownd_client_new_with_handshake
|
||||
lockdownd_client_free
|
||||
lockdownd_query_type
|
||||
lockdownd_get_value
|
||||
lockdownd_set_value
|
||||
lockdownd_remove_value
|
||||
lockdownd_start_service
|
||||
lockdownd_start_session
|
||||
lockdownd_stop_session
|
||||
lockdownd_send
|
||||
lockdownd_receive
|
||||
lockdownd_pair
|
||||
lockdownd_validate_pair
|
||||
lockdownd_unpair
|
||||
lockdownd_activate
|
||||
lockdownd_deactivate
|
||||
lockdownd_enter_recovery
|
||||
lockdownd_goodbye
|
||||
lockdownd_getdevice_udid
|
||||
lockdownd_get_device_name
|
||||
lockdownd_get_sync_data
|
||||
lockdownd_data_classes_free
|
||||
lockdownd_service_descriptor_free
|
||||
|
||||
- If 64bits, then Right click->Properties->Configuration Manager change
|
||||
Win32 to x64 for the libcnary project and check the Build checkbox
|
||||
- Right-click libimobiledevice, Build.
|
||||
0 errors, 60 warnings.
|
||||
|
||||
Copy the DLLs
-----------------

Run::

    cp `find . -name '*.dll'` ~/sw/bin/
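Once the DLLs are in ~/sw/bin, one hedged way to confirm the exported entry points actually resolve is to load the library from Python with ctypes and call one of the functions listed above. This is only a sketch: it assumes the conventional libimobiledevice signature idevice_get_device_list(char ***, int *) and that libimobiledevice.dll and its dependencies (libplist, libusbmuxd, OpenSSL) are on the DLL search path.

    # Sketch only: verify the exports resolve after copying the DLLs.
    import ctypes

    lib = ctypes.CDLL('libimobiledevice.dll')

    devices = ctypes.POINTER(ctypes.c_char_p)()
    count = ctypes.c_int(0)
    # idevice_get_device_list is one of the LIBIMOBILEDEVICE_API exports above;
    # 0 is the success code in the libimobiledevice error convention.
    if lib.idevice_get_device_list(ctypes.byref(devices), ctypes.byref(count)) == 0:
        print 'Found %d connected device(s)' % count.value
        lib.idevice_device_list_free(devices)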
@ -540,6 +540,11 @@ Then open ChmLib.dsw in Visual Studio, change the configuration to Release
|
||||
(Win32|x64) and build solution, this will generate a static library in
|
||||
Release/ChmLib.lib
|
||||
|
||||
libimobiledevice
|
||||
------------------
|
||||
|
||||
See libimobiledevice_notes.rst
|
||||
|
||||
calibre
|
||||
---------
|
||||
|
||||
|
@ -10,15 +10,18 @@ msgstr ""
|
||||
"Report-Msgid-Bugs-To: Debian iso-codes team <pkg-isocodes-"
|
||||
"devel@lists.alioth.debian.org>\n"
|
||||
"POT-Creation-Date: 2011-11-25 14:01+0000\n"
|
||||
"PO-Revision-Date: 2011-08-27 05:57+0000\n"
|
||||
"Last-Translator: Mohammad Gamal <f2c2001@yahoo.com>\n"
|
||||
"Language-Team: Arabic <support@arabeyes.org>\n"
|
||||
"PO-Revision-Date: 2013-04-15 10:56+0000\n"
|
||||
"Last-Translator: LADHARI <nader.ladhari@gmail.com>\n"
|
||||
"Language-Team: awadh alghaamdi <awadh_al_ghaamdi@hotmail.com>\n"
|
||||
"MIME-Version: 1.0\n"
|
||||
"Content-Type: text/plain; charset=UTF-8\n"
|
||||
"Content-Transfer-Encoding: 8bit\n"
|
||||
"X-Launchpad-Export-Date: 2011-11-26 05:06+0000\n"
|
||||
"X-Generator: Launchpad (build 14381)\n"
|
||||
"X-Launchpad-Export-Date: 2013-04-16 04:37+0000\n"
|
||||
"X-Generator: Launchpad (build 16564)\n"
|
||||
"X-Poedit-Country: SAUDI ARABIA\n"
|
||||
"Language: ar\n"
|
||||
"X-Poedit-Language: Arabic\n"
|
||||
"X-Poedit-SourceCharset: utf-8\n"
|
||||
|
||||
#. name for aaa
|
||||
msgid "Ghotuo"
|
||||
@ -66,7 +69,7 @@ msgstr ""
|
||||
|
||||
#. name for aam
|
||||
msgid "Aramanik"
|
||||
msgstr ""
|
||||
msgstr "ارامانيك"
|
||||
|
||||
#. name for aan
|
||||
msgid "Anambé"
|
||||
@ -110,7 +113,7 @@ msgstr ""
|
||||
|
||||
#. name for aaz
|
||||
msgid "Amarasi"
|
||||
msgstr ""
|
||||
msgstr "أماراسي"
|
||||
|
||||
#. name for aba
|
||||
msgid "Abé"
|
||||
@ -294,7 +297,7 @@ msgstr ""
|
||||
|
||||
#. name for acx
|
||||
msgid "Arabic; Omani"
|
||||
msgstr ""
|
||||
msgstr "عماني"
|
||||
|
||||
#. name for acy
|
||||
msgid "Arabic; Cypriot"
|
||||
|
2402
setup/iso_639/cs.po
@ -17,14 +17,14 @@ msgstr ""
|
||||
"Report-Msgid-Bugs-To: Debian iso-codes team <pkg-isocodes-"
|
||||
"devel@lists.alioth.debian.org>\n"
|
||||
"POT-Creation-Date: 2011-11-25 14:01+0000\n"
|
||||
"PO-Revision-Date: 2011-09-27 18:12+0000\n"
|
||||
"PO-Revision-Date: 2013-04-21 09:31+0000\n"
|
||||
"Last-Translator: Kovid Goyal <Unknown>\n"
|
||||
"Language-Team: Danish <dansk@klid.dk>\n"
|
||||
"MIME-Version: 1.0\n"
|
||||
"Content-Type: text/plain; charset=UTF-8\n"
|
||||
"Content-Transfer-Encoding: 8bit\n"
|
||||
"X-Launchpad-Export-Date: 2011-11-26 05:11+0000\n"
|
||||
"X-Generator: Launchpad (build 14381)\n"
|
||||
"X-Launchpad-Export-Date: 2013-04-22 05:23+0000\n"
|
||||
"X-Generator: Launchpad (build 16567)\n"
|
||||
"Language: da\n"
|
||||
|
||||
#. name for aaa
|
||||
@ -10253,7 +10253,7 @@ msgstr ""
|
||||
|
||||
#. name for inh
|
||||
msgid "Ingush"
|
||||
msgstr "Engelsk"
|
||||
msgstr "Ingush"
|
||||
|
||||
#. name for inj
|
||||
msgid "Inga; Jungle"
|
||||
|
@ -18,14 +18,14 @@ msgstr ""
|
||||
"Report-Msgid-Bugs-To: Debian iso-codes team <pkg-isocodes-"
|
||||
"devel@lists.alioth.debian.org>\n"
|
||||
"POT-Creation-Date: 2011-11-25 14:01+0000\n"
|
||||
"PO-Revision-Date: 2013-03-15 22:01+0000\n"
|
||||
"Last-Translator: Hendrik Knackstedt <Unknown>\n"
|
||||
"PO-Revision-Date: 2013-04-11 13:29+0000\n"
|
||||
"Last-Translator: Simon Schütte <simonschuette@arcor.de>\n"
|
||||
"Language-Team: Ubuntu German Translators\n"
|
||||
"MIME-Version: 1.0\n"
|
||||
"Content-Type: text/plain; charset=UTF-8\n"
|
||||
"Content-Transfer-Encoding: 8bit\n"
|
||||
"X-Launchpad-Export-Date: 2013-03-16 04:55+0000\n"
|
||||
"X-Generator: Launchpad (build 16532)\n"
|
||||
"X-Launchpad-Export-Date: 2013-04-12 05:20+0000\n"
|
||||
"X-Generator: Launchpad (build 16564)\n"
|
||||
"Language: de\n"
|
||||
|
||||
#. name for aaa
|
||||
@ -58,7 +58,7 @@ msgstr "Ambrak"
|
||||
|
||||
#. name for aah
|
||||
msgid "Arapesh; Abu'"
|
||||
msgstr "Arapesh;Abu' (Papua-Neuguinea)"
|
||||
msgstr "Arapesh; Abu' (Papua-Neuguinea)"
|
||||
|
||||
#. name for aai
|
||||
msgid "Arifama-Miniafia"
|
||||
|
@ -12,14 +12,14 @@ msgstr ""
|
||||
"Report-Msgid-Bugs-To: Debian iso-codes team <pkg-isocodes-"
|
||||
"devel@lists.alioth.debian.org>\n"
|
||||
"POT-Creation-Date: 2011-11-25 14:01+0000\n"
|
||||
"PO-Revision-Date: 2011-09-27 17:41+0000\n"
|
||||
"Last-Translator: Kovid Goyal <Unknown>\n"
|
||||
"PO-Revision-Date: 2013-04-12 15:49+0000\n"
|
||||
"Last-Translator: Costis Aspiotis <aspiotisk@gmail.com>\n"
|
||||
"Language-Team: Greek <debian-l10n-greek@lists.debian.org>\n"
|
||||
"MIME-Version: 1.0\n"
|
||||
"Content-Type: text/plain; charset=UTF-8\n"
|
||||
"Content-Transfer-Encoding: 8bit\n"
|
||||
"X-Launchpad-Export-Date: 2011-11-26 05:17+0000\n"
|
||||
"X-Generator: Launchpad (build 14381)\n"
|
||||
"X-Launchpad-Export-Date: 2013-04-13 05:32+0000\n"
|
||||
"X-Generator: Launchpad (build 16564)\n"
|
||||
"Language: el\n"
|
||||
|
||||
#. name for aaa
|
||||
@ -30825,7 +30825,7 @@ msgstr ""
|
||||
|
||||
#. name for zxx
|
||||
msgid "No linguistic content"
|
||||
msgstr ""
|
||||
msgstr "Χωρίς γλωσσολογικό περιεχόμενο"
|
||||
|
||||
#. name for zyb
|
||||
msgid "Zhuang; Yongbei"
|
||||
|
@ -4,7 +4,7 @@ __license__ = 'GPL v3'
|
||||
__copyright__ = '2008, Kovid Goyal kovid@kovidgoyal.net'
__docformat__ = 'restructuredtext en'
__appname__ = u'calibre'
numeric_version = (0, 9, 27)
numeric_version = (0, 9, 28)
__version__ = u'.'.join(map(unicode, numeric_version))
__author__ = u"Kovid Goyal <kovid@kovidgoyal.net>"

@ -29,7 +29,7 @@ isportable = os.environ.get('CALIBRE_PORTABLE_BUILD', None) is not None
ispy3 = sys.version_info.major > 2
isxp = iswindows and sys.getwindowsversion().major < 6
is64bit = sys.maxsize > (1 << 32)
isworker = os.environ.has_key('CALIBRE_WORKER') or os.environ.has_key('CALIBRE_SIMPLE_WORKER')
isworker = 'CALIBRE_WORKER' in os.environ or 'CALIBRE_SIMPLE_WORKER' in os.environ
if isworker:
    os.environ.pop('CALIBRE_FORCE_ANSI', None)

@ -58,7 +58,8 @@ def get_osx_version():
    return _osx_ver

filesystem_encoding = sys.getfilesystemencoding()
if filesystem_encoding is None: filesystem_encoding = 'utf-8'
if filesystem_encoding is None:
    filesystem_encoding = 'utf-8'
else:
    try:
        if codecs.lookup(filesystem_encoding).name == 'ascii':
@ -85,7 +86,7 @@ def _get_cache_dir():
    confcache = os.path.join(config_dir, u'caches')
    if isportable:
        return confcache
    if os.environ.has_key('CALIBRE_CACHE_DIRECTORY'):
    if 'CALIBRE_CACHE_DIRECTORY' in os.environ:
        return os.path.abspath(os.environ['CALIBRE_CACHE_DIRECTORY'])

    if iswindows:
@ -184,7 +185,7 @@ if plugins is None:

CONFIG_DIR_MODE = 0700

if os.environ.has_key('CALIBRE_CONFIG_DIRECTORY'):
if 'CALIBRE_CONFIG_DIRECTORY' in os.environ:
    config_dir = os.path.abspath(os.environ['CALIBRE_CONFIG_DIRECTORY'])
elif iswindows:
    if plugins['winutil'][0] is None:
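The has_key-to-in conversions above touch the environment overrides that calibre.constants reads at import time. For reference, pointing a test run at throwaway directories looks roughly like this; the paths are examples only, and config_dir is the module-level name used in the diff above:

    import os
    os.environ['CALIBRE_CONFIG_DIRECTORY'] = '/tmp/calibre-test-config'
    os.environ['CALIBRE_CACHE_DIRECTORY'] = '/tmp/calibre-test-cache'

    # Set these before the first calibre import; config_dir is computed on import.
    from calibre.constants import config_dir
    print config_dir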
@ -1448,7 +1448,6 @@ class StoreGoogleBooksStore(StoreBase):
|
||||
|
||||
    headquarters = 'US'
    formats = ['EPUB', 'PDF', 'TXT']
    affiliate = True

class StoreGutenbergStore(StoreBase):
    name = 'Project Gutenberg'
@ -306,7 +306,8 @@ class DB(object):
|
||||
|
||||
    # Initialize database {{{

    def __init__(self, library_path, default_prefs=None, read_only=False):
    def __init__(self, library_path, default_prefs=None, read_only=False,
                 restore_all_prefs=False, progress_callback=lambda x, y:True):
        try:
            if isbytestring(library_path):
                library_path = library_path.decode(filesystem_encoding)
@ -377,23 +378,27 @@ class DB(object):
        UPDATE authors SET sort=author_to_author_sort(name) WHERE sort IS NULL;
        ''')

        self.initialize_prefs(default_prefs)
        self.initialize_prefs(default_prefs, restore_all_prefs, progress_callback)
        self.initialize_custom_columns()
        self.initialize_tables()

    def initialize_prefs(self, default_prefs): # {{{
    def initialize_prefs(self, default_prefs, restore_all_prefs, progress_callback): # {{{
        self.prefs = DBPrefs(self)

        if default_prefs is not None and not self._exists:
            progress_callback(None, len(default_prefs))
            # Only apply default prefs to a new database
            for key in default_prefs:
            for i, key in enumerate(default_prefs):
                # be sure that prefs not to be copied are listed below
                if key not in frozenset(['news_to_be_synced']):
                if restore_all_prefs or key not in frozenset(['news_to_be_synced']):
                    self.prefs[key] = default_prefs[key]
                    progress_callback(_('restored preference ') + key, i+1)
            if 'field_metadata' in default_prefs:
                fmvals = [f for f in default_prefs['field_metadata'].values()
                          if f['is_custom']]
                for f in fmvals:
                progress_callback(None, len(fmvals))
                for i, f in enumerate(fmvals):
                    progress_callback(_('creating custom column ') + f['label'], i)
                    self.create_custom_column(f['label'], f['name'],
                                              f['datatype'],
                                              (f['is_multiple'] is not None and
@ -422,6 +427,8 @@ class DB(object):
|
||||
('uuid', False), ('comments', True), ('id', False), ('pubdate', False),
|
||||
('last_modified', False), ('size', False), ('languages', False),
|
||||
]
|
||||
defs['virtual_libraries'] = {}
|
||||
defs['virtual_lib_on_startup'] = defs['cs_virtual_lib_on_startup'] = ''
|
||||
|
||||
# Migrate the bool tristate tweak
|
||||
defs['bools_are_tristate'] = \
|
||||
@ -470,6 +477,24 @@ class DB(object):
|
||||
except:
|
||||
pass
|
||||
|
||||
# migrate the gui_restriction preference to a virtual library
|
||||
gr_pref = self.prefs.get('gui_restriction', None)
|
||||
if gr_pref:
|
||||
virt_libs = self.prefs.get('virtual_libraries', {})
|
||||
virt_libs[gr_pref] = 'search:"' + gr_pref + '"'
|
||||
self.prefs['virtual_libraries'] = virt_libs
|
||||
self.prefs['gui_restriction'] = ''
|
||||
self.prefs['virtual_lib_on_startup'] = gr_pref
|
||||
|
||||
# migrate the cs_restriction preference to a virtual library
|
||||
gr_pref = self.prefs.get('cs_restriction', None)
|
||||
if gr_pref:
|
||||
virt_libs = self.prefs.get('virtual_libraries', {})
|
||||
virt_libs[gr_pref] = 'search:"' + gr_pref + '"'
|
||||
self.prefs['virtual_libraries'] = virt_libs
|
||||
self.prefs['cs_restriction'] = ''
|
||||
self.prefs['cs_virtual_lib_on_startup'] = gr_pref
|
||||
|
||||
# Rename any user categories with names that differ only in case
|
||||
user_cats = self.prefs.get('user_categories', [])
|
||||
catmap = {}
|
||||
@ -691,11 +716,13 @@ class DB(object):
|
||||
|
||||
tables['size'] = SizeTable('size', self.field_metadata['size'].copy())
|
||||
|
||||
self.FIELD_MAP = {'id':0, 'title':1, 'authors':2, 'timestamp':3,
|
||||
'size':4, 'rating':5, 'tags':6, 'comments':7, 'series':8,
|
||||
'publisher':9, 'series_index':10, 'sort':11, 'author_sort':12,
|
||||
'formats':13, 'path':14, 'pubdate':15, 'uuid':16, 'cover':17,
|
||||
'au_map':18, 'last_modified':19, 'identifiers':20}
|
||||
self.FIELD_MAP = {
|
||||
'id':0, 'title':1, 'authors':2, 'timestamp':3, 'size':4,
|
||||
'rating':5, 'tags':6, 'comments':7, 'series':8, 'publisher':9,
|
||||
'series_index':10, 'sort':11, 'author_sort':12, 'formats':13,
|
||||
'path':14, 'pubdate':15, 'uuid':16, 'cover':17, 'au_map':18,
|
||||
'last_modified':19, 'identifiers':20, 'languages':21,
|
||||
}
|
||||
|
||||
for k,v in self.FIELD_MAP.iteritems():
|
||||
self.field_metadata.set_field_record_index(k, v, prefer_custom=False)
|
||||
@ -741,6 +768,8 @@ class DB(object):
|
||||
self.field_metadata.set_field_record_index('ondevice', base, prefer_custom=False)
|
||||
self.FIELD_MAP['marked'] = base = base+1
|
||||
self.field_metadata.set_field_record_index('marked', base, prefer_custom=False)
|
||||
self.FIELD_MAP['series_sort'] = base = base+1
|
||||
self.field_metadata.set_field_record_index('series_sort', base, prefer_custom=False)
|
||||
|
||||
# }}}
|
||||
|
||||
@ -754,6 +783,11 @@ class DB(object):
|
||||
self._conn = Connection(self.dbpath)
|
||||
return self._conn
|
||||
|
||||
def close(self):
|
||||
if self._conn is not None:
|
||||
self._conn.close()
|
||||
del self._conn
|
||||
|
||||
@dynamic_property
|
||||
def user_version(self):
|
||||
doc = 'The user version of this database'
|
||||
|
@ -114,6 +114,19 @@ class Cache(object):
|
||||
if self.dirtied_cache:
|
||||
self.dirtied_sequence = max(self.dirtied_cache.itervalues())+1
|
||||
|
||||
@write_api
|
||||
def initialize_template_cache(self):
|
||||
self.formatter_template_cache = {}
|
||||
|
||||
@write_api
|
||||
def refresh(self):
|
||||
self._initialize_template_cache()
|
||||
for field in self.fields.itervalues():
|
||||
if hasattr(field, 'clear_cache'):
|
||||
field.clear_cache() # Clear the composite cache
|
||||
if hasattr(field, 'table'):
|
||||
field.table.read(self.backend) # Reread data from metadata.db
|
||||
|
||||
@property
|
||||
def field_metadata(self):
|
||||
return self.backend.field_metadata
|
||||
|
150
src/calibre/db/legacy.py
Normal file
@ -0,0 +1,150 @@
|
||||
#!/usr/bin/env python
|
||||
# vim:fileencoding=utf-8
|
||||
from __future__ import (unicode_literals, division, absolute_import,
|
||||
print_function)
|
||||
|
||||
__license__ = 'GPL v3'
|
||||
__copyright__ = '2013, Kovid Goyal <kovid at kovidgoyal.net>'
|
||||
|
||||
import os, traceback
|
||||
from functools import partial
|
||||
|
||||
from calibre.db.backend import DB
|
||||
from calibre.db.cache import Cache
|
||||
from calibre.db.view import View
|
||||
from calibre.utils.date import utcnow
|
||||
|
||||
class LibraryDatabase(object):
|
||||
|
||||
''' Emulate the old LibraryDatabase2 interface '''
|
||||
|
||||
PATH_LIMIT = DB.PATH_LIMIT
|
||||
WINDOWS_LIBRARY_PATH_LIMIT = DB.WINDOWS_LIBRARY_PATH_LIMIT
|
||||
|
||||
@classmethod
|
||||
def exists_at(cls, path):
|
||||
return path and os.path.exists(os.path.join(path, 'metadata.db'))
|
||||
|
||||
def __init__(self, library_path,
|
||||
default_prefs=None, read_only=False, is_second_db=False,
|
||||
progress_callback=lambda x, y:True, restore_all_prefs=False):
|
||||
|
||||
self.is_second_db = is_second_db # TODO: Use is_second_db
|
||||
self.listeners = set([])
|
||||
|
||||
backend = self.backend = DB(library_path, default_prefs=default_prefs,
|
||||
read_only=read_only, restore_all_prefs=restore_all_prefs,
|
||||
progress_callback=progress_callback)
|
||||
cache = self.new_api = Cache(backend)
|
||||
cache.init()
|
||||
self.data = View(cache)
|
||||
|
||||
self.get_property = self.data.get_property
|
||||
|
||||
for prop in (
|
||||
'author_sort', 'authors', 'comment', 'comments',
|
||||
'publisher', 'rating', 'series', 'series_index', 'tags',
|
||||
'title', 'timestamp', 'uuid', 'pubdate', 'ondevice',
|
||||
'metadata_last_modified', 'languages',
|
||||
):
|
||||
fm = {'comment':'comments', 'metadata_last_modified':
|
||||
'last_modified', 'title_sort':'sort'}.get(prop, prop)
|
||||
setattr(self, prop, partial(self.get_property,
|
||||
loc=self.FIELD_MAP[fm]))
|
||||
|
||||
self.last_update_check = self.last_modified()
|
||||
|
||||
def close(self):
|
||||
self.backend.close()
|
||||
|
||||
def break_cycles(self):
|
||||
self.data.cache.backend = None
|
||||
self.data.cache = None
|
||||
self.data = self.backend = self.new_api = self.field_metadata = self.prefs = self.listeners = self.refresh_ondevice = None
|
||||
|
||||
# Library wide properties {{{
|
||||
@property
|
||||
def field_metadata(self):
|
||||
return self.backend.field_metadata
|
||||
|
||||
@property
|
||||
def user_version(self):
|
||||
return self.backend.user_version
|
||||
|
||||
@property
|
||||
def library_id(self):
|
||||
return self.backend.library_id
|
||||
|
||||
@property
|
||||
def library_path(self):
|
||||
return self.backend.library_path
|
||||
|
||||
@property
|
||||
def dbpath(self):
|
||||
return self.backend.dbpath
|
||||
|
||||
def last_modified(self):
|
||||
return self.backend.last_modified()
|
||||
|
||||
def check_if_modified(self):
|
||||
if self.last_modified() > self.last_update_check:
|
||||
self.refresh()
|
||||
self.last_update_check = utcnow()
|
||||
|
||||
@property
|
||||
def custom_column_num_map(self):
|
||||
return self.backend.custom_column_num_map
|
||||
|
||||
@property
|
||||
def custom_column_label_map(self):
|
||||
return self.backend.custom_column_label_map
|
||||
|
||||
@property
|
||||
def FIELD_MAP(self):
|
||||
return self.backend.FIELD_MAP
|
||||
|
||||
@property
|
||||
def formatter_template_cache(self):
|
||||
return self.data.cache.formatter_template_cache
|
||||
|
||||
def initialize_template_cache(self):
|
||||
self.data.cache.initialize_template_cache()
|
||||
|
||||
def all_ids(self):
|
||||
for book_id in self.data.cache.all_book_ids():
|
||||
yield book_id
|
||||
|
||||
def refresh(self, field=None, ascending=True):
|
||||
self.data.cache.refresh()
|
||||
self.data.refresh(field=field, ascending=ascending)
|
||||
|
||||
def add_listener(self, listener):
|
||||
'''
|
||||
Add a listener. Will be called on change events with two arguments.
|
||||
Event name and list of affected ids.
|
||||
'''
|
||||
self.listeners.add(listener)
|
||||
|
||||
def notify(self, event, ids=[]):
|
||||
'Notify all listeners'
|
||||
for listener in self.listeners:
|
||||
try:
|
||||
listener(event, ids)
|
||||
except:
|
||||
traceback.print_exc()
|
||||
continue
|
||||
|
||||
# }}}
|
||||
|
||||
def path(self, index, index_is_id=False):
|
||||
'Return the relative path to the directory containing this books files as a unicode string.'
|
||||
book_id = index if index_is_id else self.data.index_to_id(index)
|
||||
return self.data.cache.field_for('path', book_id).replace('/', os.sep)
|
||||
|
||||
def abspath(self, index, index_is_id=False, create_dirs=True):
|
||||
'Return the absolute path to the directory containing this books files as a unicode string.'
|
||||
path = os.path.join(self.library_path, self.path(index, index_is_id=index_is_id))
|
||||
if create_dirs and not os.path.exists(path):
|
||||
os.makedirs(path)
|
||||
return path
|
||||
|
@ -195,13 +195,13 @@ class DateSearch(object): # {{{
|
||||
try:
|
||||
qd = now() - timedelta(int(num))
|
||||
except:
|
||||
raise ParseException(query, len(query), 'Number conversion error')
|
||||
raise ParseException(_('Number conversion error: {0}').format(num))
|
||||
field_count = 3
|
||||
else:
|
||||
try:
|
||||
qd = parse_date(query, as_utc=False)
|
||||
except:
|
||||
raise ParseException(query, len(query), 'Date conversion error')
|
||||
raise ParseException(_('Date conversion error: {0}').format(query))
|
||||
if '-' in query:
|
||||
field_count = query.count('-') + 1
|
||||
else:
|
||||
@ -285,8 +285,8 @@ class NumericSearch(object): # {{{
|
||||
try:
|
||||
q = cast(query) * mult
|
||||
except:
|
||||
raise ParseException(query, len(query),
|
||||
'Non-numeric value in query: %r'%query)
|
||||
raise ParseException(
|
||||
_('Non-numeric value in query: {0}').format(query))
|
||||
|
||||
for val, book_ids in field_iter():
|
||||
if val is None:
|
||||
@ -351,8 +351,8 @@ class KeyPairSearch(object): # {{{
|
||||
if ':' in query:
|
||||
q = [q.strip() for q in query.split(':')]
|
||||
if len(q) != 2:
|
||||
raise ParseException(query, len(query),
|
||||
'Invalid query format for colon-separated search')
|
||||
raise ParseException(
|
||||
_('Invalid query format for colon-separated search: {0}').format(query))
|
||||
keyq, valq = q
|
||||
keyq_mkind, keyq = _matchkind(keyq)
|
||||
valq_mkind, valq = _matchkind(valq)
|
||||
@ -465,7 +465,8 @@ class Parser(SearchQueryParser):
|
||||
if invert:
|
||||
matches = self.all_book_ids - matches
|
||||
return matches
|
||||
raise ParseException(query, len(query), 'Recursive query group detected')
|
||||
raise ParseException(
|
||||
_('Recursive query group detected: {0}').format(query))
|
||||
|
||||
# If the user has asked to restrict searching over all field, apply
|
||||
# that restriction
|
||||
|
@ -16,6 +16,9 @@ rmtree = partial(shutil.rmtree, ignore_errors=True)
|
||||
|
||||
class BaseTest(unittest.TestCase):
|
||||
|
||||
longMessage = True
|
||||
maxDiff = None
|
||||
|
||||
def setUp(self):
|
||||
self.library_path = self.mkdtemp()
|
||||
self.create_db(self.library_path)
|
||||
@ -40,10 +43,10 @@ class BaseTest(unittest.TestCase):
|
||||
db.conn.close()
|
||||
return dest
|
||||
|
||||
def init_cache(self, library_path):
|
||||
def init_cache(self, library_path=None):
|
||||
from calibre.db.backend import DB
|
||||
from calibre.db.cache import Cache
|
||||
backend = DB(library_path)
|
||||
backend = DB(library_path or self.library_path)
|
||||
cache = Cache(backend)
|
||||
cache.init()
|
||||
return cache
|
||||
@ -53,9 +56,13 @@ class BaseTest(unittest.TestCase):
|
||||
atexit.register(rmtree, ans)
|
||||
return ans
|
||||
|
||||
def init_old(self, library_path):
|
||||
def init_old(self, library_path=None):
|
||||
from calibre.library.database2 import LibraryDatabase2
|
||||
return LibraryDatabase2(library_path)
|
||||
return LibraryDatabase2(library_path or self.library_path)
|
||||
|
||||
def init_legacy(self, library_path=None):
|
||||
from calibre.db.legacy import LibraryDatabase
|
||||
return LibraryDatabase(library_path or self.library_path)
|
||||
|
||||
def clone_library(self, library_path):
|
||||
if not hasattr(self, 'clone_dir'):
|
||||
@ -81,7 +88,8 @@ class BaseTest(unittest.TestCase):
|
||||
'ondevice_col', 'last_modified', 'has_cover',
|
||||
'cover_data'}.union(allfk1)
|
||||
for attr in all_keys:
|
||||
if attr == 'user_metadata': continue
|
||||
if attr == 'user_metadata':
|
||||
continue
|
||||
attr1, attr2 = getattr(mi1, attr), getattr(mi2, attr)
|
||||
if attr == 'formats':
|
||||
attr1, attr2 = map(lambda x:tuple(x) if x else (), (attr1, attr2))
|
||||
|
99
src/calibre/db/tests/legacy.py
Normal file
@ -0,0 +1,99 @@
|
||||
#!/usr/bin/env python
|
||||
# vim:fileencoding=utf-8
|
||||
from __future__ import (unicode_literals, division, absolute_import,
|
||||
print_function)
|
||||
|
||||
__license__ = 'GPL v3'
|
||||
__copyright__ = '2013, Kovid Goyal <kovid at kovidgoyal.net>'
|
||||
|
||||
from calibre.db.tests.base import BaseTest
|
||||
|
||||
class LegacyTest(BaseTest):
|
||||
|
||||
''' Test the emulation of the legacy interface. '''
|
||||
|
||||
def test_library_wide_properties(self): # {{{
|
||||
'Test library wide properties'
|
||||
def get_props(db):
|
||||
props = ('user_version', 'is_second_db', 'library_id', 'field_metadata',
|
||||
'custom_column_label_map', 'custom_column_num_map', 'library_path', 'dbpath')
|
||||
fprops = ('last_modified', )
|
||||
ans = {x:getattr(db, x) for x in props}
|
||||
ans.update({x:getattr(db, x)() for x in fprops})
|
||||
ans['all_ids'] = frozenset(db.all_ids())
|
||||
return ans
|
||||
|
||||
old = self.init_old()
|
||||
oldvals = get_props(old)
|
||||
old.close()
|
||||
del old
|
||||
db = self.init_legacy()
|
||||
newvals = get_props(db)
|
||||
self.assertEqual(oldvals, newvals)
|
||||
db.close()
|
||||
# }}}
|
||||
|
||||
def test_get_property(self): # {{{
|
||||
'Test the get_property interface for reading data'
|
||||
def get_values(db):
|
||||
ans = {}
|
||||
for label, loc in db.FIELD_MAP.iteritems():
|
||||
if isinstance(label, int):
|
||||
label = '#'+db.custom_column_num_map[label]['label']
|
||||
label = type('')(label)
|
||||
ans[label] = tuple(db.get_property(i, index_is_id=True, loc=loc)
|
||||
for i in db.all_ids())
|
||||
if label in ('id', 'title', '#tags'):
|
||||
with self.assertRaises(IndexError):
|
||||
db.get_property(9999, loc=loc)
|
||||
with self.assertRaises(IndexError):
|
||||
db.get_property(9999, index_is_id=True, loc=loc)
|
||||
if label in {'tags', 'formats'}:
|
||||
# Order is random in the old db for these
|
||||
ans[label] = tuple(set(x.split(',')) if x else x for x in ans[label])
|
||||
if label == 'series_sort':
|
||||
# The old db code did not take book language into account
|
||||
# when generating series_sort values (the first book has
|
||||
# lang=deu)
|
||||
ans[label] = ans[label][1:]
|
||||
return ans
|
||||
|
||||
old = self.init_old()
|
||||
old_vals = get_values(old)
|
||||
old.close()
|
||||
old = None
|
||||
db = self.init_legacy()
|
||||
new_vals = get_values(db)
|
||||
db.close()
|
||||
self.assertEqual(old_vals, new_vals)
|
||||
|
||||
# }}}
|
||||
|
||||
def test_refresh(self): # {{{
|
||||
' Test refreshing the view after a change to metadata.db '
|
||||
db = self.init_legacy()
|
||||
db2 = self.init_legacy()
|
||||
self.assertEqual(db2.data.cache.set_field('title', {1:'xxx'}), set([1]))
|
||||
db2.close()
|
||||
del db2
|
||||
self.assertNotEqual(db.title(1, index_is_id=True), 'xxx')
|
||||
db.check_if_modified()
|
||||
self.assertEqual(db.title(1, index_is_id=True), 'xxx')
|
||||
# }}}
|
||||
|
||||
def test_legacy_getters(self): # {{{
|
||||
old = self.init_old()
|
||||
getters = ('path', 'abspath', 'title', 'authors', 'series',
|
||||
'publisher', 'author_sort', 'authors', 'comments',
|
||||
'comment', 'publisher', 'rating', 'series_index', 'tags',
|
||||
'timestamp', 'uuid', 'pubdate', 'ondevice',
|
||||
'metadata_last_modified', 'languages')
|
||||
oldvals = {g:tuple(getattr(old, g)(x) for x in xrange(3)) + tuple(getattr(old, g)(x, True) for x in (1,2,3)) for g in getters}
|
||||
old.close()
|
||||
db = self.init_legacy()
|
||||
newvals = {g:tuple(getattr(db, g)(x) for x in xrange(3)) + tuple(getattr(db, g)(x, True) for x in (1,2,3)) for g in getters}
|
||||
for x in (oldvals, newvals):
|
||||
x['tags'] = tuple(set(y.split(',')) if y else y for y in x['tags'])
|
||||
self.assertEqual(oldvals, newvals)
|
||||
# }}}
|
||||
|
@ -11,6 +11,9 @@ import weakref
|
||||
from functools import partial
|
||||
from itertools import izip, imap
|
||||
|
||||
from calibre.ebooks.metadata import title_sort
|
||||
from calibre.utils.config_base import tweaks
|
||||
|
||||
def sanitize_sort_field_name(field_metadata, field):
|
||||
field = field_metadata.search_term_to_field_key(field.lower().strip())
|
||||
# translate some fields to their hidden equivalent
|
||||
@ -40,6 +43,18 @@ class TableRow(list):
|
||||
else:
|
||||
return view._field_getters[obj](self.book_id)
|
||||
|
||||
def format_is_multiple(x, sep=',', repl=None):
|
||||
if not x:
|
||||
return None
|
||||
if repl is not None:
|
||||
x = (y.replace(sep, repl) for y in x)
|
||||
return sep.join(x)
|
||||
|
||||
def format_identifiers(x):
|
||||
if not x:
|
||||
return None
|
||||
return ','.join('%s:%s'%(k, v) for k, v in x.iteritems())
|
||||
|
||||
class View(object):
|
||||
|
||||
''' A table view of the database, with rows and columns. Also supports
|
||||
@ -49,33 +64,63 @@ class View(object):
|
||||
self.cache = cache
|
||||
self.marked_ids = {}
|
||||
self.search_restriction_book_count = 0
|
||||
self.search_restriction = ''
|
||||
self.search_restriction = self.base_restriction = ''
|
||||
self.search_restriction_name = self.base_restriction_name = ''
|
||||
self._field_getters = {}
|
||||
for col, idx in cache.backend.FIELD_MAP.iteritems():
|
||||
label, fmt = col, lambda x:x
|
||||
func = {
|
||||
'id': self._get_id,
|
||||
'au_map': self.get_author_data,
|
||||
'ondevice': self.get_ondevice,
|
||||
'marked': self.get_marked,
|
||||
'series_sort':self.get_series_sort,
|
||||
}.get(col, self._get)
|
||||
if isinstance(col, int):
|
||||
label = self.cache.backend.custom_column_num_map[col]['label']
|
||||
label = (self.cache.backend.field_metadata.custom_field_prefix
|
||||
+ label)
|
||||
self._field_getters[idx] = partial(self.get, label)
|
||||
else:
|
||||
if label.endswith('_index'):
|
||||
try:
|
||||
self._field_getters[idx] = {
|
||||
'id': self._get_id,
|
||||
'au_map': self.get_author_data,
|
||||
'ondevice': self.get_ondevice,
|
||||
'marked': self.get_marked,
|
||||
}[col]
|
||||
except KeyError:
|
||||
self._field_getters[idx] = partial(self.get, col)
|
||||
num = int(label.partition('_')[0])
|
||||
except ValueError:
|
||||
pass # series_index
|
||||
else:
|
||||
label = self.cache.backend.custom_column_num_map[num]['label']
|
||||
label = (self.cache.backend.field_metadata.custom_field_prefix
|
||||
+ label + '_index')
|
||||
|
||||
fm = self.field_metadata[label]
|
||||
fm
|
||||
if label == 'authors':
|
||||
fmt = partial(format_is_multiple, repl='|')
|
||||
elif label in {'tags', 'languages', 'formats'}:
|
||||
fmt = format_is_multiple
|
||||
elif label == 'cover':
|
||||
fmt = bool
|
||||
elif label == 'identifiers':
|
||||
fmt = format_identifiers
|
||||
elif fm['datatype'] == 'text' and fm['is_multiple']:
|
||||
sep = fm['is_multiple']['cache_to_list']
|
||||
if sep not in {'&','|'}:
|
||||
sep = '|'
|
||||
fmt = partial(format_is_multiple, sep=sep)
|
||||
self._field_getters[idx] = partial(func, label, fmt=fmt) if func == self._get else func
|
||||
|
||||
self._map = tuple(self.cache.all_book_ids())
|
||||
self._map_filtered = tuple(self._map)
|
||||
|
||||
def get_property(self, id_or_index, index_is_id=False, loc=-1):
|
||||
book_id = id_or_index if index_is_id else self._map_filtered[id_or_index]
|
||||
return self._field_getters[loc](book_id)
|
||||
|
||||
@property
|
||||
def field_metadata(self):
|
||||
return self.cache.field_metadata
|
||||
|
||||
def _get_id(self, idx, index_is_id=True):
|
||||
if index_is_id and idx not in self.cache.all_book_ids():
|
||||
raise IndexError('No book with id %s present'%idx)
|
||||
return idx if index_is_id else self.index_to_id(idx)
|
||||
|
||||
def __getitem__(self, row):
|
||||
@ -107,9 +152,21 @@ class View(object):
|
||||
def index_to_id(self, idx):
|
||||
return self._map_filtered[idx]
|
||||
|
||||
def get(self, field, idx, index_is_id=True, default_value=None):
|
||||
def _get(self, field, idx, index_is_id=True, default_value=None, fmt=lambda x:x):
|
||||
id_ = idx if index_is_id else self.index_to_id(idx)
|
||||
return self.cache.field_for(field, id_)
|
||||
if index_is_id and id_ not in self.cache.all_book_ids():
|
||||
raise IndexError('No book with id %s present'%idx)
|
||||
return fmt(self.cache.field_for(field, id_, default_value=default_value))
|
||||
|
||||
def get_series_sort(self, idx, index_is_id=True, default_value=''):
|
||||
book_id = idx if index_is_id else self.index_to_id(idx)
|
||||
with self.cache.read_lock:
|
||||
lang_map = self.cache.fields['languages'].book_value_map
|
||||
lang = lang_map.get(book_id, None) or None
|
||||
if lang:
|
||||
lang = lang[0]
|
||||
return title_sort(self.cache._field_for('series', book_id, default_value=''),
|
||||
order=tweaks['title_series_sorting'], lang=lang)
|
||||
|
||||
def get_ondevice(self, idx, index_is_id=True, default_value=''):
|
||||
id_ = idx if index_is_id else self.index_to_id(idx)
|
||||
@ -119,26 +176,15 @@ class View(object):
|
||||
id_ = idx if index_is_id else self.index_to_id(idx)
|
||||
return self.marked_ids.get(id_, default_value)
|
||||
|
||||
def get_author_data(self, idx, index_is_id=True, default_value=()):
|
||||
'''
|
||||
Return author data for all authors of the book identified by idx as a
|
||||
tuple of dictionaries. The dictionaries should never be empty, unless
|
||||
there is a bug somewhere. The list could be empty if idx point to an
|
||||
non existent book, or book with no authors (though again a book with no
|
||||
authors should never happen).
|
||||
|
||||
Each dictionary has the keys: name, sort, link. Link can be an empty
|
||||
string.
|
||||
|
||||
default_value is ignored, this method always returns a tuple
|
||||
'''
|
||||
def get_author_data(self, idx, index_is_id=True, default_value=None):
|
||||
id_ = idx if index_is_id else self.index_to_id(idx)
|
||||
with self.cache.read_lock:
|
||||
ids = self.cache._field_ids_for('authors', id_)
|
||||
ans = []
|
||||
for id_ in ids:
|
||||
ans.append(self.cache._author_data(id_))
|
||||
return tuple(ans)
|
||||
data = self.cache._author_data(id_)
|
||||
ans.append(':::'.join((data['name'], data['sort'], data['link'])))
|
||||
return ':#:'.join(ans) if ans else default_value
|
||||
|
||||
def multisort(self, fields=[], subsort=False, only_ids=None):
|
||||
fields = [(sanitize_sort_field_name(self.field_metadata, x), bool(y)) for x, y in fields]
|
||||
@ -168,8 +214,19 @@ class View(object):
|
||||
return ans
|
||||
self._map_filtered = tuple(ans)
|
||||
|
||||
def _build_restriction_string(self, restriction):
|
||||
if self.base_restriction:
|
||||
if restriction:
|
||||
return u'(%s) and (%s)' % (self.base_restriction, restriction)
|
||||
else:
|
||||
return self.base_restriction
|
||||
else:
|
||||
return restriction
|
||||
|
||||
def search_getting_ids(self, query, search_restriction,
|
||||
set_restriction_count=False):
|
||||
set_restriction_count=False, use_virtual_library=True):
|
||||
if use_virtual_library:
|
||||
search_restriction = self._build_restriction_string(search_restriction)
|
||||
q = ''
|
||||
if not query or not query.strip():
|
||||
q = search_restriction
|
||||
@ -188,11 +245,32 @@ class View(object):
|
||||
self.search_restriction_book_count = len(rv)
|
||||
return rv
|
||||
|
||||
def get_search_restriction(self):
|
||||
return self.search_restriction
|
||||
|
||||
def set_search_restriction(self, s):
|
||||
self.search_restriction = s
|
||||
|
||||
def get_base_restriction(self):
|
||||
return self.base_restriction
|
||||
|
||||
def set_base_restriction(self, s):
|
||||
self.base_restriction = s
|
||||
|
||||
def get_base_restriction_name(self):
|
||||
return self.base_restriction_name
|
||||
|
||||
def set_base_restriction_name(self, s):
|
||||
self.base_restriction_name = s
|
||||
|
||||
def get_search_restriction_name(self):
|
||||
return self.search_restriction_name
|
||||
|
||||
def set_search_restriction_name(self, s):
|
||||
self.search_restriction_name = s
|
||||
|
||||
def search_restriction_applied(self):
|
||||
return bool(self.search_restriction)
|
||||
return bool(self.search_restriction) or bool(self.base_restriction)
|
||||
|
||||
def get_search_restriction_book_count(self):
|
||||
return self.search_restriction_book_count
|
||||
@ -216,3 +294,11 @@ class View(object):
|
||||
self.marked_ids = dict(izip(id_dict.iterkeys(), imap(unicode,
|
||||
id_dict.itervalues())))
|
||||
|
||||
def refresh(self, field=None, ascending=True):
|
||||
self._map = tuple(self.cache.all_book_ids())
|
||||
self._map_filtered = tuple(self._map)
|
||||
if field is not None:
|
||||
self.sort(field, ascending)
|
||||
if self.search_restriction or self.base_restriction:
|
||||
self.search('', return_matches=False)
|
||||
|
||||
|
@ -71,6 +71,7 @@ class ANDROID(USBMS):
|
||||
0x42f7 : [0x216],
|
||||
0x4365 : [0x216],
|
||||
0x4366 : [0x216],
|
||||
0x4371 : [0x216],
|
||||
},
|
||||
# Freescale
|
||||
0x15a2 : {
|
||||
@ -239,7 +240,7 @@ class ANDROID(USBMS):
|
||||
'ADVANCED', 'SGH-I727', 'USB_FLASH_DRIVER', 'ANDROID',
|
||||
'S5830I_CARD', 'MID7042', 'LINK-CREATE', '7035', 'VIEWPAD_7E',
|
||||
'NOVO7', 'MB526', '_USB#WYK7MSF8KE', 'TABLET_PC', 'F', 'MT65XX_MS',
|
||||
'ICS', 'E400', '__FILE-STOR_GADG', 'ST80208-1', 'GT-S5660M_CARD']
|
||||
'ICS', 'E400', '__FILE-STOR_GADG', 'ST80208-1', 'GT-S5660M_CARD', 'XT894']
|
||||
WINDOWS_CARD_A_MEM = ['ANDROID_PHONE', 'GT-I9000_CARD', 'SGH-I897',
|
||||
'FILE-STOR_GADGET', 'SGH-T959_CARD', 'SGH-T959', 'SAMSUNG_ANDROID', 'GT-P1000_CARD',
|
||||
'A70S', 'A101IT', '7', 'INCREDIBLE', 'A7EB', 'SGH-T849_CARD',
|
||||
@ -250,7 +251,7 @@ class ANDROID(USBMS):
|
||||
'FILE-CD_GADGET', 'GT-I9001_CARD', 'USB_2.0', 'XT875',
|
||||
'UMS_COMPOSITE', 'PRO', '.KOBO_VOX', 'SGH-T989_CARD', 'SGH-I727',
|
||||
'USB_FLASH_DRIVER', 'ANDROID', 'MID7042', '7035', 'VIEWPAD_7E',
|
||||
'NOVO7', 'ADVANCED', 'TABLET_PC', 'F', 'E400_SD_CARD', 'ST80208-1']
|
||||
'NOVO7', 'ADVANCED', 'TABLET_PC', 'F', 'E400_SD_CARD', 'ST80208-1', 'XT894']
|
||||
|
||||
OSX_MAIN_MEM = 'Android Device Main Memory'
|
||||
|
||||
|
@ -35,11 +35,11 @@ class KOBO(USBMS):
|
||||
gui_name = 'Kobo Reader'
|
||||
description = _('Communicate with the Kobo Reader')
|
||||
author = 'Timothy Legge and David Forrester'
|
||||
version = (2, 0, 7)
|
||||
version = (2, 0, 8)
|
||||
|
||||
dbversion = 0
|
||||
fwversion = 0
|
||||
supported_dbversion = 75
|
||||
supported_dbversion = 80
|
||||
has_kepubs = False
|
||||
|
||||
supported_platforms = ['windows', 'osx', 'linux']
|
||||
@ -419,7 +419,7 @@ class KOBO(USBMS):
|
||||
# If all this succeeds we need to delete the images files via the ImageID
|
||||
return ImageID
|
||||
|
||||
def delete_images(self, ImageID):
|
||||
def delete_images(self, ImageID, book_path):
|
||||
if ImageID != None:
|
||||
path_prefix = '.kobo/images/'
|
||||
path = self._main_prefix + path_prefix + ImageID
|
||||
@ -449,7 +449,7 @@ class KOBO(USBMS):
|
||||
|
||||
ImageID = self.delete_via_sql(ContentID, ContentType)
|
||||
#print " We would now delete the Images for" + ImageID
|
||||
self.delete_images(ImageID)
|
||||
self.delete_images(ImageID, path)
|
||||
|
||||
if os.path.exists(path):
|
||||
# Delete the ebook
|
||||
@ -1199,15 +1199,21 @@ class KOBO(USBMS):
|
||||
|
||||
class KOBOTOUCH(KOBO):
|
||||
name = 'KoboTouch'
|
||||
gui_name = 'Kobo Touch'
|
||||
gui_name = 'Kobo Touch/Glo/Mini/Aura HD'
|
||||
author = 'David Forrester'
|
||||
description = 'Communicate with the Kobo Touch, Glo and Mini firmware. Based on the existing Kobo driver by %s.' % (KOBO.author)
|
||||
description = 'Communicate with the Kobo Touch, Glo, Mini and Aura HD ereaders. Based on the existing Kobo driver by %s.' % (KOBO.author)
|
||||
# icon = I('devices/kobotouch.jpg')
|
||||
|
||||
supported_dbversion = 75
|
||||
min_supported_dbversion = 53
|
||||
min_dbversion_series = 65
|
||||
min_dbversion_archive = 71
|
||||
supported_dbversion = 80
|
||||
min_supported_dbversion = 53
|
||||
min_dbversion_series = 65
|
||||
min_dbversion_archive = 71
|
||||
min_dbversion_images_on_sdcard = 77
|
||||
|
||||
max_supported_fwversion = (2,5,1)
|
||||
min_fwversion_images_on_sdcard = (2,4,1)
|
||||
|
||||
has_kepubs = True
|
||||
|
||||
booklist_class = KTCollectionsBookList
|
||||
book_class = Book
|
||||
@ -1291,12 +1297,13 @@ class KOBOTOUCH(KOBO):
|
||||
|
||||
TIMESTAMP_STRING = "%Y-%m-%dT%H:%M:%SZ"
|
||||
|
||||
GLO_PRODUCT_ID = [0x4173]
|
||||
MINI_PRODUCT_ID = [0x4183]
|
||||
TOUCH_PRODUCT_ID = [0x4163]
|
||||
PRODUCT_ID = GLO_PRODUCT_ID + MINI_PRODUCT_ID + TOUCH_PRODUCT_ID
|
||||
AURA_HD_PRODUCT_ID = [0x4193]
|
||||
GLO_PRODUCT_ID = [0x4173]
|
||||
MINI_PRODUCT_ID = [0x4183]
|
||||
TOUCH_PRODUCT_ID = [0x4163]
|
||||
PRODUCT_ID = AURA_HD_PRODUCT_ID + GLO_PRODUCT_ID + MINI_PRODUCT_ID + TOUCH_PRODUCT_ID
|
||||
|
||||
BCD = [0x0110, 0x0326]
|
||||
BCD = [0x0110, 0x0326]
|
||||
|
||||
# Image file name endings. Made up of: image size, min_dbversion, max_dbversion,
|
||||
COVER_FILE_ENDINGS = {
|
||||
@ -1313,6 +1320,11 @@ class KOBOTOUCH(KOBO):
|
||||
# ' - N3_LIBRARY_LIST.parsed':[(60,90),0, 53,],
|
||||
# ' - N3_LIBRARY_SHELF.parsed': [(40,60),0, 52,],
|
||||
}
|
||||
AURA_HD_COVER_FILE_ENDINGS = {
|
||||
' - N3_FULL.parsed': [(1080,1440), 0, 99,True,], # Used for screensaver, home screen
|
||||
' - N3_LIBRARY_FULL.parsed':[(355, 471), 0, 99,False,], # Used for Details screen
|
||||
' - N3_LIBRARY_GRID.parsed':[(149, 198), 0, 99,False,], # Used for library lists
|
||||
}
|
||||
#Following are the sizes used with pre2.1.4 firmware
|
||||
# COVER_FILE_ENDINGS = {
|
||||
# ' - N3_LIBRARY_FULL.parsed':[(355,530),0, 99,], # Used for Details screen
|
||||
@ -1328,6 +1340,10 @@ class KOBOTOUCH(KOBO):
|
||||
super(KOBOTOUCH, self).initialize()
|
||||
self.bookshelvelist = []
|
||||
|
||||
def get_device_information(self, end_session=True):
|
||||
self.set_device_name()
|
||||
return super(KOBOTOUCH, self).get_device_information(end_session)
|
||||
|
||||
def books(self, oncard=None, end_session=True):
|
||||
debug_print("KoboTouch:books - oncard='%s'"%oncard)
|
||||
from calibre.ebooks.metadata.meta import path_to_ext
|
||||
@ -1354,14 +1370,13 @@ class KOBOTOUCH(KOBO):
|
||||
|
||||
# Determine the firmware version
|
||||
try:
|
||||
with open(self.normalize_path(self._main_prefix + '.kobo/version'),
|
||||
'rb') as f:
|
||||
with open(self.normalize_path(self._main_prefix + '.kobo/version'), 'rb') as f:
|
||||
self.fwversion = f.readline().split(',')[2]
|
||||
self.fwversion = tuple((int(x) for x in self.fwversion.split('.')))
|
||||
except:
|
||||
self.fwversion = 'unknown'
|
||||
self.fwversion = (0,0,0)
|
||||
|
||||
if self.fwversion != '1.0' and self.fwversion != '1.4':
|
||||
self.has_kepubs = True
|
||||
debug_print('Kobo device: %s' % self.gui_name)
|
||||
debug_print('Version of driver:', self.version, 'Has kepubs:', self.has_kepubs)
|
||||
debug_print('Version of firmware:', self.fwversion, 'Has kepubs:', self.has_kepubs)
|
||||
|
||||
@ -1374,7 +1389,7 @@ class KOBOTOUCH(KOBO):
|
||||
debug_print(opts.extra_customization)
|
||||
if opts.extra_customization:
|
||||
debugging_title = opts.extra_customization[self.OPT_DEBUGGING_TITLE]
|
||||
debug_print("KoboTouch:books - set_debugging_title to", debugging_title )
|
||||
debug_print("KoboTouch:books - set_debugging_title to '%s'" % debugging_title )
|
||||
bl.set_debugging_title(debugging_title)
|
||||
debug_print("KoboTouch:books - length bl=%d"%len(bl))
|
||||
need_sync = self.parse_metadata_cache(bl, prefix, self.METADATA_CACHE)
|
||||
@ -1466,6 +1481,7 @@ class KOBOTOUCH(KOBO):
|
||||
if show_debug:
|
||||
self.debug_index = idx
|
||||
debug_print("KoboTouch:update_booklist - idx=%d"%idx)
|
||||
debug_print("KoboTouch:update_booklist - lpath=%s"%lpath)
|
||||
debug_print('KoboTouch:update_booklist - bl[idx].device_collections=', bl[idx].device_collections)
|
||||
debug_print('KoboTouch:update_booklist - playlist_map=', playlist_map)
|
||||
debug_print('KoboTouch:update_booklist - bookshelves=', bookshelves)
|
||||
@ -1477,7 +1493,7 @@ class KOBOTOUCH(KOBO):
|
||||
bl_cache[lpath] = None
|
||||
|
||||
if ImageID is not None:
|
||||
imagename = self.imagefilename_from_imageID(ImageID)
|
||||
imagename = self.imagefilename_from_imageID(prefix, ImageID)
|
||||
if imagename is not None:
|
||||
bl[idx].thumbnail = ImageWrapper(imagename)
|
||||
if (ContentType == '6' and MimeType != 'application/x-kobo-epub+zip'):
|
||||
@ -1717,12 +1733,14 @@ class KOBOTOUCH(KOBO):
|
||||
debug_print("KoboTouch:books - end - oncard='%s'"%oncard)
|
||||
return bl
|
||||
|
||||
def imagefilename_from_imageID(self, ImageID):
|
||||
def imagefilename_from_imageID(self, prefix, ImageID):
|
||||
show_debug = self.is_debugging_title(ImageID)
|
||||
|
||||
path = self.images_path(prefix)
|
||||
path = self.normalize_path(path.replace('/', os.sep))
|
||||
|
||||
for ending, cover_options in self.cover_file_endings().items():
|
||||
fpath = self._main_prefix + '.kobo/images/' + ImageID + ending
|
||||
fpath = self.normalize_path(fpath.replace('/', os.sep))
|
||||
fpath = path + ImageID + ending
|
||||
if os.path.exists(fpath):
|
||||
if show_debug:
|
||||
debug_print("KoboTouch:imagefilename_from_imageID - have cover image fpath=%s" % (fpath))
|
||||
@ -1764,7 +1782,7 @@ class KOBOTOUCH(KOBO):
|
||||
|
||||
if not self.copying_covers():
|
||||
imageID = self.imageid_from_contentid(contentID)
|
||||
self.delete_images(imageID)
|
||||
self.delete_images(imageID, fname)
|
||||
connection.commit()
|
||||
|
||||
cursor.close()
|
||||
@ -1821,11 +1839,11 @@ class KOBOTOUCH(KOBO):
|
||||
|
||||
return imageId
|
||||
|
||||
def delete_images(self, ImageID):
|
||||
def delete_images(self, ImageID, book_path):
|
||||
debug_print("KoboTouch:delete_images - ImageID=", ImageID)
|
||||
if ImageID != None:
|
||||
path_prefix = '.kobo/images/'
|
||||
path = self._main_prefix + path_prefix + ImageID
|
||||
path = self.images_path(book_path)
|
||||
path = path + ImageID
|
||||
|
||||
for ending in self.cover_file_endings().keys():
|
||||
fpath = path + ending
|
||||
@ -1872,12 +1890,14 @@ class KOBOTOUCH(KOBO):
|
||||
def get_content_type_from_extension(self, extension):
|
||||
debug_print("KoboTouch:get_content_type_from_extension - start")
|
||||
# With new firmware, ContentType appears to be 6 for all types of sideloaded books.
|
||||
if self.fwversion.startswith('2.'):
|
||||
if self.fwversion >= (1,9,17) or extension == '.kobo' or extension == '.mobi':
|
||||
debug_print("KoboTouch:get_content_type_from_extension - V2 firmware")
|
||||
ContentType = 6
|
||||
# For older firmware, it depends on the type of file.
|
||||
elif extension == '.kobo' or extension == '.mobi':
|
||||
ContentType = 6
|
||||
else:
|
||||
debug_print("KoboTouch:get_content_type_from_extension - calling super")
|
||||
ContentType = super(KOBOTOUCH, self).get_content_type_from_extension(extension)
|
||||
ContentType = 901
|
||||
return ContentType
|
||||
|
||||
def update_device_database_collections(self, booklists, collections_attributes, oncard):
|
||||
@ -1920,7 +1940,7 @@ class KOBOTOUCH(KOBO):
|
||||
delete_empty_shelves = opts.extra_customization[self.OPT_DELETE_BOOKSHELVES] and self.supports_bookshelves()
|
||||
update_series_details = opts.extra_customization[self.OPT_UPDATE_SERIES_DETAILS] and self.supports_series()
|
||||
debugging_title = opts.extra_customization[self.OPT_DEBUGGING_TITLE]
|
||||
debug_print("KoboTouch:update_device_database_collections - set_debugging_title to", debugging_title )
|
||||
debug_print("KoboTouch:update_device_database_collections - set_debugging_title to '%s'" % debugging_title )
|
||||
booklists.set_debugging_title(debugging_title)
|
||||
else:
|
||||
delete_empty_shelves = False
|
||||
@ -2088,8 +2108,8 @@ class KOBOTOUCH(KOBO):
|
||||
# debug_print('KoboTouch: not uploading cover')
|
||||
return
|
||||
|
||||
# Don't upload covers if book is on the SD card
|
||||
if self._card_a_prefix and path.startswith(self._card_a_prefix):
|
||||
# Only upload covers to SD card if that is supported
|
||||
if self._card_a_prefix and path.startswith(self._card_a_prefix) and not self.supports_covers_on_sdcard():
|
||||
return
|
||||
|
||||
if not opts.extra_customization[self.OPT_UPLOAD_GRAYSCALE_COVERS]:
|
||||
@ -2111,6 +2131,16 @@ class KOBOTOUCH(KOBO):
|
||||
ImageID = ImageID.replace('.', '_')
|
||||
return ImageID
|
||||
|
||||
|
||||
def images_path(self, path):
|
||||
if self._card_a_prefix and path.startswith(self._card_a_prefix) and self.supports_covers_on_sdcard():
|
||||
path_prefix = 'koboExtStorage/images/'
|
||||
path = self._card_a_prefix + path_prefix
|
||||
else:
|
||||
path_prefix = '.kobo/images/'
|
||||
path = self._main_prefix + path_prefix
|
||||
return path
|
||||
|
||||
def _upload_cover(self, path, filename, metadata, filepath, uploadgrayscale, keep_cover_aspect=False):
|
||||
from calibre.utils.magick.draw import save_cover_data_to, identify_data
|
||||
debug_print("KoboTouch:_upload_cover - filename='%s' uploadgrayscale='%s' "%(filename, uploadgrayscale))
|
||||
@ -2151,8 +2181,8 @@ class KOBOTOUCH(KOBO):
|
||||
cursor.close()
|
||||
|
||||
if ImageID != None:
|
||||
path_prefix = '.kobo/images/'
|
||||
path = self._main_prefix + path_prefix + ImageID
|
||||
path = self.images_path(path) + ImageID
|
||||
|
||||
if show_debug:
|
||||
debug_print("KoboTouch:_upload_cover - About to loop over cover endings")
|
||||
|
||||
@ -2496,6 +2526,8 @@ class KOBOTOUCH(KOBO):
|
||||
return opts
|
||||
|
||||
|
||||
def isAuraHD(self):
|
||||
return self.detected_device.idProduct in self.AURA_HD_PRODUCT_ID
|
||||
def isGlo(self):
|
||||
return self.detected_device.idProduct in self.GLO_PRODUCT_ID
|
||||
def isMini(self):
|
||||
@ -2504,7 +2536,21 @@ class KOBOTOUCH(KOBO):
|
||||
return self.detected_device.idProduct in self.TOUCH_PRODUCT_ID
|
||||
|
||||
def cover_file_endings(self):
|
||||
return self.GLO_COVER_FILE_ENDINGS if self.isGlo() else self.COVER_FILE_ENDINGS
|
||||
return self.GLO_COVER_FILE_ENDINGS if self.isGlo() else self.AURA_HD_COVER_FILE_ENDINGS if self.isAuraHD() else self.COVER_FILE_ENDINGS
|
||||
|
||||
def set_device_name(self):
|
||||
device_name = self.gui_name
|
||||
if self.isAuraHD():
|
||||
device_name = 'Kobo Aura HD'
|
||||
elif self.isGlo():
|
||||
device_name = 'Kobo Glo'
|
||||
elif self.isMini():
|
||||
device_name = 'Kobo Mini'
|
||||
elif self.isTouch():
|
||||
device_name = 'Kobo Touch'
|
||||
self.__class__.gui_name = device_name
|
||||
return device_name
|
||||
|
||||
|
||||
def copying_covers(self):
|
||||
opts = self.settings()
|
||||
@ -2524,6 +2570,44 @@ class KOBOTOUCH(KOBO):
|
||||
def supports_kobo_archive(self):
|
||||
return self.dbversion >= self.min_dbversion_archive
|
||||
|
||||
def supports_covers_on_sdcard(self):
|
||||
return self.dbversion >= 77 and self.fwversion >= self.min_fwversion_images_on_sdcard
|
||||
|
||||
def modify_database_check(self, function):
|
||||
# Checks to see whether the database version is supported
|
||||
# and whether the user has chosen to support the firmware version
|
||||
# debug_print("KoboTouch:modify_database_check - self.fwversion <= self.max_supported_fwversion=", self.fwversion > self.max_supported_fwversion)
|
||||
if self.dbversion > self.supported_dbversion or self.fwversion > self.max_supported_fwversion:
|
||||
# Unsupported database
|
||||
opts = self.settings()
|
||||
if not opts.extra_customization[self.OPT_SUPPORT_NEWER_FIRMWARE]:
|
||||
debug_print('The database has been upgraded past supported version')
|
||||
self.report_progress(1.0, _('Removing books from device...'))
|
||||
from calibre.devices.errors import UserFeedback
|
||||
raise UserFeedback(_("Kobo database version unsupported - See details"),
|
||||
_('Your Kobo is running an updated firmware/database version.'
|
||||
' As calibre does not know about this updated firmware,'
|
||||
' database editing is disabled, to prevent corruption.'
|
||||
' You can still send books to your Kobo with calibre, '
|
||||
' but deleting books and managing collections is disabled.'
|
||||
' If you are willing to experiment and know how to reset'
|
||||
' your Kobo to Factory defaults, you can override this'
|
||||
' check by right clicking the device icon in calibre and'
|
||||
' selecting "Configure this device" and then the '
|
||||
' "Attempt to support newer firmware" option.'
|
||||
' Doing so may require you to perform a factory reset of'
|
||||
' your Kobo.'
|
||||
),
|
||||
UserFeedback.WARN)
|
||||
|
||||
return False
|
||||
else:
|
||||
# The user chose to edit the database anyway
|
||||
return True
|
||||
else:
|
||||
# Supported database version
|
||||
return True
|
||||
|
||||
|
||||
@classmethod
|
||||
def is_debugging_title(cls, title):
|
||||
|
@ -95,7 +95,6 @@ class PDNOVEL(USBMS):
|
||||
SUPPORTS_SUB_DIRS = False
|
||||
DELETE_EXTS = ['.jpg', '.jpeg', '.png']
|
||||
|
||||
|
||||
def upload_cover(self, path, filename, metadata, filepath):
|
||||
coverdata = getattr(metadata, 'thumbnail', None)
|
||||
if coverdata and coverdata[2]:
|
||||
@ -226,9 +225,9 @@ class TREKSTOR(USBMS):
|
||||
|
||||
VENDOR_ID = [0x1e68]
|
||||
PRODUCT_ID = [0x0041, 0x0042, 0x0052, 0x004e, 0x0056,
|
||||
0x0067, # This is for the Pyrus Mini
|
||||
0x003e, # This is for the EBOOK_PLAYER_5M https://bugs.launchpad.net/bugs/792091
|
||||
0x5cL, # This is for the 4ink http://www.mobileread.com/forums/showthread.php?t=191318
|
||||
0x0067, # This is for the Pyrus Mini
|
||||
0x003e, # This is for the EBOOK_PLAYER_5M https://bugs.launchpad.net/bugs/792091
|
||||
0x5cL, # This is for the 4ink http://www.mobileread.com/forums/showthread.php?t=191318
|
||||
]
|
||||
BCD = [0x0002, 0x100]
|
||||
|
||||
@ -427,8 +426,8 @@ class WAYTEQ(USBMS):
|
||||
EBOOK_DIR_MAIN = 'Documents'
|
||||
SCAN_FROM_ROOT = True
|
||||
|
||||
VENDOR_NAME = 'ROCKCHIP'
|
||||
WINDOWS_MAIN_MEM = WINDOWS_CARD_A_MEM = 'RK28_SDK_DEMO'
|
||||
VENDOR_NAME = ['ROCKCHIP', 'CBR']
|
||||
WINDOWS_MAIN_MEM = WINDOWS_CARD_A_MEM = ['RK28_SDK_DEMO', 'EINK_EBOOK_READE']
|
||||
SUPPORTS_SUB_DIRS = True
|
||||
|
||||
def get_gui_name(self):
|
||||
@ -445,7 +444,8 @@ class WAYTEQ(USBMS):
|
||||
return self.EBOOK_DIR_CARD_A
|
||||
|
||||
def windows_sort_drives(self, drives):
|
||||
if len(drives) < 2: return drives
|
||||
if len(drives) < 2:
|
||||
return drives
|
||||
main = drives.get('main', None)
|
||||
carda = drives.get('carda', None)
|
||||
if main and carda:
|
||||
@ -455,7 +455,8 @@ class WAYTEQ(USBMS):
|
||||
|
||||
def linux_swap_drives(self, drives):
|
||||
# See https://bugs.launchpad.net/bugs/1151901
|
||||
if len(drives) < 2 or not drives[1] or not drives[2]: return drives
|
||||
if len(drives) < 2 or not drives[1] or not drives[2]:
|
||||
return drives
|
||||
drives = list(drives)
|
||||
t = drives[0]
|
||||
drives[0] = drives[1]
|
||||
@ -463,7 +464,8 @@ class WAYTEQ(USBMS):
|
||||
return tuple(drives)
|
||||
|
||||
def osx_sort_names(self, names):
|
||||
if len(names) < 2: return names
|
||||
if len(names) < 2:
|
||||
return names
|
||||
main = names.get('main', None)
|
||||
card = names.get('carda', None)
|
||||
|
||||
|
@ -58,8 +58,8 @@ class PICO(NEWSMY):
|
||||
gui_name = 'Pico'
|
||||
description = _('Communicate with the Pico reader.')
|
||||
|
||||
VENDOR_NAME = ['TECLAST', 'IMAGIN', 'LASER-', '']
|
||||
WINDOWS_MAIN_MEM = ['USBDISK__USER', 'EB720']
|
||||
VENDOR_NAME = ['TECLAST', 'IMAGIN', 'LASER-', 'LASER', '']
|
||||
WINDOWS_MAIN_MEM = WINDOWS_CARD_A_MEM = ['USBDISK__USER', 'EB720', 'EBOOK-EB720']
|
||||
EBOOK_DIR_MAIN = 'Books'
|
||||
FORMATS = ['EPUB', 'FB2', 'TXT', 'LRC', 'PDB', 'PDF', 'HTML', 'WTXT']
|
||||
SCAN_FROM_ROOT = True
|
||||
|
@ -188,7 +188,6 @@ class EPUBInput(InputFormatPlugin):
|
||||
raise DRMError(os.path.basename(path))
|
||||
self.encrypted_fonts = self._encrypted_font_uris
|
||||
|
||||
|
||||
if len(parts) > 1 and parts[0]:
|
||||
delta = '/'.join(parts[:-1])+'/'
|
||||
for elem in opf.itermanifest():
|
||||
|
@ -4,12 +4,15 @@ __copyright__ = '2010, Fabian Grassl <fg@jusmeum.de>'
|
||||
__docformat__ = 'restructuredtext en'
|
||||
|
||||
import os, re, shutil
|
||||
from os.path import dirname, abspath, relpath, exists, basename
|
||||
from os.path import dirname, abspath, relpath as _relpath, exists, basename
|
||||
|
||||
from calibre.customize.conversion import OutputFormatPlugin, OptionRecommendation
|
||||
from calibre import CurrentDir
|
||||
from calibre.ptempfile import PersistentTemporaryDirectory
|
||||
|
||||
def relpath(*args):
|
||||
return _relpath(*args).replace(os.sep, '/')
|
||||
|
||||
class HTMLOutput(OutputFormatPlugin):
|
||||
|
||||
name = 'HTML Output'
|
||||
|
@ -13,7 +13,8 @@ from calibre.customize import FileTypePlugin
|
||||
from calibre.utils.zipfile import ZipFile, stringFileHeader
|
||||
|
||||
def is_comic(list_of_names):
|
||||
extensions = set([x.rpartition('.')[-1].lower() for x in list_of_names])
|
||||
extensions = set([x.rpartition('.')[-1].lower() for x in list_of_names
|
||||
if '.' in x and x.lower().rpartition('/')[-1] != 'thumbs.db'])
|
||||
comic_extensions = set(['jpg', 'jpeg', 'png'])
|
||||
return len(extensions - comic_extensions) == 0
|
||||
|
||||
@ -58,7 +59,7 @@ class ArchiveExtract(FileTypePlugin):
|
||||
else:
|
||||
fnames = zf.namelist()
|
||||
|
||||
fnames = [x for x in fnames if '.' in x]
|
||||
fnames = [x for x in fnames if '.' in x and x.lower().rpartition('/')[-1] != 'thumbs.db']
|
||||
if is_comic(fnames):
|
||||
ext = '.cbr' if is_rar else '.cbz'
|
||||
of = self.temporary_file('_archive_extract'+ext)
|
||||
|
@ -1,7 +1,6 @@
|
||||
'''
|
||||
Basic support for manipulating OEB 1.x/2.0 content and metadata.
|
||||
'''
|
||||
from __future__ import with_statement
|
||||
|
||||
__license__ = 'GPL v3'
|
||||
__copyright__ = '2008, Marshall T. Vandegrift <llasram@gmail.com>'
|
||||
@ -11,7 +10,7 @@ import os, re, uuid, logging
|
||||
from collections import defaultdict
|
||||
from itertools import count
|
||||
from urlparse import urldefrag, urlparse, urlunparse, urljoin
|
||||
from urllib import unquote as urlunquote
|
||||
from urllib import unquote
|
||||
|
||||
from lxml import etree, html
|
||||
from calibre.constants import filesystem_encoding, __version__
|
||||
@ -40,11 +39,11 @@ CALIBRE_NS = 'http://calibre.kovidgoyal.net/2009/metadata'
|
||||
RE_NS = 'http://exslt.org/regular-expressions'
|
||||
MBP_NS = 'http://www.mobipocket.com'
|
||||
|
||||
XPNSMAP = {'h' : XHTML_NS, 'o1' : OPF1_NS, 'o2' : OPF2_NS,
|
||||
'd09': DC09_NS, 'd10': DC10_NS, 'd11': DC11_NS,
|
||||
'xsi': XSI_NS, 'dt' : DCTERMS_NS, 'ncx': NCX_NS,
|
||||
'svg': SVG_NS, 'xl' : XLINK_NS, 're': RE_NS,
|
||||
'mbp': MBP_NS, 'calibre': CALIBRE_NS }
|
||||
XPNSMAP = {'h': XHTML_NS, 'o1': OPF1_NS, 'o2': OPF2_NS,
|
||||
'd09': DC09_NS, 'd10': DC10_NS, 'd11': DC11_NS,
|
||||
'xsi': XSI_NS, 'dt': DCTERMS_NS, 'ncx': NCX_NS,
|
||||
'svg': SVG_NS, 'xl': XLINK_NS, 're': RE_NS,
|
||||
'mbp': MBP_NS, 'calibre': CALIBRE_NS}
|
||||
|
||||
OPF1_NSMAP = {'dc': DC11_NS, 'oebpackage': OPF1_NS}
|
||||
OPF2_NSMAP = {'opf': OPF2_NS, 'dc': DC11_NS, 'dcterms': DCTERMS_NS,
|
||||
@ -142,7 +141,6 @@ def iterlinks(root, find_links_in_css=True):
|
||||
if attr in link_attrs:
|
||||
yield (el, attr, attribs[attr], 0)
|
||||
|
||||
|
||||
if not find_links_in_css:
|
||||
continue
|
||||
if tag == XHTML('style') and el.text:
|
||||
@ -363,7 +361,9 @@ URL_SAFE = set('ABCDEFGHIJKLMNOPQRSTUVWXYZ'
|
||||
URL_UNSAFE = [ASCII_CHARS - URL_SAFE, UNIBYTE_CHARS - URL_SAFE]
|
||||
|
||||
def urlquote(href):
|
||||
"""Quote URL-unsafe characters, allowing IRI-safe characters."""
|
||||
""" Quote URL-unsafe characters, allowing IRI-safe characters.
|
||||
That is, this function returns valid IRIs not valid URIs. In particular,
|
||||
IRIs can contain non-ascii characters. """
|
||||
result = []
|
||||
unsafe = 0 if isinstance(href, unicode) else 1
|
||||
unsafe = URL_UNSAFE[unsafe]
|
||||
@ -373,6 +373,19 @@ def urlquote(href):
|
||||
result.append(char)
|
||||
return ''.join(result)
|
||||
|
||||
def urlunquote(href):
|
||||
# unquote must run on a bytestring and will return a bytestring
|
||||
# If it runs on a unicode object, it returns a double encoded unicode
|
||||
# string: unquote(u'%C3%A4') != unquote(b'%C3%A4').decode('utf-8')
|
||||
# and the latter is correct
|
||||
want_unicode = isinstance(href, unicode)
|
||||
if want_unicode:
|
||||
href = href.encode('utf-8')
|
||||
href = unquote(href)
|
||||
if want_unicode:
|
||||
href = href.decode('utf-8')
|
||||
return href
|
||||
|
||||
def urlnormalize(href):
|
||||
"""Convert a URL into normalized form, with all and only URL-unsafe
|
||||
characters URL quoted.
|
||||
@ -469,7 +482,7 @@ class DirContainer(object):
|
||||
return
|
||||
|
||||
def _unquote(self, path):
|
||||
# urlunquote must run on a bytestring and will return a bytestring
|
||||
# unquote must run on a bytestring and will return a bytestring
|
||||
# If it runs on a unicode object, it returns a double encoded unicode
|
||||
# string: unquote(u'%C3%A4') != unquote(b'%C3%A4').decode('utf-8')
|
||||
# and the latter is correct
|
||||
@ -497,7 +510,7 @@ class DirContainer(object):
|
||||
return False
|
||||
try:
|
||||
path = os.path.join(self.rootdir, self._unquote(path))
|
||||
except ValueError: #Happens if path contains quoted special chars
|
||||
except ValueError: # Happens if path contains quoted special chars
|
||||
return False
|
||||
try:
|
||||
return os.path.isfile(path)
|
||||
@ -577,12 +590,13 @@ class Metadata(object):
|
||||
allowed = self.allowed
|
||||
if allowed is not None and term not in allowed:
|
||||
raise AttributeError(
|
||||
'attribute %r not valid for metadata term %r' \
|
||||
'attribute %r not valid for metadata term %r'
|
||||
% (self.attr(term), barename(obj.term)))
|
||||
return self.attr(term)
|
||||
|
||||
def __get__(self, obj, cls):
|
||||
if obj is None: return None
|
||||
if obj is None:
|
||||
return None
|
||||
return obj.attrib.get(self.term_attr(obj), '')
|
||||
|
||||
def __set__(self, obj, value):
|
||||
@ -628,8 +642,8 @@ class Metadata(object):
|
||||
self.value = value
|
||||
return property(fget=fget, fset=fset)
|
||||
|
||||
scheme = Attribute(lambda term: 'scheme' if \
|
||||
term == OPF('meta') else OPF('scheme'),
|
||||
scheme = Attribute(lambda term: 'scheme' if
|
||||
term == OPF('meta') else OPF('scheme'),
|
||||
[DC('identifier'), OPF('meta')])
|
||||
file_as = Attribute(OPF('file-as'), [DC('creator'), DC('contributor'),
|
||||
DC('title')])
|
||||
@ -882,7 +896,6 @@ class Manifest(object):
|
||||
|
||||
return self._parse_xhtml(convert_markdown(data, title=title))
|
||||
|
||||
|
||||
def _parse_css(self, data):
|
||||
from cssutils import CSSParser, log, resolveImports
|
||||
log.setLevel(logging.WARN)
|
||||
@ -935,7 +948,7 @@ class Manifest(object):
|
||||
data = self._loader(getattr(self, 'html_input_href',
|
||||
self.href))
|
||||
if not isinstance(data, basestring):
|
||||
pass # already parsed
|
||||
pass # already parsed
|
||||
elif self.media_type.lower() in OEB_DOCS:
|
||||
data = self._parse_xhtml(data)
|
||||
elif self.media_type.lower()[-4:] in ('+xml', '/xml'):
|
||||
@ -1022,7 +1035,8 @@ class Manifest(object):
|
||||
target, frag = urldefrag(href)
|
||||
target = target.split('/')
|
||||
for index in xrange(min(len(base), len(target))):
|
||||
if base[index] != target[index]: break
|
||||
if base[index] != target[index]:
|
||||
break
|
||||
else:
|
||||
index += 1
|
||||
relhref = (['..'] * (len(base) - index)) + target[index:]
|
||||
|
@ -30,7 +30,7 @@ from calibre.ebooks.oeb.base import (
|
||||
from calibre.ebooks.oeb.polish.errors import InvalidBook, DRMError
|
||||
from calibre.ebooks.oeb.parse_utils import NotHTML, parse_html, RECOVER_PARSER
|
||||
from calibre.ptempfile import PersistentTemporaryDirectory, PersistentTemporaryFile
|
||||
from calibre.utils.ipc.simple_worker import fork_job, WorkerError
|
||||
from calibre.utils.ipc.simple_worker import fork_job, WorkerError
|
||||
from calibre.utils.logging import default_log
|
||||
from calibre.utils.zipfile import ZipFile
|
||||
|
||||
@ -77,7 +77,7 @@ class Container(object):
|
||||
|
||||
# Map of relative paths with '/' separators from root of unzipped ePub
|
||||
# to absolute paths on filesystem with os-specific separators
|
||||
opfpath = os.path.abspath(opfpath)
|
||||
opfpath = os.path.abspath(os.path.realpath(opfpath))
|
||||
for dirpath, _dirnames, filenames in os.walk(self.root):
|
||||
for f in filenames:
|
||||
path = join(dirpath, f)
|
||||
@ -406,8 +406,9 @@ class Container(object):
|
||||
child.get('content', '').strip() in {'{}', ''}):
|
||||
remove.add(child)
|
||||
except AttributeError:
|
||||
continue # Happens for XML comments
|
||||
for child in remove: mdata.remove(child)
|
||||
continue # Happens for XML comments
|
||||
for child in remove:
|
||||
mdata.remove(child)
|
||||
if len(mdata) > 0:
|
||||
mdata[-1].tail = '\n '
|
||||
|
||||
@ -473,17 +474,17 @@ class EpubContainer(Container):
|
||||
book_type = 'epub'
|
||||
|
||||
META_INF = {
|
||||
'container.xml' : True,
|
||||
'manifest.xml' : False,
|
||||
'encryption.xml' : False,
|
||||
'metadata.xml' : False,
|
||||
'signatures.xml' : False,
|
||||
'rights.xml' : False,
|
||||
'container.xml': True,
|
||||
'manifest.xml': False,
|
||||
'encryption.xml': False,
|
||||
'metadata.xml': False,
|
||||
'signatures.xml': False,
|
||||
'rights.xml': False,
|
||||
}
|
||||
|
||||
def __init__(self, pathtoepub, log):
|
||||
self.pathtoepub = pathtoepub
|
||||
tdir = self.root = PersistentTemporaryDirectory('_epub_container')
|
||||
tdir = self.root = os.path.abspath(os.path.realpath(PersistentTemporaryDirectory('_epub_container')))
|
||||
with open(self.pathtoepub, 'rb') as stream:
|
||||
try:
|
||||
zf = ZipFile(stream)
|
||||
@ -616,7 +617,7 @@ class AZW3Container(Container):
|
||||
|
||||
def __init__(self, pathtoazw3, log):
|
||||
self.pathtoazw3 = pathtoazw3
|
||||
tdir = self.root = PersistentTemporaryDirectory('_azw3_container')
|
||||
tdir = self.root = os.path.abspath(os.path.realpath(PersistentTemporaryDirectory('_azw3_container')))
|
||||
with open(pathtoazw3, 'rb') as stream:
|
||||
raw = stream.read(3)
|
||||
if raw == b'TPZ':
|
||||
@ -670,7 +671,8 @@ class AZW3Container(Container):
|
||||
# }}}
|
||||
|
||||
def get_container(path, log=None):
|
||||
if log is None: log = default_log
|
||||
if log is None:
|
||||
log = default_log
|
||||
ebook = (AZW3Container if path.rpartition('.')[-1].lower() in {'azw3', 'mobi'}
|
||||
else EpubContainer)(path, log)
|
||||
return ebook
|
||||
|
@ -46,10 +46,11 @@ def is_raster_image(media_type):
|
||||
return media_type and media_type.lower() in {
|
||||
'image/png', 'image/jpeg', 'image/jpg', 'image/gif'}
|
||||
|
||||
COVER_TYPES = { 'coverimagestandard', 'other.ms-coverimage-standard',
|
||||
'other.ms-titleimage-standard', 'other.ms-titleimage',
|
||||
'other.ms-coverimage', 'other.ms-thumbimage-standard',
|
||||
'other.ms-thumbimage', 'thumbimagestandard', 'cover'}
|
||||
COVER_TYPES = {
|
||||
'coverimagestandard', 'other.ms-coverimage-standard',
|
||||
'other.ms-titleimage-standard', 'other.ms-titleimage',
|
||||
'other.ms-coverimage', 'other.ms-thumbimage-standard',
|
||||
'other.ms-thumbimage', 'thumbimagestandard', 'cover'}
|
||||
|
||||
def find_cover_image(container):
|
||||
'Find a raster image marked as a cover in the OPF'
|
||||
@ -92,7 +93,8 @@ def find_cover_page(container):
|
||||
def find_cover_image_in_page(container, cover_page):
|
||||
root = container.parsed(cover_page)
|
||||
body = XPath('//h:body')(root)
|
||||
if len(body) != 1: return
|
||||
if len(body) != 1:
|
||||
return
|
||||
body = body[0]
|
||||
images = []
|
||||
for img in XPath('descendant::h:img[@src]|descendant::svg:svg/descendant::svg:image')(body):
|
||||
@ -152,7 +154,7 @@ def create_epub_cover(container, cover_path):
|
||||
ar = 'xMidYMid meet' if keep_aspect else 'none'
|
||||
templ = CoverManager.SVG_TEMPLATE.replace('__ar__', ar)
|
||||
templ = templ.replace('__viewbox__', '0 0 %d %d'%(width, height))
|
||||
templ = templ.replace('__width__', str(width))
|
||||
templ = templ.replace('__width__', str(width))
|
||||
templ = templ.replace('__height__', str(height))
|
||||
titlepage_item = container.generate_item('titlepage.xhtml',
|
||||
id_prefix='titlepage')
|
||||
@ -179,7 +181,7 @@ def create_epub_cover(container, cover_path):
|
||||
guide = container.opf_get_or_create('guide')
|
||||
container.insert_into_xml(guide, guide.makeelement(
|
||||
OPF('reference'), type='cover', title=_('Cover'),
|
||||
href=container.name_to_href(titlepage)))
|
||||
href=container.name_to_href(titlepage, base=container.opf_name)))
|
||||
metadata = container.opf_get_or_create('metadata')
|
||||
meta = metadata.makeelement(OPF('meta'), name='cover')
|
||||
meta.set('content', raster_cover_item.get('id'))
|
||||
|
@ -175,7 +175,7 @@ def gui_polish(data):
|
||||
if not data.pop('metadata'):
|
||||
data.pop('opf')
|
||||
if not data.pop('do_cover'):
|
||||
data.pop('cover')
|
||||
data.pop('cover', None)
|
||||
file_map = {x:x for x in files}
|
||||
opts = ALL_OPTS.copy()
|
||||
opts.update(data)
|
||||
|
@ -148,7 +148,6 @@ class OEBReader(object):
|
||||
if not has_aut:
|
||||
m.add('creator', self.oeb.translate(__('Unknown')), role='aut')
|
||||
|
||||
|
||||
def _manifest_prune_invalid(self):
|
||||
'''
|
||||
Remove items from manifest that contain invalid data. This prevents
|
||||
@ -197,6 +196,8 @@ class OEBReader(object):
item.media_type[-4:] in ('/xml', '+xml')):
hrefs = [r[2] for r in iterlinks(data)]
for href in hrefs:
if isinstance(href, bytes):
href = href.decode('utf-8')
href, _ = urldefrag(href)
if not href:
continue
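The loop above normalises every href harvested by iterlinks() before it is looked up in the manifest: byte strings are decoded, fragments dropped, and fragment-only links skipped. The same steps as a standalone generator (a sketch, not the reader's actual code):

from urlparse import urldefrag  # Python 2 spelling, as used by this module

def normalised_hrefs(raw_hrefs):
    # Decode byte strings, strip '#fragment' parts, and skip links that
    # point only at a fragment of the current file.
    for href in raw_hrefs:
        if isinstance(href, bytes):
            href = href.decode('utf-8')
        href, _ = urldefrag(href)
        if not href:
            continue
        yield href

# list(normalised_hrefs([b'ch1.html#note3', '#top', 'images/cover.jpg']))
# -> ['ch1.html', 'images/cover.jpg']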
|
||||
@ -293,7 +294,7 @@ class OEBReader(object):
|
||||
continue
|
||||
try:
|
||||
href = item.abshref(urlnormalize(href))
|
||||
except ValueError: # Malformed URL
|
||||
except ValueError: # Malformed URL
|
||||
continue
|
||||
if href not in manifest.hrefs:
|
||||
continue
|
||||
@ -394,9 +395,9 @@ class OEBReader(object):
|
||||
|
||||
authorElement = xpath(child,
|
||||
'descendant::calibre:meta[@name = "author"]')
|
||||
if authorElement :
|
||||
if authorElement:
|
||||
author = authorElement[0].text
|
||||
else :
|
||||
else:
|
||||
author = None
|
||||
|
||||
descriptionElement = xpath(child,
|
||||
@ -406,7 +407,7 @@ class OEBReader(object):
|
||||
method='text', encoding=unicode).strip()
|
||||
if not description:
|
||||
description = None
|
||||
else :
|
||||
else:
|
||||
description = None
|
||||
|
||||
index_image = xpath(child,
|
||||
@ -497,7 +498,8 @@ class OEBReader(object):
|
||||
titles = []
|
||||
headers = []
|
||||
for item in self.oeb.spine:
|
||||
if not item.linear: continue
|
||||
if not item.linear:
|
||||
continue
|
||||
html = item.data
|
||||
title = ''.join(xpath(html, '/h:html/h:head/h:title/text()'))
|
||||
title = COLLAPSE_RE.sub(' ', title.strip())
|
||||
@ -515,17 +517,21 @@ class OEBReader(object):
|
||||
if len(titles) > len(set(titles)):
|
||||
use = headers
|
||||
for title, item in izip(use, self.oeb.spine):
|
||||
if not item.linear: continue
|
||||
if not item.linear:
|
||||
continue
|
||||
toc.add(title, item.href)
|
||||
return True
|
||||
|
||||
def _toc_from_opf(self, opf, item):
|
||||
self.oeb.auto_generated_toc = False
|
||||
if self._toc_from_ncx(item): return
|
||||
if self._toc_from_ncx(item):
|
||||
return
|
||||
# Prefer HTML to tour based TOC, since several LIT files
|
||||
# have good HTML TOCs but bad tour based TOCs
|
||||
if self._toc_from_html(opf): return
|
||||
if self._toc_from_tour(opf): return
|
||||
if self._toc_from_html(opf):
|
||||
return
|
||||
if self._toc_from_tour(opf):
|
||||
return
|
||||
self._toc_from_spine(opf)
|
||||
self.oeb.auto_generated_toc = True
|
||||
|
||||
@ -589,8 +595,10 @@ class OEBReader(object):
|
||||
return True
|
||||
|
||||
def _pages_from_opf(self, opf, item):
|
||||
if self._pages_from_ncx(opf, item): return
|
||||
if self._pages_from_page_map(opf): return
|
||||
if self._pages_from_ncx(opf, item):
|
||||
return
|
||||
if self._pages_from_page_map(opf):
|
||||
return
|
||||
return
|
||||
|
||||
def _cover_from_html(self, hcover):
|
||||
|
@ -47,6 +47,8 @@ class ManifestTrimmer(object):
|
||||
item.data is not None:
|
||||
hrefs = [r[2] for r in iterlinks(item.data)]
|
||||
for href in hrefs:
|
||||
if isinstance(href, bytes):
|
||||
href = href.decode('utf-8')
|
||||
try:
|
||||
href = item.abshref(urlnormalize(href))
|
||||
except:
|
||||
|
@ -51,7 +51,7 @@ class Links(object):
for link in self.links:
path, href, frag = link[0]
page, rect = link[1:]
combined_path = os.path.abspath(os.path.join(os.path.dirname(path), *href.split('/')))
combined_path = os.path.abspath(os.path.join(os.path.dirname(path), *unquote(href).split('/')))
is_local = not href or combined_path in self.anchors
annot = Dictionary({
'Type':Name('Annot'), 'Subtype':Name('Link'),
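The fix above runs the href through unquote() before joining it with the directory of the source page, so percent-encoded internal links ('my%20chapter.html') resolve to the same absolute path that the anchor table stores. In isolation (illustrative paths, Python 2 spelling of unquote):

import os
from urllib import unquote

def resolve_link_target(source_html, href):
    # Decode percent-escapes, then join each path segment with the
    # directory of the page containing the link.
    return os.path.abspath(os.path.join(
        os.path.dirname(source_html), *unquote(href).split('/')))

# resolve_link_target('/tmp/book/index.html', 'text/my%20chapter.html')
# -> '/tmp/book/text/my chapter.html'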
@ -716,10 +716,11 @@ def choose_save_file(window, name, title, filters=[], all_files=True):
ans = ans[0]
return ans

def choose_images(window, name, title, select_only_single_file=True):
def choose_images(window, name, title, select_only_single_file=True,
formats=('png', 'gif', 'jpg', 'jpeg', 'svg')):
mode = QFileDialog.ExistingFile if select_only_single_file else QFileDialog.ExistingFiles
fd = FileDialog(title=title, name=name,
filters=[('Images', ['png', 'gif', 'jpeg', 'jpg', 'svg'])],
filters=[('Images', list(formats))],
parent=window, add_all_files_filter=False, mode=mode,
)
fd.setParent(None)
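The new formats parameter lets a caller narrow the file filter instead of always offering every image type; the Cover.select_cover hunk further down in this changeset uses it to exclude SVG when picking a cover. A hedged call-site sketch:

# Restrict the chooser to raster formats only (mirrors the metadata
# single-download cover chooser later in this changeset).
files = choose_images(window, 'change cover dialog', _('Choose cover'),
                      formats=('png', 'gif', 'jpg', 'jpeg'))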
|
||||
|
@ -17,14 +17,14 @@ from PyQt4.Qt import (QDialog, QGridLayout, QIcon, QCheckBox, QLabel, QFrame,
|
||||
QSizePolicy, QTimer, QModelIndex, QTextEdit,
|
||||
QInputDialog, QMenu)
|
||||
|
||||
from calibre.gui2 import error_dialog, Dispatcher, gprefs
|
||||
from calibre.gui2 import error_dialog, Dispatcher, gprefs, question_dialog
|
||||
from calibre.gui2.actions import InterfaceAction
|
||||
from calibre.gui2.convert.metadata import create_opf_file
|
||||
from calibre.gui2.dialogs.progress import ProgressDialog
|
||||
from calibre.ptempfile import PersistentTemporaryDirectory
|
||||
from calibre.utils.config_base import tweaks
|
||||
|
||||
class Polish(QDialog): # {{{
|
||||
class Polish(QDialog): # {{{
|
||||
|
||||
def __init__(self, db, book_id_map, parent=None):
|
||||
from calibre.ebooks.oeb.polish.main import HELP
|
||||
@ -58,7 +58,7 @@ class Polish(QDialog): # {{{
|
||||
' formats are not capable of supporting all the'
|
||||
' metadata in calibre.</p><p>There is a separate option to'
|
||||
' update the cover.</p>'),
|
||||
'do_cover': _('<p>Update the covers in the ebook files to match the'
|
||||
'do_cover': _('<p>Update the covers in the ebook files to match the'
|
||||
' current cover in the calibre library.</p>'
|
||||
'<p>If the ebook file does not have'
|
||||
' an identifiable cover, a new cover is inserted.</p>'
|
||||
@ -204,6 +204,15 @@ class Polish(QDialog): # {{{
|
||||
ac[action] = saved_prefs[action] = bool(getattr(self, 'opt_'+action).isChecked())
|
||||
if ac[action]:
|
||||
something = True
|
||||
if ac['jacket'] and not ac['metadata']:
|
||||
if not question_dialog(self, _('Must update metadata'),
|
||||
_('You have selected the option to add metadata as '
|
||||
'a "book jacket". For this option to work, you '
|
||||
'must also select the option to update metadata in'
|
||||
' the book files. Do you want to select it?')):
|
||||
return
|
||||
ac['metadata'] = saved_prefs['metadata'] = True
|
||||
self.opt_metadata.setChecked(True)
|
||||
if not something:
|
||||
return error_dialog(self, _('No actions selected'),
|
||||
_('You must select at least one action, or click Cancel.'),
|
||||
@ -275,7 +284,7 @@ class Polish(QDialog): # {{{
|
||||
self.jobs.append((desc, data, book_id, base, is_orig))
|
||||
# }}}
|
||||
|
||||
class Report(QDialog): # {{{
|
||||
class Report(QDialog): # {{{
|
||||
|
||||
def __init__(self, parent):
|
||||
QDialog.__init__(self, parent)
|
||||
@ -427,7 +436,7 @@ class PolishAction(InterfaceAction):
|
||||
supported = set(SUPPORTED)
|
||||
for x in SUPPORTED:
|
||||
supported.add('ORIGINAL_'+x)
|
||||
ans = [(x, set( (db.formats(x, index_is_id=True) or '').split(',') )
|
||||
ans = [(x, set((db.formats(x, index_is_id=True) or '').split(','))
|
||||
.intersection(supported)) for x in book_ids]
|
||||
ans = [x for x in ans if x[1]]
|
||||
if not ans:
|
||||
@ -476,8 +485,7 @@ class PolishAction(InterfaceAction):
|
||||
db.save_original_format(book_id, fmt, notify=False)
|
||||
with open(path, 'rb') as f:
|
||||
db.add_format(book_id, fmt, f, index_is_id=True)
|
||||
self.gui.status_bar.show_message(job.description + \
|
||||
(' completed'), 2000)
|
||||
self.gui.status_bar.show_message(job.description + (' completed'), 2000)
|
||||
try:
|
||||
shutil.rmtree(base)
|
||||
parent = os.path.dirname(base)
|
||||
|
@ -406,6 +406,7 @@ class BookInfo(QWebView):
|
||||
remove_format = pyqtSignal(int, object)
|
||||
save_format = pyqtSignal(int, object)
|
||||
restore_format = pyqtSignal(int, object)
|
||||
copy_link = pyqtSignal(object)
|
||||
|
||||
def __init__(self, vertical, parent=None):
|
||||
QWebView.__init__(self, parent)
|
||||
@ -419,26 +420,33 @@ class BookInfo(QWebView):
|
||||
palette.setBrush(QPalette.Base, Qt.transparent)
|
||||
self.page().setPalette(palette)
|
||||
self.css = P('templates/book_details.css', data=True).decode('utf-8')
|
||||
for x, icon in [('remove', 'trash.png'), ('save', 'save.png'), ('restore', 'edit-undo.png')]:
|
||||
for x, icon in [('remove_format', 'trash.png'), ('save_format', 'save.png'), ('restore_format', 'edit-undo.png'), ('copy_link','edit-copy.png')]:
|
||||
ac = QAction(QIcon(I(icon)), '', self)
|
||||
ac.current_fmt = None
|
||||
ac.triggered.connect(getattr(self, '%s_format_triggerred'%x))
|
||||
setattr(self, '%s_format_action'%x, ac)
|
||||
ac.current_url = None
|
||||
ac.triggered.connect(getattr(self, '%s_triggerred'%x))
|
||||
setattr(self, '%s_action'%x, ac)
|
||||
|
||||
def context_action_triggered(self, which):
|
||||
f = getattr(self, '%s_format_action'%which).current_fmt
|
||||
if f:
|
||||
f = getattr(self, '%s_action'%which).current_fmt
|
||||
url = getattr(self, '%s_action'%which).current_url
|
||||
if f and 'format' in which:
|
||||
book_id, fmt = f
|
||||
getattr(self, '%s_format'%which).emit(book_id, fmt)
|
||||
getattr(self, which).emit(book_id, fmt)
|
||||
if url and 'link' in which:
|
||||
getattr(self, which).emit(url)
|
||||
|
||||
def remove_format_triggerred(self):
|
||||
self.context_action_triggered('remove')
|
||||
self.context_action_triggered('remove_format')
|
||||
|
||||
def save_format_triggerred(self):
|
||||
self.context_action_triggered('save')
|
||||
self.context_action_triggered('save_format')
|
||||
|
||||
def restore_format_triggerred(self):
|
||||
self.context_action_triggered('restore')
|
||||
self.context_action_triggered('restore_format')
|
||||
|
||||
def copy_link_triggerred(self):
|
||||
self.context_action_triggered('copy_link')
|
||||
|
||||
def link_activated(self, link):
|
||||
self._link_clicked = True
|
||||
@ -474,24 +482,33 @@ class BookInfo(QWebView):
|
||||
for action in list(menu.actions()):
|
||||
if action is not ca:
|
||||
menu.removeAction(action)
|
||||
if not r.isNull() and url.startswith('format:'):
|
||||
parts = url.split(':')
|
||||
try:
|
||||
book_id, fmt = int(parts[1]), parts[2]
|
||||
except:
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
else:
|
||||
for a, t in [('remove', _('Delete the %s format')),
|
||||
('save', _('Save the %s format to disk')),
|
||||
('restore', _('Restore the %s format')),
|
||||
if not r.isNull():
|
||||
if url.startswith('http'):
|
||||
for a, t in [('copy', _('&Copy Link')),
|
||||
]:
|
||||
if a == 'restore' and not fmt.upper().startswith('ORIGINAL_'):
|
||||
continue
|
||||
ac = getattr(self, '%s_format_action'%a)
|
||||
ac.current_fmt = (book_id, fmt)
|
||||
ac.setText(t%parts[2])
|
||||
ac = getattr(self, '%s_link_action'%a)
|
||||
ac.current_url = url
|
||||
ac.setText(t)
|
||||
menu.addAction(ac)
|
||||
|
||||
if url.startswith('format:'):
|
||||
parts = url.split(':')
|
||||
try:
|
||||
book_id, fmt = int(parts[1]), parts[2]
|
||||
except:
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
else:
|
||||
for a, t in [('remove', _('Delete the %s format')),
|
||||
('save', _('Save the %s format to disk')),
|
||||
('restore', _('Restore the %s format')),
|
||||
]:
|
||||
if a == 'restore' and not fmt.upper().startswith('ORIGINAL_'):
|
||||
continue
|
||||
ac = getattr(self, '%s_format_action'%a)
|
||||
ac.current_fmt = (book_id, fmt)
|
||||
ac.setText(t%parts[2])
|
||||
menu.addAction(ac)
|
||||
if len(menu.actions()) > 0:
|
||||
menu.exec_(ev.globalPos())
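Both branches above decode the link scheme used by the book-details panel: 'format:<book_id>:<FMT>' for format entries, plain http(s) URLs for web links. A small sketch of the format-link parsing (helper name is illustrative):

def parse_format_link(url):
    # 'format:123:EPUB' -> (123, 'EPUB'); raises ValueError/IndexError on a
    # malformed link, which the caller above catches and prints a traceback for.
    parts = url.split(':')
    return int(parts[1]), parts[2]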
|
||||
|
||||
@ -594,6 +611,7 @@ class BookDetails(QWidget): # {{{
|
||||
remove_specific_format = pyqtSignal(int, object)
|
||||
save_specific_format = pyqtSignal(int, object)
|
||||
restore_specific_format = pyqtSignal(int, object)
|
||||
copy_link = pyqtSignal(object)
|
||||
remote_file_dropped = pyqtSignal(object, object)
|
||||
files_dropped = pyqtSignal(object, object)
|
||||
cover_changed = pyqtSignal(object, object)
|
||||
@ -664,6 +682,7 @@ class BookDetails(QWidget): # {{{
|
||||
self.book_info.remove_format.connect(self.remove_specific_format)
|
||||
self.book_info.save_format.connect(self.save_specific_format)
|
||||
self.book_info.restore_format.connect(self.restore_specific_format)
|
||||
self.book_info.copy_link.connect(self.copy_link)
|
||||
self.setCursor(Qt.PointingHandCursor)
|
||||
|
||||
def handle_click(self, link):
|
||||
|
@ -75,7 +75,7 @@ class GroupModel(QAbstractListModel):
|
||||
def get_preferred_input_format_for_book(db, book_id):
|
||||
recs = load_specifics(db, book_id)
|
||||
if recs:
|
||||
return recs.get('gui_preferred_input_format', None)
|
||||
return recs.get('gui_preferred_input_format', None)
|
||||
|
||||
def get_available_formats_for_book(db, book_id):
|
||||
available_formats = db.formats(book_id, index_is_id=True)
|
||||
@ -147,6 +147,7 @@ class Config(ResizableDialog, Ui_Dialog):
|
||||
self.connect(self.groups, SIGNAL('entered(QModelIndex)'),
|
||||
self.show_group_help)
|
||||
rb = self.buttonBox.button(self.buttonBox.RestoreDefaults)
|
||||
rb.setText(_('Restore &Defaults'))
|
||||
self.connect(rb, SIGNAL('clicked()'), self.restore_defaults)
|
||||
self.groups.setMouseTracking(True)
|
||||
geom = gprefs.get('convert_single_dialog_geom', None)
|
||||
@ -188,7 +189,6 @@ class Config(ResizableDialog, Ui_Dialog):
|
||||
return cls(self.stack, self.plumber.get_option_by_name,
|
||||
self.plumber.get_option_help, self.db, self.book_id)
|
||||
|
||||
|
||||
self.mw = widget_factory(MetadataWidget)
|
||||
self.setWindowTitle(_('Convert')+ ' ' + unicode(self.mw.title.text()))
|
||||
lf = widget_factory(LookAndFeelWidget)
|
||||
@ -209,7 +209,8 @@ class Config(ResizableDialog, Ui_Dialog):
|
||||
self.plumber.get_option_help, self.db, self.book_id)
|
||||
while True:
|
||||
c = self.stack.currentWidget()
|
||||
if not c: break
|
||||
if not c:
|
||||
break
|
||||
self.stack.removeWidget(c)
|
||||
|
||||
widgets = [self.mw, lf, hw, ps, sd, toc, sr]
|
||||
@ -234,7 +235,6 @@ class Config(ResizableDialog, Ui_Dialog):
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
def setup_input_output_formats(self, db, book_id, preferred_input_format,
|
||||
preferred_output_format):
|
||||
if preferred_output_format:
|
||||
|
@ -1131,6 +1131,13 @@ class DeviceMixin(object): # {{{
|
||||
# so we don't need to worry about whether some succeeded or not.
|
||||
self.refresh_ondevice(reset_only=False)
|
||||
|
||||
try:
|
||||
if not self.current_view().currentIndex().isValid():
|
||||
self.current_view().set_current_row()
|
||||
self.current_view().refresh_book_details()
|
||||
except:
|
||||
traceback.print_exc()
|
||||
|
||||
def dispatch_sync_event(self, dest, delete, specific):
|
||||
rows = self.library_view.selectionModel().selectedRows()
|
||||
if not rows or len(rows) == 0:
|
||||
|
@ -7,7 +7,7 @@ __docformat__ = 'restructuredtext en'
|
||||
|
||||
import functools
|
||||
|
||||
from PyQt4.Qt import (Qt, QStackedWidget, QMenu, QTimer,
|
||||
from PyQt4.Qt import (Qt, QApplication, QStackedWidget, QMenu, QTimer,
|
||||
QSize, QSizePolicy, QStatusBar, QLabel, QFont)
|
||||
|
||||
from calibre.utils.config import prefs
|
||||
@ -274,6 +274,8 @@ class LayoutMixin(object): # {{{
|
||||
self.iactions['Save To Disk'].save_library_format_by_ids)
|
||||
self.book_details.restore_specific_format.connect(
|
||||
self.iactions['Remove Books'].restore_format)
|
||||
self.book_details.copy_link.connect(self.bd_copy_link,
|
||||
type=Qt.QueuedConnection)
|
||||
self.book_details.view_device_book.connect(
|
||||
self.iactions['View'].view_device_book)
|
||||
|
||||
@ -295,6 +297,10 @@ class LayoutMixin(object): # {{{
|
||||
if self.cover_flow:
|
||||
self.cover_flow.dataChanged()
|
||||
|
||||
def bd_copy_link(self, url):
|
||||
if url:
|
||||
QApplication.clipboard().setText(url)
|
||||
|
||||
def save_layout_state(self):
|
||||
for x in ('library', 'memory', 'card_a', 'card_b'):
|
||||
getattr(self, x+'_view').save_state()
|
||||
|
@ -16,11 +16,10 @@ from calibre.constants import __appname__
|
||||
from calibre.gui2.search_box import SearchBox2, SavedSearchBox
|
||||
from calibre.gui2.throbber import ThrobbingButton
|
||||
from calibre.gui2.bars import BarsManager
|
||||
from calibre.gui2.widgets import ComboBoxWithHelp
|
||||
from calibre.utils.config_base import tweaks
|
||||
from calibre import human_readable
|
||||
|
||||
class LocationManager(QObject): # {{{
|
||||
class LocationManager(QObject): # {{{
|
||||
|
||||
locations_changed = pyqtSignal()
|
||||
unmount_device = pyqtSignal()
|
||||
@ -165,7 +164,7 @@ class LocationManager(QObject): # {{{
|
||||
|
||||
# }}}
|
||||
|
||||
class SearchBar(QWidget): # {{{
|
||||
class SearchBar(QWidget): # {{{
|
||||
|
||||
def __init__(self, parent):
|
||||
QWidget.__init__(self, parent)
|
||||
@ -173,11 +172,13 @@ class SearchBar(QWidget): # {{{
|
||||
self.setLayout(self._layout)
|
||||
self._layout.setContentsMargins(0,5,0,0)
|
||||
|
||||
x = ComboBoxWithHelp(self)
|
||||
x.setMaximumSize(QSize(150, 16777215))
|
||||
x.setObjectName("search_restriction")
|
||||
x = QToolButton(self)
|
||||
x.setText(_('Virtual Library'))
|
||||
x.setIcon(QIcon(I('lt.png')))
|
||||
x.setObjectName("virtual_library")
|
||||
x.setToolButtonStyle(Qt.ToolButtonTextBesideIcon)
|
||||
l.addWidget(x)
|
||||
parent.search_restriction = x
|
||||
parent.virtual_library = x
|
||||
|
||||
x = QLabel(self)
|
||||
x.setObjectName("search_count")
|
||||
@ -243,7 +244,7 @@ class SearchBar(QWidget): # {{{
|
||||
|
||||
# }}}
|
||||
|
||||
class Spacer(QWidget): # {{{
|
||||
class Spacer(QWidget): # {{{
|
||||
|
||||
def __init__(self, parent):
|
||||
QWidget.__init__(self, parent)
|
||||
@ -252,7 +253,7 @@ class Spacer(QWidget): # {{{
|
||||
self.l.addStretch(10)
|
||||
# }}}
|
||||
|
||||
class MainWindowMixin(object): # {{{
|
||||
class MainWindowMixin(object): # {{{
|
||||
|
||||
def __init__(self, db):
|
||||
self.setObjectName('MainWindow')
|
||||
|
@ -12,7 +12,7 @@ from PyQt4.Qt import (QAbstractTableModel, Qt, pyqtSignal, QIcon, QImage,
|
||||
QModelIndex, QVariant, QDateTime, QColor, QPixmap)
|
||||
|
||||
from calibre.gui2 import NONE, UNDEFINED_QDATETIME, error_dialog
|
||||
from calibre.utils.pyparsing import ParseException
|
||||
from calibre.utils.search_query_parser import ParseException
|
||||
from calibre.ebooks.metadata import fmt_sidx, authors_to_string, string_to_authors
|
||||
from calibre.ebooks.metadata.book.base import SafeFormat
|
||||
from calibre.ptempfile import PersistentTemporaryFile
|
||||
|
@ -927,9 +927,9 @@ class Cover(ImageView): # {{{
return sz

def select_cover(self, *args):
files = choose_images(self, 'change cover dialog',
_('Choose cover for ') +
self.dialog.title.current_val)
files = choose_images(
self, 'change cover dialog', _('Choose cover for ') + self.dialog.title.current_val,
formats=('png', 'gif', 'jpg', 'jpeg'))
if not files:
return
_file = files[0]
|
||||
|
@ -21,7 +21,7 @@ from PyQt4.Qt import (
|
||||
QDialog, QVBoxLayout, QLabel, QDialogButtonBox, QStyle, QStackedWidget,
|
||||
QWidget, QTableView, QGridLayout, QFontInfo, QPalette, QTimer, pyqtSignal,
|
||||
QAbstractTableModel, QVariant, QSize, QListView, QPixmap, QModelIndex,
|
||||
QAbstractListModel, QColor, QRect, QTextBrowser, QStringListModel)
|
||||
QAbstractListModel, QColor, QRect, QTextBrowser, QStringListModel, QMenu, QCursor)
|
||||
from PyQt4.QtWebKit import QWebView
|
||||
|
||||
from calibre.customize.ui import metadata_plugins
|
||||
@ -40,7 +40,7 @@ from calibre.utils.ipc.simple_worker import fork_job, WorkerError
|
||||
from calibre.ptempfile import TemporaryDirectory
|
||||
# }}}
|
||||
|
||||
class RichTextDelegate(QStyledItemDelegate): # {{{
|
||||
class RichTextDelegate(QStyledItemDelegate): # {{{
|
||||
|
||||
def __init__(self, parent=None, max_width=160):
|
||||
QStyledItemDelegate.__init__(self, parent)
|
||||
@ -77,7 +77,7 @@ class RichTextDelegate(QStyledItemDelegate): # {{{
|
||||
painter.restore()
|
||||
# }}}
|
||||
|
||||
class CoverDelegate(QStyledItemDelegate): # {{{
|
||||
class CoverDelegate(QStyledItemDelegate): # {{{
|
||||
|
||||
needs_redraw = pyqtSignal()
|
||||
|
||||
@ -143,7 +143,7 @@ class CoverDelegate(QStyledItemDelegate): # {{{
|
||||
|
||||
# }}}
|
||||
|
||||
class ResultsModel(QAbstractTableModel): # {{{
|
||||
class ResultsModel(QAbstractTableModel): # {{{
|
||||
|
||||
COLUMNS = (
|
||||
'#', _('Title'), _('Published'), _('Has cover'), _('Has summary')
|
||||
@ -182,7 +182,6 @@ class ResultsModel(QAbstractTableModel): # {{{
|
||||
p = book.publisher if book.publisher else ''
|
||||
return '<b>%s</b><br><i>%s</i>' % (d, p)
|
||||
|
||||
|
||||
def data(self, index, role):
|
||||
row, col = index.row(), index.column()
|
||||
try:
|
||||
@ -233,7 +232,7 @@ class ResultsModel(QAbstractTableModel): # {{{
|
||||
|
||||
# }}}
|
||||
|
||||
class ResultsView(QTableView): # {{{
|
||||
class ResultsView(QTableView): # {{{
|
||||
|
||||
show_details_signal = pyqtSignal(object)
|
||||
book_selected = pyqtSignal(object)
|
||||
@ -316,7 +315,7 @@ class ResultsView(QTableView): # {{{
|
||||
|
||||
# }}}
|
||||
|
||||
class Comments(QWebView): # {{{
|
||||
class Comments(QWebView): # {{{
|
||||
|
||||
def __init__(self, parent=None):
|
||||
QWebView.__init__(self, parent)
|
||||
@ -384,7 +383,7 @@ class Comments(QWebView): # {{{
|
||||
return QSize(800, 300)
|
||||
# }}}
|
||||
|
||||
class IdentifyWorker(Thread): # {{{
|
||||
class IdentifyWorker(Thread): # {{{
|
||||
|
||||
def __init__(self, log, abort, title, authors, identifiers, caches):
|
||||
Thread.__init__(self)
|
||||
@ -441,7 +440,7 @@ class IdentifyWorker(Thread): # {{{
|
||||
|
||||
# }}}
|
||||
|
||||
class IdentifyWidget(QWidget): # {{{
|
||||
class IdentifyWidget(QWidget): # {{{
|
||||
|
||||
rejected = pyqtSignal()
|
||||
results_found = pyqtSignal()
|
||||
@ -552,12 +551,11 @@ class IdentifyWidget(QWidget): # {{{
|
||||
self.results_view.show_results(self.worker.results)
|
||||
self.results_found.emit()
|
||||
|
||||
|
||||
def cancel(self):
|
||||
self.abort.set()
|
||||
# }}}
|
||||
|
||||
class CoverWorker(Thread): # {{{
|
||||
class CoverWorker(Thread): # {{{
|
||||
|
||||
def __init__(self, log, abort, title, authors, identifiers, caches):
|
||||
Thread.__init__(self)
|
||||
@ -609,7 +607,8 @@ class CoverWorker(Thread): # {{{
|
||||
|
||||
def scan_once(self, tdir, seen):
|
||||
for x in list(os.listdir(tdir)):
|
||||
if x in seen: continue
|
||||
if x in seen:
|
||||
continue
|
||||
if x.endswith('.cover') and os.path.exists(os.path.join(tdir,
|
||||
x+'.done')):
|
||||
name = x.rpartition('.')[0]
|
||||
@ -635,7 +634,7 @@ class CoverWorker(Thread): # {{{
|
||||
|
||||
# }}}
|
||||
|
||||
class CoversModel(QAbstractListModel): # {{{
|
||||
class CoversModel(QAbstractListModel): # {{{
|
||||
|
||||
def __init__(self, current_cover, parent=None):
|
||||
QAbstractListModel.__init__(self, parent)
|
||||
@ -770,7 +769,7 @@ class CoversModel(QAbstractListModel): # {{{
|
||||
|
||||
# }}}
|
||||
|
||||
class CoversView(QListView): # {{{
|
||||
class CoversView(QListView): # {{{
|
||||
|
||||
chosen = pyqtSignal()
|
||||
|
||||
@ -793,6 +792,8 @@ class CoversView(QListView): # {{{
|
||||
type=Qt.QueuedConnection)
|
||||
|
||||
self.doubleClicked.connect(self.chosen, type=Qt.QueuedConnection)
|
||||
self.setContextMenuPolicy(Qt.CustomContextMenu)
|
||||
self.customContextMenuRequested.connect(self.show_context_menu)
|
||||
|
||||
def select(self, num):
|
||||
current = self.model().index(num)
|
||||
@ -814,9 +815,24 @@ class CoversView(QListView): # {{{
|
||||
else:
|
||||
self.select(self.m.index_from_pointer(pointer).row())
|
||||
|
||||
def show_context_menu(self, point):
|
||||
idx = self.currentIndex()
|
||||
if idx and idx.isValid() and not idx.data(Qt.UserRole).toPyObject():
|
||||
m = QMenu()
|
||||
m.addAction(QIcon(I('view.png')), _('View this cover at full size'), self.show_cover)
|
||||
m.exec_(QCursor.pos())
|
||||
|
||||
def show_cover(self):
|
||||
idx = self.currentIndex()
|
||||
pmap = self.model().cover_pixmap(idx)
|
||||
if pmap is not None:
|
||||
from calibre.gui2.viewer.image_popup import ImageView
|
||||
d = ImageView(self, pmap, unicode(idx.data(Qt.DisplayRole).toString()), geom_name='metadata_download_cover_popup_geom')
|
||||
d(use_exec=True)
|
||||
|
||||
# }}}
|
||||
|
||||
class CoversWidget(QWidget): # {{{
|
||||
class CoversWidget(QWidget): # {{{
|
||||
|
||||
chosen = pyqtSignal()
|
||||
finished = pyqtSignal()
|
||||
@ -922,7 +938,7 @@ class CoversWidget(QWidget): # {{{
|
||||
|
||||
# }}}
|
||||
|
||||
class LogViewer(QDialog): # {{{
|
||||
class LogViewer(QDialog): # {{{
|
||||
|
||||
def __init__(self, log, parent=None):
|
||||
QDialog.__init__(self, parent)
|
||||
@ -970,7 +986,7 @@ class LogViewer(QDialog): # {{{
|
||||
|
||||
# }}}
|
||||
|
||||
class FullFetch(QDialog): # {{{
|
||||
class FullFetch(QDialog): # {{{
|
||||
|
||||
def __init__(self, current_cover=None, parent=None):
|
||||
QDialog.__init__(self, parent)
|
||||
@ -1085,7 +1101,7 @@ class FullFetch(QDialog): # {{{
|
||||
return self.exec_()
|
||||
# }}}
|
||||
|
||||
class CoverFetch(QDialog): # {{{
|
||||
class CoverFetch(QDialog): # {{{
|
||||
|
||||
def __init__(self, current_cover=None, parent=None):
|
||||
QDialog.__init__(self, parent)
|
||||
|
@ -164,7 +164,7 @@ Author matching is exact.</string>
|
||||
<item>
|
||||
<widget class="QLabel" name="label_3">
|
||||
<property name="text">
|
||||
<string>Ignore files with the following extensions when automatically adding </string>
|
||||
<string><b>Ignore</b> files with the following extensions when automatically adding </string>
|
||||
</property>
|
||||
<property name="wordWrap">
|
||||
<bool>true</bool>
|
||||
|
@ -14,7 +14,6 @@ from calibre.gui2.preferences.behavior_ui import Ui_Form
|
||||
from calibre.gui2 import config, info_dialog, dynamic, gprefs
|
||||
from calibre.utils.config import prefs
|
||||
from calibre.customize.ui import available_output_formats, all_input_formats
|
||||
from calibre.utils.search_query_parser import saved_searches
|
||||
from calibre.ebooks import BOOK_EXTENSIONS
|
||||
from calibre.ebooks.oeb.iterator import is_supported
|
||||
from calibre.constants import iswindows
|
||||
@ -48,9 +47,13 @@ class ConfigWidget(ConfigWidgetBase, Ui_Form):
|
||||
choices = [(x.upper(), x) for x in output_formats]
|
||||
r('output_format', prefs, choices=choices, setting=OutputFormatSetting)
|
||||
|
||||
restrictions = sorted(saved_searches().names(), key=sort_key)
|
||||
restrictions = sorted(db.prefs['virtual_libraries'].iterkeys(), key=sort_key)
|
||||
choices = [('', '')] + [(x, x) for x in restrictions]
|
||||
r('gui_restriction', db.prefs, choices=choices)
|
||||
# check that the virtual library still exists
|
||||
vls = db.prefs['virtual_lib_on_startup']
|
||||
if vls and vls not in restrictions:
|
||||
db.prefs['virtual_lib_on_startup'] = ''
|
||||
r('virtual_lib_on_startup', db.prefs, choices=choices)
|
||||
self.reset_confirmation_button.clicked.connect(self.reset_confirmation_dialogs)
|
||||
|
||||
self.input_up_button.clicked.connect(self.up_input)
|
||||
|
@ -147,15 +147,15 @@ If not checked, the values can be Yes or No.</string>
|
||||
<item>
|
||||
<widget class="QLabel" name="label_170">
|
||||
<property name="text">
|
||||
<string>Restriction to apply when the current library is opened:</string>
|
||||
<string>Virtual library to apply when the current library is opened:</string>
|
||||
</property>
|
||||
<property name="buddy">
|
||||
<cstring>opt_gui_restriction</cstring>
|
||||
<cstring>opt_virtual_lib_on_startup</cstring>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
<item>
|
||||
<widget class="QComboBox" name="opt_gui_restriction">
|
||||
<widget class="QComboBox" name="opt_virtual_lib_on_startup">
|
||||
<property name="maximumSize">
|
||||
<size>
|
||||
<width>250</width>
|
||||
@ -163,7 +163,7 @@ If not checked, the values can be Yes or No.</string>
|
||||
</size>
|
||||
</property>
|
||||
<property name="toolTip">
|
||||
<string>Apply this restriction on calibre startup if the current library is being used. Also applied when switching to this library. Note that this setting is per library. </string>
|
||||
<string>Use this virtual library on calibre startup if the current library is being used. Also applied when switching to this library. Note that this setting is per library. </string>
|
||||
</property>
|
||||
<property name="sizeAdjustPolicy">
|
||||
<enum>QComboBox::AdjustToMinimumContentsLengthWithIcon</enum>
|
||||
|
@ -12,7 +12,6 @@ from PyQt4.Qt import Qt, QUrl, QDialog, QSize, QVBoxLayout, QLabel, \
|
||||
|
||||
from calibre.gui2.preferences import ConfigWidgetBase, test_widget
|
||||
from calibre.gui2.preferences.server_ui import Ui_Form
|
||||
from calibre.utils.search_query_parser import saved_searches
|
||||
from calibre.library.server import server_config
|
||||
from calibre.utils.config import ConfigProxy
|
||||
from calibre.gui2 import error_dialog, config, open_url, warning_dialog, \
|
||||
@ -44,13 +43,13 @@ class ConfigWidget(ConfigWidgetBase, Ui_Form):
|
||||
else self.opt_password.Password))
|
||||
self.opt_password.setEchoMode(self.opt_password.Password)
|
||||
|
||||
restrictions = sorted(saved_searches().names(), key=sort_key)
|
||||
# verify that the current restriction still exists. If not, clear it.
|
||||
csr = db.prefs.get('cs_restriction', None)
|
||||
if csr and csr not in restrictions:
|
||||
db.prefs.set('cs_restriction', '')
|
||||
restrictions = sorted(db.prefs['virtual_libraries'].iterkeys(), key=sort_key)
|
||||
choices = [('', '')] + [(x, x) for x in restrictions]
|
||||
r('cs_restriction', db.prefs, choices=choices)
|
||||
# check that the virtual library still exists
|
||||
vls = db.prefs['cs_virtual_lib_on_startup']
|
||||
if vls and vls not in restrictions:
|
||||
db.prefs['cs_virtual_lib_on_startup'] = ''
|
||||
r('cs_virtual_lib_on_startup', db.prefs, choices=choices)
|
||||
|
||||
self.start_button.setEnabled(not getattr(self.server, 'is_running', False))
|
||||
self.test_button.setEnabled(not self.start_button.isEnabled())
|
||||
|
@ -129,7 +129,7 @@
|
||||
<item row="6" column="0">
|
||||
<widget class="QLabel" name="label_16">
|
||||
<property name="text">
|
||||
<string>Max. OPDS &ungrouped items:</string>
|
||||
<string>Max. &ungrouped items:</string>
|
||||
</property>
|
||||
<property name="buddy">
|
||||
<cstring>opt_max_opds_ungrouped_items</cstring>
|
||||
@ -139,14 +139,14 @@
|
||||
<item row="7" column="0">
|
||||
<widget class="QLabel" name="label_164">
|
||||
<property name="text">
|
||||
<string>Restriction (saved search) to apply:</string>
|
||||
<string>Virtual library to apply:</string>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
<item row="7" column="1" colspan="2">
|
||||
<widget class="QComboBox" name="opt_cs_restriction">
|
||||
<widget class="QComboBox" name="opt_cs_virtual_lib_on_startup">
|
||||
<property name="toolTip">
|
||||
<string>This restriction (based on a saved search) will restrict the books the content server makes available to those matching the search. This setting is per library (i.e. you can have a different restriction per library).</string>
|
||||
<string>Setting a virtual library will restrict the books the content server makes available to those in the library. This setting is per library (i.e. you can have a different value per library).</string>
|
||||
</property>
|
||||
<property name="sizeAdjustPolicy">
|
||||
<enum>QComboBox::AdjustToMinimumContentsLengthWithIcon</enum>
|
||||
|
@ -19,9 +19,8 @@ from calibre.gui2.dialogs.confirm_delete import confirm
|
||||
from calibre.gui2.dialogs.saved_search_editor import SavedSearchEditor
|
||||
from calibre.gui2.dialogs.search import SearchDialog
|
||||
from calibre.utils.search_query_parser import saved_searches
|
||||
from calibre.utils.icu import sort_key
|
||||
|
||||
class SearchLineEdit(QLineEdit): # {{{
|
||||
class SearchLineEdit(QLineEdit): # {{{
|
||||
key_pressed = pyqtSignal(object)
|
||||
|
||||
def keyPressEvent(self, event):
|
||||
@ -42,7 +41,7 @@ class SearchLineEdit(QLineEdit): # {{{
|
||||
return QLineEdit.paste(self)
|
||||
# }}}
|
||||
|
||||
class SearchBox2(QComboBox): # {{{
|
||||
class SearchBox2(QComboBox): # {{{
|
||||
|
||||
'''
|
||||
To use this class:
|
||||
@ -59,7 +58,7 @@ class SearchBox2(QComboBox): # {{{
|
||||
accurate.
|
||||
'''
|
||||
|
||||
INTERVAL = 1500 #: Time to wait before emitting search signal
|
||||
INTERVAL = 1500 #: Time to wait before emitting search signal
|
||||
MAX_COUNT = 25
|
||||
|
||||
search = pyqtSignal(object)
|
||||
@ -254,7 +253,7 @@ class SearchBox2(QComboBox): # {{{
|
||||
|
||||
# }}}
|
||||
|
||||
class SavedSearchBox(QComboBox): # {{{
|
||||
class SavedSearchBox(QComboBox): # {{{
|
||||
|
||||
'''
|
||||
To use this class:
|
||||
@ -332,6 +331,10 @@ class SavedSearchBox(QComboBox): # {{{
|
||||
name = unicode(self.currentText())
|
||||
if not name.strip():
|
||||
name = unicode(self.search_box.text()).replace('"', '')
|
||||
if not (name and self.search_box.text()):
|
||||
error_dialog(self, _('Create saved search'),
|
||||
_('There is no search to save'), show=True)
|
||||
return
|
||||
saved_searches().delete(name)
|
||||
saved_searches().add(name, unicode(self.search_box.text()))
|
||||
# now go through an initialization cycle to ensure that the combobox has
|
||||
@ -339,7 +342,7 @@ class SavedSearchBox(QComboBox): # {{{
|
||||
# references the new search instead of the text in the search.
|
||||
self.clear()
|
||||
self.setCurrentIndex(self.findText(name))
|
||||
self.saved_search_selected (name)
|
||||
self.saved_search_selected(name)
|
||||
self.changed.emit()
|
||||
|
||||
def delete_current_search(self):
|
||||
@ -361,15 +364,15 @@ class SavedSearchBox(QComboBox): # {{{
|
||||
self.changed.emit()
|
||||
|
||||
# SIGNALed from the main UI
|
||||
def copy_search_button_clicked (self):
|
||||
idx = self.currentIndex();
|
||||
def copy_search_button_clicked(self):
|
||||
idx = self.currentIndex()
|
||||
if idx < 0:
|
||||
return
|
||||
self.search_box.set_search_string(saved_searches().lookup(unicode(self.currentText())))
|
||||
|
||||
# }}}
|
||||
|
||||
class SearchBoxMixin(object): # {{{
|
||||
class SearchBoxMixin(object): # {{{
|
||||
|
||||
def __init__(self):
|
||||
self.search.initialize('main_search_history', colorize=True,
|
||||
@ -443,7 +446,7 @@ class SearchBoxMixin(object): # {{{
|
||||
|
||||
# }}}
|
||||
|
||||
class SavedSearchBoxMixin(object): # {{{
|
||||
class SavedSearchBoxMixin(object): # {{{
|
||||
|
||||
def __init__(self):
|
||||
self.saved_search.changed.connect(self.saved_searches_changed)
|
||||
@ -452,7 +455,7 @@ class SavedSearchBoxMixin(object): # {{{
|
||||
self.saved_search.save_search_button_clicked)
|
||||
self.copy_search_button.clicked.connect(
|
||||
self.saved_search.copy_search_button_clicked)
|
||||
self.saved_searches_changed()
|
||||
# self.saved_searches_changed()
|
||||
self.saved_search.initialize(self.search, colorize=True,
|
||||
help_text=_('Saved Searches'))
|
||||
self.saved_search.setToolTip(
|
||||
@ -479,18 +482,10 @@ class SavedSearchBoxMixin(object): # {{{
|
||||
partial(self.do_saved_search_edit, None))
|
||||
|
||||
def saved_searches_changed(self, set_restriction=None, recount=True):
|
||||
p = sorted(saved_searches().names(), key=sort_key)
|
||||
if set_restriction is None:
|
||||
set_restriction = unicode(self.search_restriction.currentText())
|
||||
# rebuild the restrictions combobox using current saved searches
|
||||
self.search_restriction.clear()
|
||||
self.search_restriction.addItem('')
|
||||
self.search_restriction.addItem(_('*Current search'))
|
||||
self.build_search_restriction_list()
|
||||
if recount:
|
||||
self.tags_view.recount()
|
||||
for s in p:
|
||||
self.search_restriction.addItem(s)
|
||||
if set_restriction: # redo the search restriction if there was one
|
||||
if set_restriction: # redo the search restriction if there was one
|
||||
self.apply_named_search_restriction(set_restriction)
|
||||
|
||||
def do_saved_search_edit(self, search):
|
||||
|
@ -4,23 +4,506 @@ Created on 10 Jun 2010
|
||||
@author: charles
|
||||
'''
|
||||
|
||||
from PyQt4.Qt import Qt
|
||||
from functools import partial
|
||||
|
||||
from PyQt4.Qt import (
|
||||
Qt, QMenu, QPoint, QIcon, QDialog, QGridLayout, QLabel, QLineEdit, QComboBox,
|
||||
QDialogButtonBox, QSize, QVBoxLayout, QListWidget, QStringList, QCheckBox)
|
||||
|
||||
from calibre.gui2 import error_dialog, question_dialog
|
||||
from calibre.gui2.widgets import ComboBoxWithHelp
|
||||
from calibre.utils.icu import sort_key
|
||||
from calibre.utils.search_query_parser import ParseException
|
||||
from calibre.utils.search_query_parser import saved_searches
|
||||
|
||||
class SelectNames(QDialog): # {{{
|
||||
|
||||
def __init__(self, names, txt, parent=None):
|
||||
QDialog.__init__(self, parent)
|
||||
self.l = l = QVBoxLayout(self)
|
||||
self.setLayout(l)
|
||||
|
||||
self.la = la = QLabel(_('Create a Virtual Library based on %s') % txt)
|
||||
l.addWidget(la)
|
||||
|
||||
self._names = QListWidget(self)
|
||||
self._names.addItems(QStringList(sorted(names, key=sort_key)))
|
||||
self._names.setSelectionMode(self._names.ExtendedSelection)
|
||||
l.addWidget(self._names)
|
||||
|
||||
self._and = QCheckBox(_('Match all selected %s names')%txt)
|
||||
l.addWidget(self._and)
|
||||
|
||||
self.bb = QDialogButtonBox(QDialogButtonBox.Ok | QDialogButtonBox.Cancel)
|
||||
self.bb.accepted.connect(self.accept)
|
||||
self.bb.rejected.connect(self.reject)
|
||||
l.addWidget(self.bb)
|
||||
|
||||
self.resize(self.sizeHint())
|
||||
|
||||
@property
|
||||
def names(self):
|
||||
for item in self._names.selectedItems():
|
||||
yield unicode(item.data(Qt.DisplayRole).toString())
|
||||
|
||||
@property
|
||||
def match_type(self):
|
||||
return ' and ' if self._and.isChecked() else ' or '
|
||||
|
||||
# }}}
|
||||
|
||||
MAX_VIRTUAL_LIBRARY_NAME_LENGTH = 40
|
||||
|
||||
def _build_full_search_string(gui):
    search_templates = (
        '',
        '{cl}',
        '{cr}',
        '(({cl}) and ({cr}))',
        '{sb}',
        '(({cl}) and ({sb}))',
        '(({cr}) and ({sb}))',
        '(({cl}) and ({cr}) and ({sb}))'
    )

    sb = gui.search.current_text
    db = gui.current_db
    cr = db.data.get_search_restriction()
    cl = db.data.get_base_restriction()
    dex = 0
    if sb:
        dex += 4
    if cr:
        dex += 2
    if cl:
        dex += 1
    template = search_templates[dex]
    return template.format(cl=cl, cr=cr, sb=sb).strip()
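The template tuple is indexed by a three-bit value: 4 when the search box holds text (sb), 2 for an additional restriction (cr), 1 for the current virtual-library expression (cl), so every combination gets the right conjunction. A worked example (values illustrative):

# cl = 'tags:"=Unread"', cr = '', sb = 'author:smith'
# dex = 4 (sb) + 0 (no cr) + 1 (cl) = 5 -> '(({cl}) and ({sb}))'
# result: '((tags:"=Unread") and (author:smith))'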
|
||||
|
||||
class CreateVirtualLibrary(QDialog): # {{{
|
||||
|
||||
def __init__(self, gui, existing_names, editing=None):
|
||||
QDialog.__init__(self, gui)
|
||||
|
||||
self.gui = gui
|
||||
self.existing_names = existing_names
|
||||
|
||||
if editing:
|
||||
self.setWindowTitle(_('Edit virtual library'))
|
||||
else:
|
||||
self.setWindowTitle(_('Create virtual library'))
|
||||
self.setWindowIcon(QIcon(I('lt.png')))
|
||||
|
||||
gl = QGridLayout()
|
||||
self.setLayout(gl)
|
||||
self.la1 = la1 = QLabel(_('Virtual library &name:'))
|
||||
gl.addWidget(la1, 0, 0)
|
||||
self.vl_name = QComboBox()
|
||||
self.vl_name.setEditable(True)
|
||||
self.vl_name.lineEdit().setMaxLength(MAX_VIRTUAL_LIBRARY_NAME_LENGTH)
|
||||
la1.setBuddy(self.vl_name)
|
||||
gl.addWidget(self.vl_name, 0, 1)
|
||||
self.editing = editing
|
||||
|
||||
self.saved_searches_label = QLabel('')
|
||||
self.saved_searches_label.setTextInteractionFlags(Qt.TextSelectableByMouse)
|
||||
gl.addWidget(self.saved_searches_label, 2, 0, 1, 2)
|
||||
|
||||
self.la2 = la2 = QLabel(_('&Search expression:'))
|
||||
gl.addWidget(la2, 1, 0)
|
||||
self.vl_text = QLineEdit()
|
||||
self.vl_text.textChanged.connect(self.search_text_changed)
|
||||
la2.setBuddy(self.vl_text)
|
||||
gl.addWidget(self.vl_text, 1, 1)
|
||||
self.vl_text.setText(_build_full_search_string(self.gui))
|
||||
|
||||
self.sl = sl = QLabel('<p>'+_('Create a virtual library based on: ')+
|
||||
('<a href="author.{0}">{0}</a>, '
|
||||
'<a href="tag.{1}">{1}</a>, '
|
||||
'<a href="publisher.{2}">{2}</a>, '
|
||||
'<a href="series.{3}">{3}</a>, '
|
||||
'<a href="search.{4}">{4}</a>.').format(_('Authors'), _('Tags'),
|
||||
_('Publishers'), _('Series'), _('Saved Searches')))
|
||||
sl.setWordWrap(True)
|
||||
sl.setTextInteractionFlags(Qt.LinksAccessibleByMouse)
|
||||
sl.linkActivated.connect(self.link_activated)
|
||||
gl.addWidget(sl, 3, 0, 1, 2)
|
||||
gl.setRowStretch(3,10)
|
||||
|
||||
self.hl = hl = QLabel(_('''
|
||||
<h2>Virtual Libraries</h2>
|
||||
|
||||
<p>Using <i>virtual libraries</i> you can restrict calibre to only show
|
||||
you books that match a search. When a virtual library is in effect, calibre
|
||||
behaves as though the library contains only the matched books. The Tag Browser
|
||||
display only the tags/authors/series/etc. that belong to the matched books and any searches
|
||||
you do will only search within the books in the virtual library. This
|
||||
is a good way to partition your large library into smaller and easier to work with subsets.</p>
|
||||
|
||||
<p>For example you can use a Virtual Library to only show you books with the Tag <i>"Unread"</i>
|
||||
or only books by <i>"My Favorite Author"</i> or only books in a particular series.</p>
|
||||
'''))
|
||||
hl.setWordWrap(True)
|
||||
hl.setFrameStyle(hl.StyledPanel)
|
||||
gl.addWidget(hl, 0, 3, 4, 1)
|
||||
|
||||
bb = QDialogButtonBox(QDialogButtonBox.Ok | QDialogButtonBox.Cancel)
|
||||
bb.accepted.connect(self.accept)
|
||||
bb.rejected.connect(self.reject)
|
||||
gl.addWidget(bb, 4, 0, 1, 0)
|
||||
|
||||
if editing:
|
||||
db = self.gui.current_db
|
||||
virt_libs = db.prefs.get('virtual_libraries', {})
|
||||
for dex,vl in enumerate(sorted(virt_libs.keys(), key=sort_key)):
|
||||
self.vl_name.addItem(vl, virt_libs.get(vl, ''))
|
||||
if vl == editing:
|
||||
self.vl_name.setCurrentIndex(dex)
|
||||
self.original_index = dex
|
||||
self.original_search = virt_libs.get(editing, '')
|
||||
self.vl_text.setText(self.original_search)
|
||||
self.new_name = editing
|
||||
self.vl_name.currentIndexChanged[int].connect(self.name_index_changed)
|
||||
self.vl_name.lineEdit().textEdited.connect(self.name_text_edited)
|
||||
|
||||
self.resize(self.sizeHint()+QSize(150, 25))
|
||||
|
||||
def search_text_changed(self, txt):
|
||||
searches = [_('Saved searches recognized in the expression:')]
|
||||
txt = unicode(txt)
|
||||
while txt:
|
||||
p = txt.partition('search:')
|
||||
if p[1]: # found 'search:'
|
||||
possible_search = p[2]
|
||||
if possible_search: # something follows the 'search:'
|
||||
if possible_search[0] == '"': # strip any quotes
|
||||
possible_search = possible_search[1:].partition('"')
|
||||
else: # find end of the search name. Is EOL, space, rparen
|
||||
sp = possible_search.find(' ')
|
||||
pp = possible_search.find(')')
|
||||
if pp < 0 or (sp > 0 and sp <= pp):
|
||||
# space in string before rparen, or neither found
|
||||
possible_search = possible_search.partition(' ')
|
||||
else:
|
||||
# rparen in string before space
|
||||
possible_search = possible_search.partition(')')
|
||||
txt = possible_search[2] # grab remainder of the string
|
||||
search_name = possible_search[0]
|
||||
if search_name.startswith('='):
|
||||
search_name = search_name[1:]
|
||||
if search_name in saved_searches().names():
|
||||
searches.append(search_name + '=' +
|
||||
saved_searches().lookup(search_name))
|
||||
else:
|
||||
txt = ''
|
||||
else:
|
||||
txt = ''
|
||||
if len(searches) > 1:
|
||||
self.saved_searches_label.setText('\n'.join(searches))
|
||||
else:
|
||||
self.saved_searches_label.setText('')
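search_text_changed() scans the expression for 'search:' references so the dialog can show what each saved search will expand to. A worked illustration, assuming a saved search named 'unread' whose stored query is tags:"=Unread":

# Expression typed into the dialog:
#   author:smith and search:"unread"
# The scanner partitions on 'search:', strips the surrounding quotes and an
# optional leading '=', finds 'unread' among saved_searches().names(), and
# the label underneath reads:
#   Saved searches recognized in the expression:
#   unread=tags:"=Unread"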
|
||||
|
||||
def name_text_edited(self, new_name):
|
||||
self.new_name = unicode(new_name)
|
||||
|
||||
def name_index_changed(self, dex):
|
||||
if self.editing and (self.vl_text.text() != self.original_search or
|
||||
self.new_name != self.editing):
|
||||
if not question_dialog(self.gui, _('Search text changed'),
|
||||
_('The virtual library name or the search text has changed. '
|
||||
'Do you want to discard these changes?'),
|
||||
default_yes=False):
|
||||
self.vl_name.blockSignals(True)
|
||||
self.vl_name.setCurrentIndex(self.original_index)
|
||||
self.vl_name.lineEdit().setText(self.new_name)
|
||||
self.vl_name.blockSignals(False)
|
||||
return
|
||||
self.new_name = self.editing = self.vl_name.currentText()
|
||||
self.original_index = dex
|
||||
self.original_search = unicode(self.vl_name.itemData(dex).toString())
|
||||
self.vl_text.setText(self.original_search)
|
||||
|
||||
    def link_activated(self, url):
        db = self.gui.current_db
        f, txt = unicode(url).partition('.')[0::2]
        if f == 'search':
            names = saved_searches().names()
        else:
            names = getattr(db, 'all_%s_names'%f)()
        d = SelectNames(names, txt, parent=self)
        if d.exec_() == d.Accepted:
            prefix = f+'s' if f in {'tag', 'author'} else f
            if f == 'search':
                search = ['(%s)'%(saved_searches().lookup(x)) for x in d.names]
            else:
                search = ['%s:"=%s"'%(prefix, x.replace('"', '\\"')) for x in d.names]
            if search:
                if not self.editing:
                    self.vl_name.lineEdit().setText(d.names.next())
                    self.vl_name.lineEdit().setCursorPosition(0)
                self.vl_text.setText(d.match_type.join(search))
                self.vl_text.setCursorPosition(0)
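link_activated() turns the names picked in the SelectNames dialog into a search expression, joined with ' and ' or ' or ' depending on the match checkbox. Worked examples (values illustrative):

# f = 'author', selected names = ['Austen', 'Tolstoy'], match type ' or ':
#   authors:"=Austen" or authors:"=Tolstoy"
# f = 'search', selected name 'unread' stored as tags:"=Unread":
#   (tags:"=Unread")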
|
||||
|
||||
def accept(self):
|
||||
n = unicode(self.vl_name.currentText()).strip()
|
||||
if not n:
|
||||
error_dialog(self.gui, _('No name'),
|
||||
_('You must provide a name for the new virtual library'),
|
||||
show=True)
|
||||
return
|
||||
|
||||
if n.startswith('*'):
|
||||
error_dialog(self.gui, _('Invalid name'),
|
||||
_('A virtual library name cannot begin with "*"'),
|
||||
show=True)
|
||||
return
|
||||
|
||||
if n in self.existing_names and n != self.editing:
|
||||
if not question_dialog(self.gui, _('Name already in use'),
|
||||
_('That name is already in use. Do you want to replace it '
|
||||
'with the new search?'),
|
||||
default_yes=False):
|
||||
return
|
||||
|
||||
v = unicode(self.vl_text.text()).strip()
|
||||
if not v:
|
||||
error_dialog(self.gui, _('No search string'),
|
||||
_('You must provide a search to define the new virtual library'),
|
||||
show=True)
|
||||
return
|
||||
|
||||
try:
|
||||
db = self.gui.library_view.model().db
|
||||
recs = db.data.search_getting_ids('', v, use_virtual_library=False)
|
||||
except ParseException as e:
|
||||
error_dialog(self.gui, _('Invalid search'),
|
||||
_('The search in the search box is not valid'),
|
||||
det_msg=e.msg, show=True)
|
||||
return
|
||||
|
||||
if not recs and not question_dialog(
|
||||
self.gui, _('Search found no books'),
|
||||
_('The search found no books, so the virtual library '
|
||||
'will be empty. Do you really want to use that search?'),
|
||||
default_yes=False):
|
||||
return
|
||||
|
||||
self.library_name = n
|
||||
self.library_search = v
|
||||
QDialog.accept(self)
|
||||
# }}}
|
||||
|
||||
class SearchRestrictionMixin(object):
|
||||
|
||||
no_restriction = _('<None>')
|
||||
|
||||
def __init__(self):
|
||||
self.search_restriction.initialize(help_text=_('Restrict to'))
|
||||
self.search_restriction.activated[int].connect(self.apply_search_restriction)
|
||||
self.library_view.model().count_changed_signal.connect(self.set_number_of_books_shown)
|
||||
self.search_restriction.setSizeAdjustPolicy(
|
||||
self.search_restriction.AdjustToMinimumContentsLengthWithIcon)
|
||||
self.search_restriction.setMinimumContentsLength(10)
|
||||
self.search_restriction.setStatusTip(self.search_restriction.toolTip())
|
||||
self.checked = QIcon(I('ok.png'))
|
||||
self.empty = QIcon(I('blank.png'))
|
||||
self.search_based_vl_name = None
|
||||
self.search_based_vl = None
|
||||
|
||||
self.virtual_library_menu = QMenu()
|
||||
|
||||
self.virtual_library.clicked.connect(self.virtual_library_clicked)
|
||||
|
||||
self.virtual_library_tooltip = \
|
||||
_('Use a "virtual library" to show only a subset of the books present in this library')
|
||||
self.virtual_library.setToolTip(self.virtual_library_tooltip)
|
||||
|
||||
self.search_restriction = ComboBoxWithHelp(self)
|
||||
self.search_restriction.setVisible(False)
|
||||
self.search_count.setText(_("(all books)"))
|
||||
self.search_restriction_tooltip = \
|
||||
_('Books display will be restricted to those matching a '
|
||||
'selected saved search')
|
||||
self.search_restriction.setToolTip(self.search_restriction_tooltip)
|
||||
self.ar_menu = QMenu(_('Additional restriction'))
|
||||
self.edit_menu = QMenu(_('Edit Virtual Library'))
|
||||
self.rm_menu = QMenu(_('Remove Virtual Library'))
|
||||
|
||||
|
||||
    def add_virtual_library(self, db, name, search):
        virt_libs = db.prefs.get('virtual_libraries', {})
        virt_libs[name] = search
        db.prefs.set('virtual_libraries', virt_libs)
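Virtual libraries are persisted as a plain name-to-search-expression dict in the per-library preferences, so creating, editing and removing one (see _remove_vl() below) are all dict updates followed by prefs.set(). A hedged sketch of the round-trip:

virt_libs = db.prefs.get('virtual_libraries', {})  # {name: search expression}
virt_libs['Unread'] = 'tags:"=Unread"'             # add or overwrite an entry
db.prefs.set('virtual_libraries', virt_libs)       # persist with the library
virt_libs.pop('Unread', None)                      # removal mirrors the same
db.prefs.set('virtual_libraries', virt_libs)       # pattern in _remove_vl()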
|
||||
|
||||
def do_create_edit(self, name=None):
|
||||
db = self.library_view.model().db
|
||||
virt_libs = db.prefs.get('virtual_libraries', {})
|
||||
cd = CreateVirtualLibrary(self, virt_libs.keys(), editing=name)
|
||||
if cd.exec_() == cd.Accepted:
|
||||
if name:
|
||||
self._remove_vl(name, reapply=False)
|
||||
self.add_virtual_library(db, cd.library_name, cd.library_search)
|
||||
if not name or name == db.data.get_base_restriction_name():
|
||||
self.apply_virtual_library(cd.library_name)
|
||||
|
||||
def virtual_library_clicked(self):
|
||||
m = self.virtual_library_menu
|
||||
m.clear()
|
||||
|
||||
a = m.addAction(_('Create Virtual Library'))
|
||||
a.triggered.connect(partial(self.do_create_edit, name=None))
|
||||
|
||||
a = self.edit_menu
|
||||
self.build_virtual_library_list(a, self.do_create_edit)
|
||||
m.addMenu(a)
|
||||
|
||||
a = self.rm_menu
|
||||
self.build_virtual_library_list(a, self.remove_vl_triggered)
|
||||
m.addMenu(a)
|
||||
|
||||
m.addSeparator()
|
||||
|
||||
db = self.library_view.model().db
|
||||
|
||||
a = self.ar_menu
|
||||
a.clear()
|
||||
a.setIcon(self.checked if db.data.get_search_restriction_name() else self.empty)
|
||||
self.build_search_restriction_list()
|
||||
m.addMenu(a)
|
||||
|
||||
m.addSeparator()
|
||||
|
||||
current_lib = db.data.get_base_restriction_name()
|
||||
|
||||
if current_lib == '':
|
||||
a = m.addAction(self.checked, self.no_restriction)
|
||||
else:
|
||||
a = m.addAction(self.empty, self.no_restriction)
|
||||
a.triggered.connect(partial(self.apply_virtual_library, library=''))
|
||||
|
||||
a = m.addAction(self.empty, _('*current search'))
|
||||
a.triggered.connect(partial(self.apply_virtual_library, library='*'))
|
||||
|
||||
if self.search_based_vl_name:
|
||||
a = m.addAction(
|
||||
self.checked if db.data.get_base_restriction_name().startswith('*')
|
||||
else self.empty,
|
||||
self.search_based_vl_name)
|
||||
a.triggered.connect(partial(self.apply_virtual_library,
|
||||
library=self.search_based_vl_name))
|
||||
|
||||
m.addSeparator()
|
||||
|
||||
virt_libs = db.prefs.get('virtual_libraries', {})
|
||||
for vl in sorted(virt_libs.keys(), key=sort_key):
|
||||
a = m.addAction(self.checked if vl == current_lib else self.empty, vl)
|
||||
a.triggered.connect(partial(self.apply_virtual_library, library=vl))
|
||||
|
||||
p = QPoint(0, self.virtual_library.height())
|
||||
self.virtual_library_menu.popup(self.virtual_library.mapToGlobal(p))
|
||||
|
||||
def apply_virtual_library(self, library=None):
|
||||
db = self.library_view.model().db
|
||||
virt_libs = db.prefs.get('virtual_libraries', {})
|
||||
if not library:
|
||||
db.data.set_base_restriction('')
|
||||
db.data.set_base_restriction_name('')
|
||||
elif library == '*':
|
||||
if not self.search.current_text:
|
||||
error_dialog(self, _('No search'),
|
||||
_('There is no current search to use'), show=True)
|
||||
return
|
||||
|
||||
txt = _build_full_search_string(self)
|
||||
try:
|
||||
db.data.search_getting_ids('', txt, use_virtual_library=False)
|
||||
except ParseException as e:
|
||||
error_dialog(self, _('Invalid search'),
|
||||
_('The search in the search box is not valid'),
|
||||
det_msg=e.msg, show=True)
|
||||
return
|
||||
|
||||
self.search_based_vl = txt
|
||||
db.data.set_base_restriction(txt)
|
||||
self.search_based_vl_name = self._trim_restriction_name('*' + txt)
|
||||
db.data.set_base_restriction_name(self.search_based_vl_name)
|
||||
elif library == self.search_based_vl_name:
|
||||
db.data.set_base_restriction(self.search_based_vl)
|
||||
db.data.set_base_restriction_name(self.search_based_vl_name)
|
||||
elif library in virt_libs:
|
||||
db.data.set_base_restriction(virt_libs[library])
|
||||
db.data.set_base_restriction_name(library)
|
||||
self.virtual_library.setToolTip(self.virtual_library_tooltip + '\n' +
|
||||
db.data.get_base_restriction())
|
||||
self._apply_search_restriction(db.data.get_search_restriction(),
|
||||
db.data.get_search_restriction_name())
|
||||
|
||||
def build_virtual_library_list(self, menu, handler):
|
||||
db = self.library_view.model().db
|
||||
virt_libs = db.prefs.get('virtual_libraries', {})
|
||||
menu.clear()
|
||||
menu.setIcon(self.empty)
|
||||
|
||||
def add_action(name, search):
|
||||
a = menu.addAction(name)
|
||||
a.triggered.connect(partial(handler, name=name))
|
||||
a.setIcon(self.empty)
|
||||
|
||||
libs = sorted(virt_libs.keys(), key=sort_key)
|
||||
if libs:
|
||||
menu.setEnabled(True)
|
||||
for n in libs:
|
||||
add_action(n, virt_libs[n])
|
||||
else:
|
||||
menu.setEnabled(False)
|
||||
|
||||
def remove_vl_triggered(self, name=None):
|
||||
if not question_dialog(self, _('Are you sure?'),
|
||||
_('Are you sure you want to remove '
|
||||
'the virtual library {0}').format(name),
|
||||
default_yes=False):
|
||||
return
|
||||
self._remove_vl(name, reapply=True)
|
||||
|
||||
def _remove_vl(self, name, reapply=True):
|
||||
db = self.library_view.model().db
|
||||
virt_libs = db.prefs.get('virtual_libraries', {})
|
||||
virt_libs.pop(name, None)
|
||||
db.prefs.set('virtual_libraries', virt_libs)
|
||||
if reapply and db.data.get_base_restriction_name() == name:
|
||||
self.apply_virtual_library('')
|
||||
|
||||
def _trim_restriction_name(self, name):
|
||||
return name[0:MAX_VIRTUAL_LIBRARY_NAME_LENGTH].strip()
|
||||
|
||||
def build_search_restriction_list(self):
m = self.ar_menu
m.clear()

current_restriction_text = None

if self.search_restriction.count() > 1:
txt = unicode(self.search_restriction.itemText(2))
if txt.startswith('*'):
current_restriction_text = txt
self.search_restriction.clear()

current_restriction = self.library_view.model().db.data.get_search_restriction_name()
m.setIcon(self.checked if current_restriction else self.empty)

def add_action(txt, index):
self.search_restriction.addItem(txt)
txt = self._trim_restriction_name(txt)
if txt == current_restriction:
a = m.addAction(self.checked, txt if txt else self.no_restriction)
else:
a = m.addAction(self.empty, txt if txt else self.no_restriction)
a.triggered.connect(partial(self.search_restriction_triggered,
action=a, index=index))

add_action('', 0)
add_action(_('*current search'), 1)
dex = 2
if current_restriction_text:
add_action(current_restriction_text, 2)
dex += 1

for n in sorted(saved_searches().names(), key=sort_key):
add_action(n, dex)
dex += 1

def search_restriction_triggered(self, action=None, index=None):
self.search_restriction.setCurrentIndex(index)
self.apply_search_restriction(index)

def apply_named_search_restriction(self, name):
if not name:
@ -29,15 +512,14 @@ class SearchRestrictionMixin(object):
r = self.search_restriction.findText(name)
if r < 0:
r = 0
if r != self.search_restriction.currentIndex():
self.search_restriction.setCurrentIndex(r)
self.apply_search_restriction(r)
self.search_restriction.setCurrentIndex(r)
self.apply_search_restriction(r)

def apply_text_search_restriction(self, search):
search = unicode(search)
if not search:
self.search_restriction.setCurrentIndex(0)
self._apply_search_restriction('')
self._apply_search_restriction('', '')
else:
s = '*' + search
if self.search_restriction.count() > 1:
@ -49,10 +531,7 @@ class SearchRestrictionMixin(object):
else:
self.search_restriction.insertItem(2, s)
self.search_restriction.setCurrentIndex(2)
self.search_restriction.setToolTip('<p>' +
self.search_restriction_tooltip +
_(' or the search ') + "'" + search + "'</p>")
self._apply_search_restriction(search)
self._apply_search_restriction(search, self._trim_restriction_name(s))

def apply_search_restriction(self, i):
if i == 1:
@ -66,18 +545,20 @@ class SearchRestrictionMixin(object):
restriction = 'search:"%s"'%(r)
else:
restriction = ''
self._apply_search_restriction(restriction)
self._apply_search_restriction(restriction, r)

def _apply_search_restriction(self, restriction):
def _apply_search_restriction(self, restriction, name):
self.saved_search.clear()
# The order below is important. Set the restriction, force a '' search
# to apply it, reset the tag browser to take it into account, then set
# the book count.
self.library_view.model().db.data.set_search_restriction(restriction)
self.library_view.model().db.data.set_search_restriction_name(name)
self.search.clear(emit_search=True)
self.tags_view.set_search_restriction(restriction)
self.tags_view.recount()
self.set_number_of_books_shown()
self.current_view().setFocus(Qt.OtherFocusReason)
self.set_window_title()

def set_number_of_books_shown(self):
db = self.library_view.model().db
@ -86,9 +567,9 @@ class SearchRestrictionMixin(object):
rows = self.current_view().row_count()
rbc = max(rows, db.data.get_search_restriction_book_count())
t = _("({0} of {1})").format(rows, rbc)
self.search_count.setStyleSheet \
('QLabel { border-radius: 8px; background-color: yellow; }')
else: # No restriction or not library view
self.search_count.setStyleSheet(
'QLabel { border-radius: 8px; background-color: yellow; }')
else: # No restriction or not library view
if not self.search.in_a_search():
t = _("(all books)")
else:
@ -96,3 +577,14 @@ class SearchRestrictionMixin(object):
self.search_count.setStyleSheet(
'QLabel { background-color: transparent; }')
self.search_count.setText(t)

if __name__ == '__main__':
from calibre.gui2 import Application
from calibre.gui2.preferences import init_gui
app = Application([])
app
gui = init_gui()
d = CreateVirtualLibrary(gui, [])
d.exec_()

@ -1,13 +1,12 @@
# -*- coding: utf-8 -*-

from __future__ import (unicode_literals, division, absolute_import, print_function)
store_version = 1 # Needed for dynamic plugin loading
store_version = 2 # Needed for dynamic plugin loading

__license__ = 'GPL 3'
__copyright__ = '2011, John Schember <john@nachtimwald.com>'
__docformat__ = 'restructuredtext en'

import random
import urllib
from contextlib import closing

@ -25,25 +24,7 @@ from calibre.gui2.store.web_store_dialog import WebStoreDialog
class GoogleBooksStore(BasicStoreConfig, StorePlugin):

def open(self, parent=None, detail_item=None, external=False):
aff_id = {
'lid': '41000000033185143',
'pubid': '21000000000352219',
'ganpub': 'k352219',
'ganclk': 'GOOG_1335334761',
}
# Use Kovid's affiliate id 30% of the time.
if random.randint(1, 10) in (1, 2, 3):
aff_id = {
'lid': '41000000031855266',
'pubid': '21000000000352583',
'ganpub': 'k352583',
'ganclk': 'GOOG_1335335464',
}

url = 'http://gan.doubleclick.net/gan_click?lid=%(lid)s&pubid=%(pubid)s' % aff_id
if detail_item:
detail_item += '&ganpub=%(ganpub)s&ganclk=%(ganclk)s' % aff_id

url = 'http://books.google.com/books'
if external or self.config.get('open_external', False):
open_url(QUrl(url_slash_cleaner(detail_item if detail_item else url)))
else:

@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-

from __future__ import (unicode_literals, division, absolute_import, print_function)
store_version = 1 # Needed for dynamic plugin loading
store_version = 2 # Needed for dynamic plugin loading

__license__ = 'GPL 3'
__copyright__ = '2011, John Schember <john@nachtimwald.com>'
@ -31,10 +31,10 @@ class KoboStore(BasicStoreConfig, StorePlugin):
if random.randint(1, 10) in (1, 2, 3):
pub_id = '0dsO3kDu/AU'

murl = 'http://click.linksynergy.com/fs-bin/click?id=%s&offerid=268429.4&type=3&subid=0' % pub_id
murl = 'http://click.linksynergy.com/fs-bin/click?id=%s&subid=&offerid=280046.1&type=10&tmpid=9310&RD_PARM1=http%%3A%%2F%%2Fkobo.com' % pub_id

if detail_item:
purl = 'http://click.linksynergy.com/link?id=%s&offerid=268429&type=2&murl=%s' % (pub_id, urllib.quote_plus(detail_item))
purl = 'http://click.linksynergy.com/link?id=%s&offerid=280046&type=2&murl=%s' % (pub_id, urllib.quote_plus(detail_item))
url = purl
else:
purl = None

@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-

from __future__ import (unicode_literals, division, absolute_import, print_function)
store_version = 1 # Needed for dynamic plugin loading
store_version = 2 # Needed for dynamic plugin loading

__license__ = 'GPL 3'
__copyright__ = '2012, John Schember <john@nachtimwald.com>'
@ -25,11 +25,19 @@ from calibre.gui2.store.web_store_dialog import WebStoreDialog
class NookUKStore(BasicStoreConfig, StorePlugin):

def open(self, parent=None, detail_item=None, external=False):
url = "http://uk.nook.com"
url = 'http://www.awin1.com/awclick.php?mid=5266&id=120917'
detail_url = 'http://www.awin1.com/cread.php?awinmid=5266&awinaffid=120917&clickref=&p='

if external or self.config.get('open_external', False):
open_url(QUrl(url_slash_cleaner(detail_item if detail_item else url)))
if detail_item:
url = detail_url + detail_item

open_url(QUrl(url_slash_cleaner(url)))
else:
if detail_item:
detail_url = detail_url + detail_item
else:
detail_url = None
d = WebStoreDialog(self.gui, url, parent, detail_item)
d.setWindowTitle(self.name)
d.set_tags(self.config.get('tags', ''))

@ -264,13 +264,8 @@ class TagsModel(QAbstractItemModel): # {{{
if rebuild:
self.rebuild_node_tree(state_map)

def set_search_restriction(self, s):
self.search_restriction = s
self.rebuild_node_tree()

def set_database(self, db):
self.beginResetModel()
self.search_restriction = None
hidden_cats = db.prefs.get('tag_browser_hidden_categories', None)
# migrate from config to db prefs
if hidden_cats is None:
@ -848,7 +843,7 @@ class TagsModel(QAbstractItemModel): # {{{
self.categories = {}

# Get the categories
if self.search_restriction:
if self.db.data.get_base_restriction() or self.db.data.get_search_restriction():
try:
data = self.db.get_categories(sort=sort,
icon_map=self.category_icon_map,

@ -232,10 +232,6 @@ class TagsView(QTreeView): # {{{
except:
pass

def set_search_restriction(self, s):
s = s if s else None
self._model.set_search_restriction(s)

def mouseMoveEvent(self, event):
dex = self.indexAt(event.pos())
if self.in_drag_drop or not dex.isValid():

@ -11,14 +11,14 @@ from base64 import b64encode

from PyQt4.Qt import (QWidget, QGridLayout, QListWidget, QSize, Qt, QUrl,
pyqtSlot, pyqtSignal, QVBoxLayout, QFrame, QLabel,
QLineEdit, QTimer, QPushButton, QIcon)
QLineEdit, QTimer, QPushButton, QIcon, QSplitter)
from PyQt4.QtWebKit import QWebView, QWebPage, QWebElement

from calibre.ebooks.oeb.display.webview import load_html
from calibre.gui2 import error_dialog, question_dialog
from calibre.gui2 import error_dialog, question_dialog, gprefs
from calibre.utils.logging import default_log

class Page(QWebPage): # {{{
class Page(QWebPage): # {{{

elem_clicked = pyqtSignal(object, object, object, object)

@ -67,7 +67,7 @@ class Page(QWebPage): # {{{
self.evaljs(self.js)
# }}}

class WebView(QWebView): # {{{
class WebView(QWebView): # {{{

elem_clicked = pyqtSignal(object, object, object, object)

@ -106,38 +106,46 @@ class ItemEdit(QWidget):

def __init__(self, parent):
QWidget.__init__(self, parent)
self.l = l = QGridLayout()
self.setLayout(l)
self.setLayout(QVBoxLayout())

self.la = la = QLabel('<b>'+_(
'Select a destination for the Table of Contents entry'))
l.addWidget(la, 0, 0, 1, 3)
self.layout().addWidget(la)
self.splitter = sp = QSplitter(self)
self.layout().addWidget(sp)
self.layout().setStretch(1, 10)
sp.setOpaqueResize(False)
sp.setChildrenCollapsible(False)

self.dest_list = dl = QListWidget(self)
dl.setMinimumWidth(250)
dl.currentItemChanged.connect(self.current_changed)
l.addWidget(dl, 1, 0, 2, 1)
sp.addWidget(dl)

w = self.w = QWidget(self)
l = w.l = QGridLayout()
w.setLayout(l)
self.view = WebView(self)
self.view.elem_clicked.connect(self.elem_clicked)
l.addWidget(self.view, 1, 1, 1, 3)
l.addWidget(self.view, 0, 0, 1, 3)
sp.addWidget(w)

self.search_text = s = QLineEdit(self)
s.setPlaceholderText(_('Search for text...'))
l.addWidget(s, 1, 0)
self.ns_button = b = QPushButton(QIcon(I('arrow-down.png')), _('Find &next'), self)
b.clicked.connect(self.find_next)
l.addWidget(b, 1, 1)
self.ps_button = b = QPushButton(QIcon(I('arrow-up.png')), _('Find &previous'), self)
l.addWidget(b, 1, 2)
b.clicked.connect(self.find_previous)

self.f = f = QFrame()
f.setFrameShape(f.StyledPanel)
f.setMinimumWidth(250)
l.addWidget(f, 1, 4, 2, 1)
self.search_text = s = QLineEdit(self)
s.setPlaceholderText(_('Search for text...'))
l.addWidget(s, 2, 1, 1, 1)
self.ns_button = b = QPushButton(QIcon(I('arrow-down.png')), _('Find &next'), self)
b.clicked.connect(self.find_next)
l.addWidget(b, 2, 2, 1, 1)
self.ps_button = b = QPushButton(QIcon(I('arrow-up.png')), _('Find &previous'), self)
l.addWidget(b, 2, 3, 1, 1)
b.clicked.connect(self.find_previous)
l.setRowStretch(1, 10)
l = f.l = QVBoxLayout()
f.setLayout(l)
sp.addWidget(f)

f.la = la = QLabel('<p>'+_(
'Here you can choose a destination for the Table of Contents\' entry'
@ -167,6 +175,10 @@ class ItemEdit(QWidget):

l.addStretch()

state = gprefs.get('toc_edit_splitter_state', None)
if state is not None:
sp.restoreState(state)

def keyPressEvent(self, ev):
if ev.key() in (Qt.Key_Return, Qt.Key_Enter) and self.search_text.hasFocus():
# Prevent pressing enter in the search box from triggering the dialog's accept() method
@ -236,6 +248,7 @@ class ItemEdit(QWidget):
if item is not None:
if where is None:
self.name.setText(item.data(0, Qt.DisplayRole).toString())
self.name.setCursorPosition(0)
toc = item.data(0, Qt.UserRole).toPyObject()
if toc.dest:
for i in xrange(self.dest_list.count()):
@ -272,7 +285,6 @@ class ItemEdit(QWidget):
loctext = _('Approximately %d%% from the top')%frac
return loctext


def elem_clicked(self, tag, frac, elem_id, loc):
self.current_frag = elem_id or loc
base = _('Location: A <%s> tag inside the file')%tag

@ -14,7 +14,7 @@ from functools import partial
from PyQt4.Qt import (QPushButton, QFrame, QVariant, QMenu, QInputDialog,
QDialog, QVBoxLayout, QDialogButtonBox, QSize, QStackedWidget, QWidget,
QLabel, Qt, pyqtSignal, QIcon, QTreeWidget, QGridLayout, QTreeWidgetItem,
QToolButton, QItemSelectionModel, QCursor)
QToolButton, QItemSelectionModel, QCursor, QKeySequence)

from calibre.ebooks.oeb.polish.container import get_container, AZW3Container
from calibre.ebooks.oeb.polish.toc import (
@ -27,7 +27,7 @@ from calibre.utils.logging import GUILog

ICON_SIZE = 24

class XPathDialog(QDialog): # {{{
class XPathDialog(QDialog): # {{{

def __init__(self, parent):
QDialog.__init__(self, parent)
@ -118,7 +118,7 @@ class XPathDialog(QDialog): # {{{
return [w.xpath for w in self.widgets if w.xpath.strip()]
# }}}

class ItemView(QFrame): # {{{
class ItemView(QFrame): # {{{

add_new_item = pyqtSignal(object, object)
delete_item = pyqtSignal()
@ -207,7 +207,6 @@ class ItemView(QFrame): # {{{
)))
l.addWidget(b)


l.addStretch()
self.w1 = la = QLabel(_('<b>WARNING:</b> calibre only supports the '
'creation of linear ToCs in AZW3 files. In a '
@ -349,7 +348,9 @@ class ItemView(QFrame): # {{{

# }}}

class TreeWidget(QTreeWidget): # {{{
class TreeWidget(QTreeWidget): # {{{

edit_item = pyqtSignal()

def __init__(self, parent):
QTreeWidget.__init__(self, parent)
@ -510,25 +511,30 @@ class TreeWidget(QTreeWidget): # {{{

def show_context_menu(self, point):
item = self.currentItem()
def key(k):
sc = unicode(QKeySequence(k | Qt.CTRL).toString(QKeySequence.NativeText))
return ' [%s]'%sc

if item is not None:
m = QMenu()
ci = unicode(item.data(0, Qt.DisplayRole).toString())
p = item.parent() or self.invisibleRootItem()
idx = p.indexOfChild(item)
if idx > 0:
m.addAction(QIcon(I('arrow-up.png')), _('Move "%s" up')%ci, self.move_up)
m.addAction(QIcon(I('arrow-up.png')), (_('Move "%s" up')%ci)+key(Qt.Key_Up), self.move_up)
if idx + 1 < p.childCount():
m.addAction(QIcon(I('arrow-down.png')), _('Move "%s" down')%ci, self.move_down)
m.addAction(QIcon(I('arrow-down.png')), (_('Move "%s" down')%ci)+key(Qt.Key_Down), self.move_down)
m.addAction(QIcon(I('trash.png')), _('Remove all selected items'), self.del_items)
if item.parent() is not None:
m.addAction(QIcon(I('back.png')), _('Unindent "%s"')%ci, self.move_left)
m.addAction(QIcon(I('back.png')), (_('Unindent "%s"')%ci)+key(Qt.Key_Left), self.move_left)
if idx > 0:
m.addAction(QIcon(I('forward.png')), _('Indent "%s"')%ci, self.move_right)
m.addAction(QIcon(I('forward.png')), (_('Indent "%s"')%ci)+key(Qt.Key_Right), self.move_right)
m.addAction(QIcon(I('edit_input.png')), _('Change the location this entry points to'), self.edit_item)
m.addAction(_('Change all selected items to title case'), self.title_case)
m.exec_(QCursor.pos())
# }}}

class TOCView(QWidget): # {{{
class TOCView(QWidget): # {{{

add_new_item = pyqtSignal(object, object)

@ -537,6 +543,7 @@ class TOCView(QWidget): # {{{
l = self.l = QGridLayout()
self.setLayout(l)
self.tocw = t = TreeWidget(self)
self.tocw.edit_item.connect(self.edit_item)
l.addWidget(t, 0, 0, 7, 3)
self.up_button = b = QToolButton(self)
b.setIcon(QIcon(I('arrow-up.png')))
@ -595,6 +602,9 @@ class TOCView(QWidget): # {{{

l.setColumnStretch(2, 10)

def edit_item(self):
self.item_view.edit_item()

def event(self, e):
if e.type() == e.StatusTip:
txt = unicode(e.tip()) or self.default_msg
@ -742,12 +752,12 @@ class TOCView(QWidget): # {{{
else:
parent = item.parent() or self.root
idx = parent.indexOfChild(item)
if where == 'after': idx += 1
if where == 'after':
idx += 1
c = self.create_item(parent, child, idx=idx)
self.tocw.setCurrentItem(c, 0, QItemSelectionModel.ClearAndSelect)
self.tocw.scrollToItem(c)


def create_toc(self):
root = TOC()

@ -799,7 +809,7 @@ class TOCView(QWidget): # {{{

# }}}

class TOCEditor(QDialog): # {{{
class TOCEditor(QDialog): # {{{

explode_done = pyqtSignal(object)
writing_done = pyqtSignal(object)
@ -857,6 +867,7 @@ class TOCEditor(QDialog): # {{{
def accept(self):
if self.stacks.currentIndex() == 2:
self.toc_view.update_item(*self.item_edit.result)
gprefs['toc_edit_splitter_state'] = bytearray(self.item_edit.splitter.saveState())
self.stacks.setCurrentIndex(1)
elif self.stacks.currentIndex() == 1:
self.working = False
@ -883,6 +894,7 @@ class TOCEditor(QDialog): # {{{
if not self.bb.isEnabled():
return
if self.stacks.currentIndex() == 2:
gprefs['toc_edit_splitter_state'] = bytearray(self.item_edit.splitter.saveState())
self.stacks.setCurrentIndex(1)
else:
self.working = False
@ -938,5 +950,5 @@ if __name__ == '__main__':
d = TOCEditor(sys.argv[-1])
d.start()
d.exec_()
del d # Needed to prevent sigsegv in exit cleanup
del d # Needed to prevent sigsegv in exit cleanup

@ -15,7 +15,7 @@ from threading import Thread
from collections import OrderedDict

from PyQt4.Qt import (Qt, SIGNAL, QTimer, QHelpEvent, QAction,
QMenu, QIcon, pyqtSignal, QUrl,
QMenu, QIcon, pyqtSignal, QUrl, QFont,
QDialog, QSystemTrayIcon, QApplication)

from calibre import prints, force_unicode
@ -47,7 +47,7 @@ from calibre.gui2.proceed import ProceedQuestion
from calibre.gui2.dialogs.message_box import JobError
from calibre.gui2.job_indicator import Pointer

class Listener(Thread): # {{{
class Listener(Thread): # {{{

def __init__(self, listener):
Thread.__init__(self)
@ -76,7 +76,7 @@ class Listener(Thread): # {{{

# }}}

class SystemTrayIcon(QSystemTrayIcon): # {{{
class SystemTrayIcon(QSystemTrayIcon): # {{{

tooltip_requested = pyqtSignal(object)

@ -98,7 +98,7 @@ _gui = None
def get_gui():
return _gui

class Main(MainWindow, MainWindowMixin, DeviceMixin, EmailMixin, # {{{
class Main(MainWindow, MainWindowMixin, DeviceMixin, EmailMixin, # {{{
TagBrowserMixin, CoverFlowMixin, LibraryViewMixin, SearchBoxMixin,
SavedSearchBoxMixin, SearchRestrictionMixin, LayoutMixin, UpdateMixin,
EbookDownloadMixin
@ -187,7 +187,6 @@ class Main(MainWindow, MainWindowMixin, DeviceMixin, EmailMixin, # {{{
else:
stmap[st.name] = st


def initialize(self, library_path, db, listener, actions, show_gui=True):
opts = self.opts
self.preferences_action, self.quit_action = actions
@ -279,6 +278,7 @@ class Main(MainWindow, MainWindowMixin, DeviceMixin, EmailMixin, # {{{
UpdateMixin.__init__(self, opts)

####################### Search boxes ########################
SearchRestrictionMixin.__init__(self)
SavedSearchBoxMixin.__init__(self)
SearchBoxMixin.__init__(self)

@ -313,9 +313,8 @@ class Main(MainWindow, MainWindowMixin, DeviceMixin, EmailMixin, # {{{
TagBrowserMixin.__init__(self, db)

######################### Search Restriction ##########################
SearchRestrictionMixin.__init__(self)
if db.prefs['gui_restriction']:
self.apply_named_search_restriction(db.prefs['gui_restriction'])
if db.prefs['virtual_lib_on_startup']:
self.apply_virtual_library(db.prefs['virtual_lib_on_startup'])

########################### Cover Flow ################################

@ -339,7 +338,6 @@ class Main(MainWindow, MainWindowMixin, DeviceMixin, EmailMixin, # {{{
if config['autolaunch_server']:
self.start_content_server()


self.keyboard_interrupt.connect(self.quit, type=Qt.QueuedConnection)

self.read_settings()
@ -393,7 +391,7 @@ class Main(MainWindow, MainWindowMixin, DeviceMixin, EmailMixin, # {{{
if not self.device_manager.is_running('Wireless Devices'):
error_dialog(self, _('Problem starting the wireless device'),
_('The wireless device driver did not start. '
'It said "%s"')%message, show=True)
'It said "%s"')%message, show=True)
self.iactions['Connect Share'].set_smartdevice_action_state()

def start_content_server(self, check_started=True):
@ -494,7 +492,7 @@ class Main(MainWindow, MainWindowMixin, DeviceMixin, EmailMixin, # {{{
path = os.path.abspath(argv[1])
if os.access(path, os.R_OK):
self.iactions['Add Books'].add_filesystem_book(path)
self.setWindowState(self.windowState() & \
self.setWindowState(self.windowState() &
~Qt.WindowMinimized|Qt.WindowActive)
self.show_windows()
self.raise_()
@ -526,7 +524,8 @@ class Main(MainWindow, MainWindowMixin, DeviceMixin, EmailMixin, # {{{

def library_moved(self, newloc, copy_structure=False, call_close=True,
allow_rebuild=False):
if newloc is None: return
if newloc is None:
return
default_prefs = None
try:
olddb = self.library_view.model().db
@ -537,7 +536,8 @@ class Main(MainWindow, MainWindowMixin, DeviceMixin, EmailMixin, # {{{
try:
db = LibraryDatabase2(newloc, default_prefs=default_prefs)
except (DatabaseException, sqlite.Error):
if not allow_rebuild: raise
if not allow_rebuild:
raise
import traceback
repair = question_dialog(self, _('Corrupted database'),
_('The library database at %s appears to be corrupted. Do '
@ -571,8 +571,8 @@ class Main(MainWindow, MainWindowMixin, DeviceMixin, EmailMixin, # {{{
db = self.library_view.model().db
self.iactions['Choose Library'].count_changed(db.count())
self.set_window_title()
self.apply_named_search_restriction('') # reset restriction to null
self.saved_searches_changed(recount=False) # reload the search restrictions combo box
self.apply_named_search_restriction('') # reset restriction to null
self.saved_searches_changed(recount=False) # reload the search restrictions combo box
self.apply_named_search_restriction(db.prefs['gui_restriction'])
for action in self.iactions.values():
action.library_changed(db)
@ -596,9 +596,19 @@ class Main(MainWindow, MainWindowMixin, DeviceMixin, EmailMixin, # {{{
# interface later
gc.collect()


def set_window_title(self):
self.setWindowTitle(__appname__ + u' - || %s ||'%self.iactions['Choose Library'].library_name())
db = self.current_db
restrictions = [x for x in (db.data.get_base_restriction_name(),
db.data.get_search_restriction_name()) if x]
restrictions = ' :: '.join(restrictions)
font = QFont()
if restrictions:
restrictions = ' :: ' + restrictions
font.setBold(True)
self.virtual_library.setFont(font)
title = u'{0} - || {1}{2} ||'.format(
__appname__, self.iactions['Choose Library'].library_name(), restrictions)
self.setWindowTitle(title)

def location_selected(self, location):
'''
@ -613,17 +623,15 @@ class Main(MainWindow, MainWindowMixin, DeviceMixin, EmailMixin, # {{{
for action in self.iactions.values():
action.location_selected(location)
if location == 'library':
self.search_restriction.setEnabled(True)
self.virtual_library_menu.setEnabled(True)
self.highlight_only_button.setEnabled(True)
else:
self.search_restriction.setEnabled(False)
self.virtual_library_menu.setEnabled(False)
self.highlight_only_button.setEnabled(False)
# Reset the view in case something changed while it was invisible
self.current_view().reset()
self.set_number_of_books_shown()


def job_exception(self, job, dialog_title=_('Conversion Error')):
if not hasattr(self, '_modeless_dialogs'):
self._modeless_dialogs = []
@ -715,7 +723,7 @@ class Main(MainWindow, MainWindowMixin, DeviceMixin, EmailMixin, # {{{
self.read_layout_settings()

def write_settings(self):
with gprefs: # Only write to gprefs once
with gprefs: # Only write to gprefs once
config.set('main_window_geometry', self.saveGeometry())
dynamic.set('sort_history', self.library_view.model().sort_history)
self.save_layout_state()
@ -748,7 +756,6 @@ class Main(MainWindow, MainWindowMixin, DeviceMixin, EmailMixin, # {{{
return False
return True


def shutdown(self, write_settings=True):
try:
db = self.library_view.model().db
@ -808,13 +815,11 @@ class Main(MainWindow, MainWindowMixin, DeviceMixin, EmailMixin, # {{{
pass
QApplication.instance().quit()


def closeEvent(self, e):
self.write_settings()
if self.system_tray_icon.isVisible():
if not dynamic['systray_msg'] and not isosx:
info_dialog(self, 'calibre', 'calibre '+ \
info_dialog(self, 'calibre', 'calibre '+
_('will keep running in the system tray. To close it, '
'choose <b>Quit</b> in the context menu of the '
'system tray.'), show_copy_button=False).exec_()

@ -15,16 +15,17 @@ from calibre.gui2 import choose_save_file, gprefs

class ImageView(QDialog):

def __init__(self, parent, current_img, current_url):
def __init__(self, parent, current_img, current_url, geom_name='viewer_image_popup_geometry'):
QDialog.__init__(self)
dw = QApplication.instance().desktop()
self.avail_geom = dw.availableGeometry(parent)
self.current_img = current_img
self.current_url = current_url
self.factor = 1.0
self.geom_name = geom_name

self.label = l = QLabel()
l.setBackgroundRole(QPalette.Base);
l.setBackgroundRole(QPalette.Base)
l.setSizePolicy(QSizePolicy.Ignored, QSizePolicy.Ignored)
l.setScaledContents(True)

@ -88,21 +89,27 @@ class ImageView(QDialog):
self.label.setPixmap(pm)
self.label.adjustSize()

def __call__(self):
def __call__(self, use_exec=False):
geom = self.avail_geom
self.label.setPixmap(self.current_img)
self.label.adjustSize()
self.resize(QSize(int(geom.width()/2.5), geom.height()-50))
geom = gprefs.get('viewer_image_popup_geometry', None)
geom = gprefs.get(self.geom_name, None)
if geom is not None:
self.restoreGeometry(geom)
self.current_image_name = unicode(self.current_url.toString()).rpartition('/')[-1]
try:
self.current_image_name = unicode(self.current_url.toString()).rpartition('/')[-1]
except AttributeError:
self.current_image_name = self.current_url
title = _('View Image: %s')%self.current_image_name
self.setWindowTitle(title)
self.show()
if use_exec:
self.exec_()
else:
self.show()

def done(self, e):
gprefs['viewer_image_popup_geometry'] = bytearray(self.saveGeometry())
gprefs[self.geom_name] = bytearray(self.saveGeometry())
return QDialog.done(self, e)

def wheelEvent(self, event):

@ -14,7 +14,7 @@ from threading import Thread
from calibre.utils.config import tweaks, prefs
from calibre.utils.date import parse_date, now, UNDEFINED_DATE, clean_date_for_sort
from calibre.utils.search_query_parser import SearchQueryParser
from calibre.utils.pyparsing import ParseException
from calibre.utils.search_query_parser import ParseException
from calibre.utils.localization import (canonicalize_lang, lang_map, get_udc)
from calibre.db.search import CONTAINS_MATCH, EQUALS_MATCH, REGEXP_MATCH, _match
from calibre.ebooks.metadata import title_sort, author_to_author_sort
@ -209,7 +209,8 @@ class ResultCache(SearchQueryParser): # {{{
self._data = []
self._map = self._map_filtered = []
self.first_sort = True
self.search_restriction = ''
self.search_restriction = self.base_restriction = ''
self.base_restriction_name = self.search_restriction_name = ''
self.search_restriction_book_count = 0
self.marked_ids_dict = {}
self.field_metadata = field_metadata
@ -365,25 +366,18 @@ class ResultCache(SearchQueryParser): # {{{
elif query in self.local_thismonth:
qd = now()
field_count = 2
elif query.endswith(self.local_daysago):
elif query.endswith(self.local_daysago) or query.endswith(self.untrans_daysago):
num = query[0:-self.local_daysago_len]
try:
qd = now() - timedelta(int(num))
except:
raise ParseException(query, len(query), 'Number conversion error', self)
field_count = 3
elif query.endswith(self.untrans_daysago):
num = query[0:-self.untrans_daysago_len]
try:
qd = now() - timedelta(int(num))
except:
raise ParseException(query, len(query), 'Number conversion error', self)
raise ParseException(_('Number conversion error: {0}').format(num))
field_count = 3
else:
try:
qd = parse_date(query, as_utc=False)
except:
raise ParseException(query, len(query), 'Date conversion error', self)
raise ParseException(_('Date conversion error: {0}').format(query))
if '-' in query:
field_count = query.count('-') + 1
else:
@ -459,8 +453,7 @@ class ResultCache(SearchQueryParser): # {{{
try:
q = cast(query) * mult
except:
raise ParseException(query, len(query),
'Non-numeric value in query', self)
raise ParseException(_('Non-numeric value in query: {0}').format(query))

for id_ in candidates:
item = self._data[id_]
@ -504,8 +497,8 @@ class ResultCache(SearchQueryParser): # {{{
if query.find(':') >= 0:
q = [q.strip() for q in query.split(':')]
if len(q) != 2:
raise ParseException(query, len(query),
'Invalid query format for colon-separated search', self)
raise ParseException(
_('Invalid query format for colon-separated search: {0}').format(query))
(keyq, valq) = q
keyq_mkind, keyq = self._matchkind(keyq)
valq_mkind, valq = self._matchkind(valq)
@ -654,7 +647,7 @@ class ResultCache(SearchQueryParser): # {{{
if invert:
matches = self.universal_set() - matches
return matches
raise ParseException(query, len(query), 'Recursive query group detected', self)
raise ParseException(_('Recursive query group detected: {0}').format(query))

# apply the limit if appropriate
if location == 'all' and prefs['limit_search_columns'] and \
@ -825,8 +818,19 @@ class ResultCache(SearchQueryParser): # {{{
return ans
self._map_filtered = ans

def _build_restriction_string(self, restriction):
if self.base_restriction:
if restriction:
return u'(%s) and (%s)' % (self.base_restriction, restriction)
else:
return self.base_restriction
else:
return restriction

def search_getting_ids(self, query, search_restriction,
set_restriction_count=False):
set_restriction_count=False, use_virtual_library=True):
if use_virtual_library:
search_restriction = self._build_restriction_string(search_restriction)
q = ''
if not query or not query.strip():
q = search_restriction
@ -847,11 +851,32 @@ class ResultCache(SearchQueryParser): # {{{
self.search_restriction_book_count = len(rv)
return rv

def get_search_restriction(self):
return self.search_restriction

def set_search_restriction(self, s):
self.search_restriction = s

def get_base_restriction(self):
return self.base_restriction

def set_base_restriction(self, s):
self.base_restriction = s

def get_base_restriction_name(self):
return self.base_restriction_name

def set_base_restriction_name(self, s):
self.base_restriction_name = s

def get_search_restriction_name(self):
return self.search_restriction_name

def set_search_restriction_name(self, s):
self.search_restriction_name = s

def search_restriction_applied(self):
return bool(self.search_restriction)
return bool(self.search_restriction) or bool((self.base_restriction))

def get_search_restriction_book_count(self):
return self.search_restriction_book_count
@ -1002,7 +1027,7 @@ class ResultCache(SearchQueryParser): # {{{
if field is not None:
self.sort(field, ascending)
self._map_filtered = list(self._map)
if self.search_restriction:
if self.search_restriction or self.base_restriction:
self.search('', return_matches=False)

# Sorting functions {{{

@ -229,6 +229,8 @@ class LibraryDatabase2(LibraryDatabase, SchemaUpgrade, CustomColumns):
('uuid', False), ('comments', True), ('id', False), ('pubdate', False),
('last_modified', False), ('size', False), ('languages', False),
]
defs['virtual_libraries'] = {}
defs['virtual_lib_on_startup'] = defs['cs_virtual_lib_on_startup'] = ''

# Migrate the bool tristate tweak
defs['bools_are_tristate'] = \
@ -279,6 +281,24 @@ class LibraryDatabase2(LibraryDatabase, SchemaUpgrade, CustomColumns):
except:
pass

# migrate the gui_restriction preference to a virtual library
gr_pref = self.prefs.get('gui_restriction', None)
if gr_pref:
virt_libs = self.prefs.get('virtual_libraries', {})
virt_libs[gr_pref] = 'search:"' + gr_pref + '"'
self.prefs['virtual_libraries'] = virt_libs
self.prefs['gui_restriction'] = ''
self.prefs['virtual_lib_on_startup'] = gr_pref

# migrate the cs_restriction preference to a virtual library
gr_pref = self.prefs.get('cs_restriction', None)
if gr_pref:
virt_libs = self.prefs.get('virtual_libraries', {})
virt_libs[gr_pref] = 'search:"' + gr_pref + '"'
self.prefs['virtual_libraries'] = virt_libs
self.prefs['cs_restriction'] = ''
self.prefs['cs_virtual_lib_on_startup'] = gr_pref

# Rename any user categories with names that differ only in case
user_cats = self.prefs.get('user_categories', [])
catmap = {}

@ -205,26 +205,32 @@ class LibraryServer(ContentServer, MobileServer, XMLServer, OPDSServer, Cache,

def set_database(self, db):
self.db = db
virt_libs = db.prefs.get('virtual_libraries', {})
sr = getattr(self.opts, 'restriction', None)
sr = db.prefs.get('cs_restriction', '') if sr is None else sr
self.set_search_restriction(sr)
if sr:
if sr in virt_libs:
sr = virt_libs[sr]
elif sr not in saved_searches().names():
prints('WARNING: Content server: search restriction ',
sr, ' does not exist')
sr = ''
else:
sr = 'search:"%s"'%sr
else:
sr = db.prefs.get('cs_virtual_lib_on_startup', '')
if sr:
if sr not in virt_libs:
prints('WARNING: Content server: virtual library ',
sr, ' does not exist')
sr = ''
else:
sr = virt_libs[sr]
self.search_restriction = sr
self.reset_caches()

def graceful(self):
cherrypy.engine.graceful()

def set_search_restriction(self, restriction):
self.search_restriction_name = restriction
if restriction:
if restriction not in saved_searches().names():
prints('WARNING: Content server: search restriction ',
restriction, ' does not exist')
self.search_restriction = ''
else:
self.search_restriction = 'search:"%s"'%restriction
else:
self.search_restriction = ''
self.reset_caches()

def setup_loggers(self):
access_file = log_access_file
error_file = log_error_file

@ -145,10 +145,7 @@ def render_rating(rating, url_prefix, container='span', prefix=None): # {{{

# }}}

def get_category_items(category, items, restriction, datatype, prefix): # {{{

if category == 'search':
items = [x for x in items if x.name != restriction]
def get_category_items(category, items, datatype, prefix): # {{{

def item(i):
templ = (u'<div title="{4}" class="category-item">'
@ -489,8 +486,7 @@ class BrowseServer(object):
if not cats and len(items) == 1:
# Only one item in category, go directly to book list
html = get_category_items(category, items,
self.search_restriction_name, datatype,
self.opts.url_prefix)
datatype, self.opts.url_prefix)
href = re.search(r'<a href="([^"]+)"', html)
if href is not None:
raise cherrypy.HTTPRedirect(href.group(1))
@ -498,8 +494,7 @@ class BrowseServer(object):
if len(items) <= self.opts.max_opds_ungrouped_items:
script = 'false'
items = get_category_items(category, items,
self.search_restriction_name, datatype,
self.opts.url_prefix)
datatype, self.opts.url_prefix)
else:
getter = lambda x: unicode(getattr(x, 'sort', x.name))
starts = set([])
@ -588,8 +583,7 @@ class BrowseServer(object):

sort = self.browse_sort_categories(entries, sort)
entries = get_category_items(category, entries,
self.search_restriction_name, datatype,
self.opts.url_prefix)
datatype, self.opts.url_prefix)
return json.dumps(entries, ensure_ascii=True)

@ -55,10 +55,11 @@ The OPDS interface is advertised via BonJour automatically.
help=_('Write process PID to the specified file'))
parser.add_option('--daemonize', default=False, action='store_true',
help='Run process in background as a daemon. No effect on windows.')
parser.add_option('--restriction', default=None,
help=_('Specifies a restriction to be used for this invocation. '
parser.add_option('--restriction', '--virtual-library', default=None,
help=_('Specifies a virtual library to be used for this invocation. '
'This option overrides any per-library settings specified'
' in the GUI'))
' in the GUI. For compatibility, if the value is not a '
'virtual library but is a saved search, that saved search is used.'))
parser.add_option('--auto-reload', default=False, action='store_true',
help=_('Auto reload server when source code changes. May not'
' work in all environments.'))