Mirror of https://github.com/kovidgoyal/calibre.git (synced 2025-07-09 03:04:10 -04:00)

Commit 40be364965: Imported most recent upstream changes
@@ -23,6 +23,12 @@ CALIBREPLUGINS = os.path.join(CALIBRESRC, 'calibre', 'plugins')
sys.path.insert(0, CALIBRESRC)
from calibre import __version__
from calibre.parallel import PARALLEL_FUNCS
from calibre.web.feeds.recipes import recipes
hiddenimports = map(lambda x: x[0], PARALLEL_FUNCS.values())
hiddenimports += ['lxml._elementpath', 'keyword', 'codeop', 'commands', 'shlex', 'pydoc']
hiddenimports += map(lambda x: x.__module__, recipes)
open(os.path.join(PYINSTALLER, 'hooks', 'hook-calibre.parallel.py'), 'wb').write('hiddenimports = %s'%repr(hiddenimports))

def run_pyinstaller(args=sys.argv):
    subprocess.check_call(('/usr/bin/sudo', 'chown', '-R', 'kovid:users', glob.glob('/usr/lib/python*/site-packages/')[-1]))
@@ -60,18 +66,7 @@ excludes = ['gtk._gtk', 'gtk.glade', 'qt', 'matplotlib.nxutils', 'matplotlib._cn
            'matplotlib._transforms', 'matplotlib._agg', 'matplotlib.backends._backend_agg',
            'matplotlib.axes', 'matplotlib', 'matplotlib.pyparsing',
            'TKinter', 'atk', 'gobject._gobject', 'pango', 'PIL', 'Image', 'IPython']
temp = ['keyword', 'codeop']

recipes = ['calibre', 'web', 'feeds', 'recipes']
prefix = '.'.join(recipes)+'.'
recipes_toc = []
extra_toc = [
    ('keyword', '/usr/lib/python2.5/keyword.pyo', 'PYSOURCE'),
    ('codeop', '/usr/lib/python2.5/codeop.pyo', 'PYSOURCE')
]
for f in glob.glob(os.path.join(CALIBRESRC, *(recipes+['*.py']))):
    py_compile.compile(f, doraise=True)
    recipes_toc.append((prefix + os.path.basename(f).partition('.')[0], f+'o', 'PYSOURCE'))

sys.path.insert(0, CALIBRESRC)
from calibre.linux import entry_points
@@ -90,9 +85,6 @@ analyses = [Analysis([os.path.join(HOMEPATH,'support/_mountzlib.py'), os.path.jo
pyz = TOC()
binaries = TOC()

pyz += extra_toc
pyz += recipes_toc

for a in analyses:
    pyz = a.pure + pyz
    binaries = a.binaries + binaries
@@ -133,7 +125,7 @@ for dirpath, dirnames, filenames in os.walk(plugdir):
binaries += plugins

manifest = '/tmp/manifest'
open(manifest, 'wb').write('\\n'.join(executables))
open(manifest, 'wb').write('\n'.join(executables))
version = '/tmp/version'
open(version, 'wb').write(__version__)
coll = COLLECT(binaries, pyz,
@@ -1,7 +1,7 @@
''' E-book management software'''
__license__ = 'GPL v3'
__copyright__ = '2008, Kovid Goyal <kovid at kovidgoyal.net>'
__version__ = '0.4.76'
__version__ = '0.4.77'
__docformat__ = "epytext"
__author__ = "Kovid Goyal <kovid at kovidgoyal.net>"
__appname__ = 'calibre'
@@ -15,6 +15,7 @@
# 02110-1301 USA
######################### END LICENSE BLOCK #########################


__version__ = "1.0"

import re
@@ -55,6 +56,9 @@ def xml_to_unicode(raw, verbose=False):
        print 'WARNING: Encoding detection confidence %d%%'%(chardet['confidence']*100)
    CHARSET_ALIASES = { "macintosh" : "mac-roman",
                        "x-sjis" : "shift-jis" }
    if not encoding:
        from calibre import preferred_encoding
        encoding = preferred_encoding
    if encoding:
        encoding = encoding.lower()
        if CHARSET_ALIASES.has_key(encoding):
@@ -43,6 +43,7 @@
SMALL{ font-size : 80% }
BLOCKQUOTE{ margin-left :4em; margin-top:1em; margin-right:0.2em;}
HR{ color : Black }
DIV{font-family : "Times New Roman", Times, serif; text-align : justify}
UL{margin-left: 0}
.epigraph{width:50%; margin-left : 35%;}
</style>
@@ -21,11 +21,11 @@ def get_metadata(stream):
    if comments and len(comments) > 1:
        comments = comments.p.contents[0]
    series = soup.find("sequence")
    series_name = series['name']
    # series_index = series.index
    mi = MetaInformation(title, author)
    mi.comments = comments
    mi.category = series_name
    if series:
        mi.series = series.get('name', None)
    # mi.series_index = series_index
    return mi

@@ -176,8 +176,11 @@ class MobiReader(object):
        for elem in soup.findAll(['metadata', 'guide']):
            elem.extract()
        htmlfile = os.path.join(output_dir, self.name+'.html')
        for ref in guide.findAll('reference', href=True):
            ref['href'] = os.path.basename(htmlfile)+ref['href']
        try:
            for ref in guide.findAll('reference', href=True):
                ref['href'] = os.path.basename(htmlfile)+ref['href']
        except AttributeError:
            pass
        open(htmlfile, 'wb').write(unicode(soup).encode('utf8'))
        self.htmlfile = htmlfile

@@ -20,11 +20,20 @@ if iswindows:
    import warnings
    warnings.simplefilter('ignore', DeprecationWarning)

def available_heights():
    desktop = QCoreApplication.instance().desktop()
    return map(lambda x: x.height(), map(desktop.availableGeometry, range(desktop.numScreens())))

def available_height():
    desktop = QCoreApplication.instance().desktop()
    return desktop.availableGeometry().height()

def max_available_height():
    return max(available_heights())

def min_available_height():
    return min(available_heights())

def available_width():
    desktop = QCoreApplication.instance().desktop()
    return desktop.availableGeometry().width()
@@ -35,6 +35,7 @@ class ConfigDialog(QDialog, Ui_Dialog):
        rn = settings.get('use roman numerals for series number', True)
        self.timeout.setValue(settings.get('network timeout', 5))
        self.roman_numerals.setChecked(rn)
        self.new_version_notification.setChecked(settings.get('new version notification', True))
        self.directory_list.addItems(dirs)
        self.connect(self.add_button, SIGNAL('clicked(bool)'), self.add_dir)
        self.connect(self.remove_button, SIGNAL('clicked(bool)'), self.remove_dir)
@@ -88,6 +89,7 @@ class ConfigDialog(QDialog, Ui_Dialog):
    def accept(self):
        settings = Settings()
        settings.set('use roman numerals for series number', bool(self.roman_numerals.isChecked()))
        settings.set('new version notification', bool(self.new_version_notification.isChecked()))
        settings.set('network timeout', int(self.timeout.value()))
        path = qstring_to_unicode(self.location.text())
        self.final_columns = [self.columns.item(i).checkState() == Qt.Checked for i in range(self.columns.count())]
@@ -78,6 +78,14 @@
<number>0</number>
</property>
<widget class="QWidget" name="page_3" >
<property name="geometry" >
<rect>
<x>0</x>
<y>0</y>
<width>595</width>
<height>638</height>
</rect>
</property>
<layout class="QVBoxLayout" name="verticalLayout" >
<item>
<layout class="QVBoxLayout" name="_2" >
@@ -124,6 +132,13 @@
</property>
</widget>
</item>
<item>
<widget class="QCheckBox" name="new_version_notification" >
<property name="text" >
<string>Show notification when &new version is available</string>
</property>
</widget>
</item>
<item>
<layout class="QGridLayout" name="gridLayout_2" >
<item row="0" column="0" >
@@ -262,7 +262,7 @@ class MetadataSingleDialog(QDialog, Ui_MetadataSingleDialog):
            self.cover_changed = True
            self.cpixmap = pix
        except LibraryThingError, err:
            error_dialog(self, _('Could not fetch cover'), _('<b>Could not fetch cover.</b><br/>')+str(err)).exec_()
            error_dialog(self, _('Could not fetch cover'), _('<b>Could not fetch cover.</b><br/>')+unicode(err)).exec_()
        finally:
            self.fetch_cover_button.setEnabled(True)
            self.unsetCursor()
@@ -13,7 +13,8 @@
<string>Configure Viewer</string>
</property>
<property name="windowIcon" >
<iconset resource="../images.qrc" >:/images/config.svg</iconset>
<iconset resource="../images.qrc" >
<normaloff>:/images/config.svg</normaloff>:/images/config.svg</iconset>
</property>
<layout class="QGridLayout" >
<item row="0" column="0" colspan="2" >
@@ -39,20 +40,17 @@
<enum>Qt::Horizontal</enum>
</property>
<property name="standardButtons" >
<set>QDialogButtonBox::Cancel|QDialogButtonBox::NoButton|QDialogButtonBox::Ok</set>
<set>QDialogButtonBox::Cancel|QDialogButtonBox::Ok</set>
</property>
</widget>
</item>
<item row="2" column="0" colspan="2" >
<widget class="QLabel" name="label" >
<property name="windowModality" >
<enum>Qt::WindowModal</enum>
</property>
<property name="frameShape" >
<enum>QFrame::Box</enum>
</property>
<property name="text" >
<string><b>Changes will only take affect after a restart.</string>
<string><b>Changes will only take effect after a restart.</b></string>
</property>
<property name="textFormat" >
<enum>Qt::RichText</enum>
@@ -20,7 +20,7 @@ from calibre.gui2 import APP_UID, warning_dialog, choose_files, error_dialog, \
                         initialize_file_icon_provider, question_dialog,\
                         pixmap_to_data, choose_dir, ORG_NAME, \
                         set_sidebar_directories, \
                         SingleApplication, Application, available_height
                         SingleApplication, Application, available_height, max_available_height
from calibre.gui2.cover_flow import CoverFlow, DatabaseImages
from calibre.library.database import LibraryDatabase
from calibre.gui2.update import CheckForUpdates
@@ -46,8 +46,6 @@ from calibre.ebooks.metadata import MetaInformation
from calibre.ebooks import BOOK_EXTENSIONS
from calibre.ebooks.lrf import preferred_source_formats as LRF_PREFERRED_SOURCE_FORMATS



class Main(MainWindow, Ui_MainWindow):

    def set_default_thumbnail(self, height):
@@ -233,7 +231,7 @@ class Main(MainWindow, Ui_MainWindow):
            self.status_bar.cover_flow_button.disable(pictureflowerror)


        self.setMaximumHeight(available_height())
        self.setMaximumHeight(max_available_height())

        ####################### Setup device detection ########################
        self.detector = DeviceDetector(sleep_time=2000)
@@ -519,7 +517,7 @@ class Main(MainWindow, Ui_MainWindow):
        view = self.card_view if on_card else self.memory_view
        view.model().resort(reset=False)
        view.model().research()
        if memory[1]:
        if memory and memory[1]:
            rows = map(self.library_view.model().db.index, memory[1])
            self.library_view.model().delete_books(rows)

@@ -634,7 +632,7 @@ class Main(MainWindow, Ui_MainWindow):
        metadata = iter(metadata)
        _files = self.library_view.model().get_preferred_formats(rows,
                    self.device_manager.device_class.FORMATS, paths=True)
        files = [f.name for f in _files]
        files = [getattr(f, 'name', None) for f in _files]
        bad, good, gf, names = [], [], [], []
        for f in files:
            mi = metadata.next()
@@ -1112,7 +1110,7 @@ class Main(MainWindow, Ui_MainWindow):
            msg = ' '.join(msgs)
            print >>file, msg

        def safe_unicode(self, arg):
        def safe_unicode(arg):
            if not arg:
                arg = unicode(repr(arg))
            if isinstance(arg, str):
@@ -1211,7 +1209,7 @@ class Main(MainWindow, Ui_MainWindow):
                    device=self.device_info)))
        self.vanity.update()
        s = Settings()
        if s.get('update to version %s'%version, True):
        if s.get('new version notification', True) and s.get('update to version %s'%version, True):
            d = question_dialog(self, _('Update available'), _('%s has been updated to version %s. See the <a href="http://calibre.kovidgoyal.net/wiki/Changelog">new features</a>. Visit the download page?')%(__appname__, version))
            if d.exec_() == QMessageBox.Yes:
                url = 'http://calibre.kovidgoyal.net/download_'+('windows' if iswindows else 'osx' if isosx else 'linux')
@@ -7,10 +7,10 @@ See ftp://ftp.rarlabs.com/rar/unrarsrc-3.7.5.tar.gz
"""
import os, ctypes
from ctypes import Structure, c_char_p, c_uint, c_void_p, POINTER, \
                   byref, c_wchar_p, CFUNCTYPE, c_int, c_long, c_char, c_wchar
                   byref, c_wchar_p, c_int, c_char, c_wchar
from StringIO import StringIO

from calibre import iswindows, isosx, load_library
from calibre import iswindows, load_library

_librar_name = 'libunrar'
cdll = ctypes.cdll
@@ -360,7 +360,14 @@ def install_man_pages(fatal_errors):
                    '--section', '1', '--no-info', '--include',
                    f.name, '--manual', __appname__)
        manfile = os.path.join(manpath, prog+'.1'+__appname__+'.bz2')
        p = subprocess.Popen(help2man, stdout=subprocess.PIPE)
        try:
            p = subprocess.Popen(help2man, stdout=subprocess.PIPE)
        except OSError, err:
            import errno
            if err.errno != errno.ENOENT:
                raise
            print 'Failed to install MAN pages as help2man is missing from your system'
            break
        raw = re.compile(r'^\.IP\s*^([A-Z :]+)$', re.MULTILINE).sub(r'.SS\n\1', p.stdout.read())
        if not raw.strip():
            print 'Unable to create MAN page for', prog
@@ -4,7 +4,7 @@ __copyright__ = '2008, Kovid Goyal kovid@kovidgoyal.net'
__docformat__ = 'restructuredtext en'

'''
Download and install the linux binary.
Download and install the linux binary.
'''
import sys, os, shutil, tarfile, subprocess, tempfile, urllib2, re, stat

@@ -121,6 +121,7 @@ class WorkerMother(object):
    def __init__(self):
        ext = 'windows' if iswindows else 'osx' if isosx else 'linux'
        self.os = os # Needed incase cleanup called when interpreter is shutting down
        self.env = {}
        if iswindows:
            self.executable = os.path.join(os.path.dirname(sys.executable),
                    'calibre-parallel.exe' if isfrozen else 'Scripts\\calibre-parallel.exe')
@@ -135,13 +136,14 @@ class WorkerMother(object):

            self.prefix += 'import sys; sys.frameworks_dir = "%s"; sys.frozen = "macosx_app"; '%fd
            self.prefix += 'sys.path.insert(0, %s); '%repr(sp)
            self.env = {}
            if fd not in os.environ['PATH']:
                self.env['PATH'] = os.environ['PATH']+':'+fd
            self.env['PYTHONHOME'] = resources
        else:
            self.executable = os.path.join(getattr(sys, 'frozen_path'), 'calibre-parallel') \
                if isfrozen else 'calibre-parallel'
            if isfrozen:
                self.env['LD_LIBRARY_PATH'] = getattr(sys, 'frozen_path') + ':' + os.environ.get('LD_LIBRARY_PATH', '')

        self.spawn_worker_windows = lambda arg : self.spawn_free_spirit_windows(arg, type='worker')
        self.spawn_worker_linux = lambda arg : self.spawn_free_spirit_linux(arg, type='worker')
@@ -176,6 +178,7 @@ class WorkerMother(object):
    def get_env(self):
        env = dict(os.environ)
        env['CALIBRE_WORKER'] = '1'
        env['ORIGWD'] = os.path.abspath(os.getcwd())
        if hasattr(self, 'env'):
            env.update(self.env)
        return env
@@ -189,7 +192,8 @@ class WorkerMother(object):

    def spawn_free_spirit_linux(self, arg, type='free_spirit'):
        cmdline = [self.executable, arg]
        child = WorkerStatus(subprocess.Popen(cmdline, env=self.get_env()))
        child = WorkerStatus(subprocess.Popen(cmdline,
            env=self.get_env(), cwd=getattr(sys, 'frozen_path', None)))
        atexit.register(self.cleanup_child_linux, child)
        return child

@@ -341,8 +345,11 @@ class Overseer(object):
            pass
        else:
            try:
                self.os.kill(self.worker_pid, self.signal.SIGKILL)
                time.sleep(0.05)
                try:
                    self.os.kill(self.worker_pid, self.signal.SIGKILL)
                    time.sleep(0.5)
                finally:
                    self.worker_status.kill()
            except:
                pass

@@ -604,7 +611,7 @@ class BufferedSender(object):
        self.wbuf.append(msg)

    def send(self):
        if select([self.socket], [], [], 0)[0]:
        if callable(select) and select([self.socket], [], [], 0)[0]:
            msg = read(self.socket)
            if msg == 'PING:':
                write(self.socket, 'OK')
src/calibre/trac/bzr_commit_plugin.py (new file, 115 lines)
@@ -0,0 +1,115 @@
#!/usr/bin/env python

__license__ = 'GPL v3'
__copyright__ = '2008, Kovid Goyal kovid@kovidgoyal.net'
__docformat__ = 'restructuredtext en'

'''
Plugin to make the commit command automatically close bugs when the commit
message contains `Fix #number` or `Implement #number`. Also updates the commit
message with the summary of the closed bug. It also sets the `--fixes` metadata
appropriately. Currently only works with a Trac bug repository with the XMLRPC
plugin enabled.

To use, copy this file into `~/.bazaar/plugins` and add the following to branch.conf
in the working tree you want to use it with::

    trac_reponame_url = <url>
    trac_reponame_username = <username>
    trac_reponame_password = <password>

'''
import os, re, xmlrpclib
from bzrlib.builtins import cmd_commit as _cmd_commit, tree_files
from bzrlib import branch
import bzrlib


class cmd_commit(_cmd_commit):

    @classmethod
    def trac_url(self, username, password, url):
        return url.replace('//', '//%s:%s@'%(username, password))+'/login/xmlrpc'

    def get_trac_summary(self, bug, url):
        server = xmlrpclib.ServerProxy(url)
        try:
            attributes = server.ticket.get(int(bug))[-1]
            return attributes['summary']
        except:
            raise
            pass


    def expand_bug(self, msg, nick, config, bug_tracker, type='trac'):
        prefix = '%s_%s_'%(type, nick)
        username = config.get_user_option(prefix+'username')
        password = config.get_user_option(prefix+'password')
        close_bug = config.get_user_option(prefix+'pattern')
        if close_bug is None:
            close_bug = r'(Fix|Implement)\s+#(\d+)'
        close_bug_pat = re.compile(close_bug, re.IGNORECASE)
        match = close_bug_pat.search(msg)
        if not match:
            return msg, None, None, None
        action, bug = match.group(1), match.group(2)
        summary = ''
        if type == 'trac':
            url = self.trac_url(username, password, bug_tracker)
            summary = self.get_trac_summary(bug, url)
        if summary:
            msg = msg.replace('#%s'%bug, '#%s (%s)'%(bug, summary))
        return msg, bug, url, action


    def get_bugtracker(self, basedir, type='trac'):
        config = os.path.join(basedir, '.bzr', 'branch', 'branch.conf')
        bugtracker, nick = None, None
        if os.access(config, os.R_OK):
            for line in open(config).readlines():
                match = re.search(r'%s_(\S+)_url\s*=\s*(\S+)'%type, line)
                if match:
                    nick, bugtracker = match.group(1), match.group(2)
                    break
        return nick, bugtracker

    def expand_message(self, msg, tree):
        nick, bugtracker = self.get_bugtracker(tree.basedir, type='trac')
        if not bugtracker:
            return msg
        config = branch.Branch.open(tree.basedir).get_config()
        msg, bug, url, action = self.expand_bug(msg, nick, config, bugtracker)

        return msg, bug, url, action, nick, config

    def run(self, message=None, file=None, verbose=False, selected_list=None,
            unchanged=False, strict=False, local=False, fixes=None,
            author=None, show_diff=False):
        if message:
            message, bug, url, action, nick, config = \
                self.expand_message(message, tree_files(selected_list)[0])

            if nick and bug and not fixes:
                fixes = [nick+':'+bug]

        ret = _cmd_commit.run(self, message=message, file=file, verbose=verbose,
                              selected_list=selected_list, unchanged=unchanged,
                              strict=strict, local=local, fixes=fixes,
                              author=author, show_diff=show_diff)
        if message and bug and action and nick and config:
            self.close_bug(bug, action, url, config)
        return ret

    def close_bug(self, bug, action, url, config):
        nick = config.get_nickname()
        suffix = config.get_user_option('bug_close_comment')
        if suffix is None:
            suffix = 'The fix will be in the next release.'
        action = action+'ed'
        msg = '%s in branch %s. %s'%(action, nick, suffix)
        server = xmlrpclib.ServerProxy(url)
        server.ticket.update(int(bug), msg,
                             {'status':'closed', 'resolution':'fixed'},
                             False)

bzrlib.commands.register_command(cmd_commit)
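The docstring above lists the branch.conf keys the plugin reads; a filled-in example (the nickname, URL and credentials here are hypothetical, not part of the commit) would look like:

    trac_calibre_url = http://bugs.example.com/trac
    trac_calibre_username = kovid
    trac_calibre_password = secret

With a configuration like that in place, a commit message such as "Fix #1234" would, per the code above, be rewritten to include the Trac ticket summary, passed along as the `--fixes` metadata (here `calibre:1234`), and the ticket closed via XMLRPC after the commit succeeds.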
@@ -1,7 +1,7 @@
'''
Trac Macro to generate an end-user Changelog from the svn logs.
'''
import re, collections
import re, collections, time

from bzrlib import log as blog, branch

@@ -33,7 +33,8 @@ class ChangelogFormatter(blog.LogFormatter):
        if match:
            if self.current_entry is not None:
                self.entries.append((self.current_entry, set(self.messages)))
            self.current_entry = match.group(1)
            timestamp = r.rev.timezone + r.rev.timestamp
            self.current_entry = match.group(1) + time.strftime(' (%d %b, %Y)', time.gmtime(timestamp))
            self.messages = collections.deque()

        else:
[Nine file diffs suppressed because they are too large]

src/calibre/translations/pt.po (new file, 3006 lines; diff suppressed because it is too large)

[Three more file diffs suppressed because they are too large]
@@ -130,7 +130,7 @@ lib.FcConfigBuildFonts.restype = c_int


# Initialize the fontconfig library. This has to be done manually
# for the OS X bundle as it has its own private fontconfig.
# for the OS X bundle as it may have its own private fontconfig.
if hasattr(sys, 'frameworks_dir'):
    config_dir = os.path.join(os.path.dirname(getattr(sys, 'frameworks_dir')), 'Resources', 'fonts')
    if isinstance(config_dir, unicode):
src/calibre/utils/pyparsing.py (new file, 3600 lines; diff suppressed because it is too large)

src/calibre/utils/search_query_parser.py (new file, 523 lines)
@@ -0,0 +1,523 @@
#!/usr/bin/env python
# encoding: utf-8
__license__ = 'GPL v3'
__copyright__ = '2008, Kovid Goyal kovid@kovidgoyal.net'
__docformat__ = 'restructuredtext en'

'''
A parser for search queries with a syntax very similar to that used by
the Google search engine.

For details on the search query syntax see :class:`SearchQueryParser`.
To use the parser, subclass :class:`SearchQueryParser` and implement the
methods :method:`SearchQueryParser.universal_set` and
:method:`SearchQueryParser.get_matches`. See for example :class:`Tester`.

If this module is run, it will perform a series of unit tests.
'''

import sys, string, operator

from calibre.utils.pyparsing import Keyword, Group, Forward, CharsNotIn, Suppress, \
        OneOrMore, oneOf, CaselessLiteral, Optional, NoMatch


class SearchQueryParser(object):
    '''
    Parses a search query.

    A search query consists of tokens. The tokens can be combined using
    the `or`, `and` and `not` operators as well as grouped using parentheses.
    When no operator is specified between two tokens, `and` is assumed.

    Each token is a string of the form `location:query`. `location` is a string
    from :member:`LOCATIONS`. It is optional. If it is omitted, it is assumed to
    be `all`. `query` is an arbitrary string that must not contain parentheses.
    If it contains whitespace, it should be quoted by enclosing it in `"` marks.

    Examples::

      * `Asimov` [search for the string "Asimov" in location `all`]
      * `comments:"This is a good book"` [search for "This is a good book" in `comments`]
      * `author:Asimov tag:unread` [search for books by Asimov that have been tagged as unread]
      * `author:Asimov or author:Hardy` [search for books by Asimov or Hardy]
      * `(author:Asimov or author:Hardy) and not tag:read` [search for unread books by Asimov or Hardy]
    '''

    LOCATIONS = [
        'tag',
        'title',
        'author',
        'publisher',
        'series',
        'comments',
        'format',
        'all',
    ]

    @staticmethod
    def run_tests(parser, result, tests):
        failed = []
        for test in tests:
            print '\tTesting:', test[0],
            res = parser.parseString(test[0])
            if list(res.get(result, None)) == test[1]:
                print 'OK'
            else:
                print 'FAILED:', 'Expected:', test[1], 'Got:', list(res.get(result, None))
                failed.append(test[0])
        return failed

    def __init__(self, test=False):
        self._tests_failed = False
        # Define a token
        locations = map(lambda x : CaselessLiteral(x)+Suppress(':'),
                        self.LOCATIONS)
        location = NoMatch()
        for l in locations:
            location |= l
        location = Optional(location, default='all')
        word_query = CharsNotIn(string.whitespace + '()')
        quoted_query = Suppress('"')+CharsNotIn('"')+Suppress('"')
        query = quoted_query | word_query
        Token = Group(location + query).setResultsName('token')

        if test:
            print 'Testing Token parser:'
            failed = SearchQueryParser.run_tests(Token, 'token',
                (
                    ('tag:asd', ['tag', 'asd']),
                    ('ddsä', ['all', 'ddsä']),
                    ('"one two"', ['all', 'one two']),
                    ('title:"one two"', ['title', 'one two']),
                )
            )

        Or = Forward()

        Parenthesis = Group(
                Suppress('(') + Or + Suppress(')')
            ).setResultsName('parenthesis') | Token


        Not = Forward()
        Not << (Group(
                Suppress(Keyword("not", caseless=True)) + Not
            ).setResultsName("not") | Parenthesis)

        And = Forward()
        And << (Group(
                Not + Suppress(Keyword("and", caseless=True)) + And
            ).setResultsName("and") | Group(
                Not + OneOrMore(~oneOf("and or") + And)
            ).setResultsName("and") | Not)

        Or << (Group(
                And + Suppress(Keyword("or", caseless=True)) + Or
            ).setResultsName("or") | And)

        if test:
            Or.validate()
            self._tests_failed = bool(failed)

        self._parser = Or
        #self._parser.setDebug(True)
        self.parse('(tolstoy)')
        self._parser.setDebug(False)


    def parse(self, query):
        res = self._parser.parseString(query)[0]
        return self.evaluate(res)

    def method(self, group_name):
        return getattr(self, 'evaluate_'+group_name)

    def evaluate(self, parse_result):
        return self.method(parse_result.getName())(parse_result)

    def evaluate_and(self, argument):
        return self.evaluate(argument[0]).intersection(self.evaluate(argument[1]))

    def evaluate_or(self, argument):
        return self.evaluate(argument[0]).union(self.evaluate(argument[1]))

    def evaluate_not(self, argument):
        return self.universal_set().difference(self.evaluate(argument[0]))

    def evaluate_parenthesis(self, argument):
        return self.evaluate(argument[0])

    def evaluate_token(self, argument):
        return self.get_matches(argument[0], argument[1])

    def get_matches(self, location, query):
        '''
        Should return the set of matches for :param:'location` and :param:`query`.

        :param:`location` is one of the items in :member:`SearchQueryParser.LOCATIONS`.
        :param:`query` is a string literal.
        '''
        return set([])

    def universal_set(self):
        '''
        Should return the set of all matches.
        '''
        return set([])


class Tester(SearchQueryParser):

    texts = {
        1: [u'Eugenie Grandet', u'Honor\xe9 de Balzac', u'manybooks.net', u'lrf'],
        2: [u'Fanny Hill', u'John Cleland', u'manybooks.net', u'lrf'],
        3: [u'Persuasion', u'Jane Austen', u'manybooks.net', u'lrf'],
        4: [u'Psmith, Journalist', u'P. G. Wodehouse', u'Some Publisher', u'lrf'],
        5: [u'The Complete Works of William Shakespeare', u'William Shakespeare', u'manybooks.net', u'lrf'],
        6: [u'The History of England, Volume I', u'David Hume', u'manybooks.net', u'lrf'],
        7: [u'Someone Comes to Town, Someone Leaves Town', u'Cory Doctorow', u'Tor Books', u'lrf'],
        8: [u'Stalky and Co.', u'Rudyard Kipling', u'manybooks.net', u'lrf'],
        9: [u'A Game of Thrones', u'George R. R. Martin', None, u'lrf,rar'],
        10: [u'A Clash of Kings', u'George R. R. Martin', None, u'lrf,rar'],
        11: [u'A Storm of Swords', u'George R. R. Martin', None, u'lrf,rar'],
        12: [u'Biggles - Pioneer Air Fighter', u'W. E. Johns', None, u'lrf,rtf'],
        13: [u'Biggles of the Camel Squadron', u'W. E. Johns', u'London:Thames, (1977)', u'lrf,rtf'],
        14: [u'A Feast for Crows', u'George R. R. Martin', None, u'lrf,rar'],
        15: [u'Cryptonomicon', u'Neal Stephenson', None, u'lrf,rar'],
        16: [u'Quicksilver', u'Neal Stephenson', None, u'lrf,zip'],
        17: [u'The Comedies of William Shakespeare', u'William Shakespeare', None, u'lrf'],
        18: [u'The Histories of William Shakespeare', u'William Shakespeare', None, u'lrf'],
        19: [u'The Tragedies of William Shakespeare', u'William Shakespeare', None, u'lrf'],
        20: [u'An Ideal Husband', u'Oscar Wilde', u'manybooks.net', u'lrf'],
        21: [u'Flight of the Nighthawks', u'Raymond E. Feist', None, u'lrf,rar'],
        22: [u'Into a Dark Realm', u'Raymond E. Feist', None, u'lrf,rar'],
        23: [u'The Sundering', u'Walter Jon Williams', None, u'lrf,rar'],
        24: [u'The Praxis', u'Walter Jon Williams', None, u'lrf,rar'],
        25: [u'Conventions of War', u'Walter Jon Williams', None, u'lrf,rar'],
        26: [u'Banewreaker', u'Jacqueline Carey', None, u'lrf,rar'],
        27: [u'Godslayer', u'Jacqueline Carey', None, u'lrf,rar'],
        28: [u"Kushiel's Scion", u'Jacqueline Carey', None, u'lrf,rar'],
        29: [u'Underworld', u'Don DeLillo', None, u'lrf,rar'],
        30: [u'Genghis Khan and The Making of the Modern World', u'Jack Weatherford', u'Three Rivers Press', u'lrf,zip'],
        31: [u'The Best and the Brightest', u'David Halberstam', u'Modern Library', u'lrf,zip'],
        32: [u'The Killer Angels', u'Michael Shaara', None, u'html,lrf'],
        33: [u'Band Of Brothers', u'Stephen E Ambrose', None, u'lrf,txt'],
        34: [u'The Gates of Rome', u'Conn Iggulden', None, u'lrf,rar'],
        35: [u'The Death of Kings', u'Conn Iggulden', u'Bantam Dell', u'lit,lrf'],
        36: [u'The Field of Swords', u'Conn Iggulden', None, u'lrf,rar'],
        37: [u'Masterman Ready', u'Marryat, Captain Frederick', None, u'lrf'],
        38: [u'With the Lightnings', u'David Drake', u'Baen Publishing Enterprises', u'lit,lrf'],
        39: [u'Lt. Leary, Commanding', u'David Drake', u'Baen Publishing Enterprises', u'lit,lrf'],
        40: [u'The Far Side of The Stars', u'David Drake', u'Baen Publishing Enterprises', u'lrf,rar'],
        41: [u'The Way to Glory', u'David Drake', u'Baen Publishing Enterprises', u'lrf,rar'],
        42: [u'Some Golden Harbor', u'David Drake', u'Baen Books', u'lrf,rar'],
        43: [u'Harry Potter And The Half-Blood Prince', u'J. K. Rowling', None, u'lrf,rar'],
        44: [u'Harry Potter and the Order of the Phoenix', u'J. K. Rowling', None, u'lrf,rtf'],
        45: [u'The Stars at War', u'David Weber , Steve White', None, u'lrf,rtf'],
        46: [u'The Stars at War II', u'Steve White', u'Baen Publishing Enterprises', u'lrf,rar'],
        47: [u'Exodus', u'Steve White,Shirley Meier', u'Baen Books', u'lrf,rar'],
        48: [u'Harry Potter and the Goblet of Fire', u'J. K. Rowling', None, u'lrf,rar'],
        49: [u'Harry Potter and the Prisoner of Azkaban', u'J. K. Rowling', None, u'lrf,rtf'],
        50: [u'Harry Potter and the Chamber of Secrets', u'J. K. Rowling', None, u'lit,lrf'],
        51: [u'Harry Potter and the Deathly Hallows', u'J.K. Rowling', None, u'lit,lrf,pdf'],
        52: [u"His Majesty's Dragon", u'Naomi Novik', None, u'lrf,rar'],
        53: [u'Throne of Jade', u'Naomi Novik', u'Del Rey', u'lit,lrf'],
        54: [u'Black Powder War', u'Naomi Novik', u'Del Rey', u'lrf,rar'],
        55: [u'War and Peace', u'Leo Tolstoy', u'gutenberg.org', u'lrf,txt'],
        56: [u'Anna Karenina', u'Leo Tolstoy', u'gutenberg.org', u'lrf,txt'],
        57: [u'A Shorter History of Rome', u'Eugene Lawrence,Sir William Smith', u'gutenberg.org', u'lrf,zip'],
        58: [u'The Name of the Rose', u'Umberto Eco', None, u'lrf,rar'],
        71: [u"Wind Rider's Oath", u'David Weber', u'Baen', u'lrf'],
        74: [u'Rally Cry', u'William R Forstchen', None, u'htm,lrf'],
        86: [u'Empire of Ivory', u'Naomi Novik', None, u'lrf,rar'],
        87: [u"Renegade's Magic", u'Robin Hobb', None, u'lrf,rar'],
        89: [u'Master and commander', u"Patrick O'Brian", u'Fontana,\n1971', u'lit,lrf'],
        91: [u'A Companion to Wolves', u'Sarah Monette,Elizabeth Beär', None, u'lrf,rar'],
        92: [u'The Lions of al-Rassan', u'Guy Gavriel Kay', u'Eos', u'lit,lrf'],
        93: [u'Gardens of the Moon', u'Steven Erikson', u'Tor Fantasy', u'lit,lrf'],
        95: [u'The Master and Margarita', u'Mikhail Bulgakov', u'N.Y. : Knopf, 1992.', u'lrf,rtf'],
        120: [u'Deadhouse Gates', u'Steven Erikson', u'London : Bantam Books, 2001.', u'lit,lrf'],
        121: [u'Memories of Ice', u'Steven Erikson', u'Bantam Books', u'lit,lrf'],
        123: [u'House of Chains', u'Steven Erikson', u'Bantam Books', u'lit,lrf'],
        125: [u'Midnight Tides', u'Steven Erikson', u'Bantam Books', u'lit,lrf'],
        126: [u'The Bonehunters', u'Steven Erikson', u'Bantam Press', u'lit,lrf'],
        129: [u'Guns, germs, and steel: the fates of human societies', u'Jared Diamond', u'New York : W.W. Norton, c1997.', u'lit,lrf'],
        136: [u'Wildcards', u'George R. R. Martin', None, u'html,lrf'],
        138: [u'Off Armageddon Reef', u'David Weber', u'Tor Books', u'lit,lrf'],
        144: [u'Atonement', u'Ian McEwan', u'New York : Nan A. Talese/Doubleday, 2002.', u'lrf,rar'],
        146: [u'1632', u'Eric Flint', u'Baen Books', u'lit,lrf'],
        147: [u'1633', u'David Weber,Eric Flint,Dru Blair', u'Baen', u'lit,lrf'],
        148: [u'1634: The Baltic War', u'David Weber,Eric Flint', u'Baen', u'lit,lrf'],
        150: [u'The Dragonbone Chair', u'Tad Williams', u'DAW Trade', u'lrf,rtf'],
        152: [u'The Little Book That Beats the Market', u'Joel Greenblatt', u'Wiley', u'epub,lrf'],
        153: [u'Pride of Carthage', u'David Anthony Durham', u'Anchor', u'lit,lrf'],
        154: [u'Stone of farewell', u'Tad Williams', u'New York : DAW Books, 1990.', u'lrf,txt'],
        166: [u'American Gods', u'Neil Gaiman', u'HarperTorch', u'lit,lrf'],
        176: [u'Pillars of the Earth', u'Ken Follett', u'New American Library', u'lit,lrf'],
        182: [u'The Eye of the world', u'Robert Jordan', u'New York : T. Doherty Associates, c1990.', u'lit,lrf'],
        188: [u'The Great Hunt', u'Robert Jordan', u'ATOM', u'lrf,zip'],
        189: [u'The Dragon Reborn', u'Robert Jordan', None, u'lit,lrf'],
        190: [u'The Shadow Rising', u'Robert Jordan', None, u'lit,lrf'],
        191: [u'The Fires of Heaven', u'Robert Jordan', u'Time Warner Books Uk', u'lit,lrf'],
        216: [u'Lord of chaos', u'Robert Jordan', u'New York : TOR, c1994.', u'lit,lrf'],
        217: [u'A Crown of Swords', u'Robert Jordan', None, u'lit,lrf'],
        236: [u'The Path of Daggers', u'Robert Jordan', None, u'lit,lrf'],
        238: [u'The Client', u'John Grisham', u'New York : Island, 1994, c1993.', u'lit,lrf'],
        240: [u"Winter's Heart", u'Robert Jordan', None, u'lit,lrf'],
        242: [u'In the Beginning was the Command Line', u'Neal Stephenson', None, u'lrf,txt'],
        249: [u'Crossroads of Twilight', u'Robert Jordan', None, u'lit,lrf'],
        251: [u'Caves of Steel', u'Isaac Asimov', u'Del Rey', u'lrf,zip'],
        253: [u"Hunter's Run", u'George R. R. Martin,Gardner Dozois,Daniel Abraham', u'Eos', u'lrf,rar'],
        257: [u'Knife of Dreams', u'Robert Jordan', None, u'lit,lrf'],
        258: [u'Saturday', u'Ian McEwan', u'London : Jonathan Cape, 2005.', u'lrf,txt'],
        259: [u'My name is Red', u'Orhan Pamuk; translated from the Turkish by Erda\u011f G\xf6knar', u'New York : Alfred A. Knopf, 2001.', u'lit,lrf'],
        265: [u'Harbinger', u'David Mack', u'Star Trek', u'lit,lrf'],
        267: [u'Summon the Thunder', u'Dayton Ward,Kevin Dilmore', u'Pocket Books', u'lit,lrf'],
        268: [u'Shalimar the Clown', u'Salman Rushdie', u'New York : Random House, 2005.', u'lit,lrf'],
        269: [u'Reap the Whirlwind', u'David Mack', u'Star Trek', u'lit,lrf'],
        272: [u'Mistborn', u'Brandon Sanderson', u'Tor Fantasy', u'lrf,rar'],
        273: [u'The Thousandfold Thought', u'R. Scott Bakker', u'Overlook TP', u'lrf,rtf'],
        276: [u'Elantris', u'Brandon Sanderson', u'New York : Tor, 2005.', u'lrf,rar'],
        291: [u'Sundiver', u'David Brin', u'New York : Bantam Books, 1995.', u'lit,lrf'],
        299: [u'Imperium', u'Robert Harris', u'Arrow', u'lrf,rar'],
        300: [u'Startide Rising', u'David Brin', u'Bantam', u'htm,lrf'],
        301: [u'The Uplift War', u'David Brin', u'Spectra', u'lit,lrf'],
        304: [u'Brightness Reef', u'David Brin', u'Orbit', u'lrf,rar'],
        305: [u"Infinity's Shore", u'David Brin', u'Spectra', u'txt'],
        306: [u"Heaven's Reach", u'David Brin', u'Spectra', u'lrf,rar'],
        325: [u"Foundation's Triumph", u'David Brin', u'Easton Press', u'lit,lrf'],
        327: [u'I am Charlotte Simmons', u'Tom Wolfe', u'Vintage', u'htm,lrf'],
        335: [u'The Currents of Space', u'Isaac Asimov', None, u'lit,lrf'],
        340: [u'The Other Boleyn Girl', u'Philippa Gregory', u'Touchstone', u'lit,lrf'],
        341: [u"Old Man's War", u'John Scalzi', u'Tor', u'htm,lrf'],
        342: [u'The Ghost Brigades', u'John Scalzi', u'Tor Science Fiction', u'html,lrf'],
        343: [u'The Last Colony', u'John Scalzi', u'Tor Books', u'html,lrf'],
        344: [u'Gossip Girl', u'Cecily von Ziegesar', u'Warner Books', u'lrf,rtf'],
        347: [u'Little Brother', u'Cory Doctorow', u'Tor Teen', u'lrf'],
        348: [u'The Reality Dysfunction', u'Peter F. Hamilton', u'Pan MacMillan', u'lit,lrf'],
        353: [u'A Thousand Splendid Suns', u'Khaled Hosseini', u'Center Point Large Print', u'lit,lrf'],
        354: [u'Amsterdam', u'Ian McEwan', u'Anchor', u'lrf,txt'],
        355: [u'The Neutronium Alchemist', u'Peter F. Hamilton', u'Aspect', u'lit,lrf'],
        356: [u'The Naked God', u'Peter F. Hamilton', u'Aspect', u'lit,lrf'],
        421: [u'A Shadow in Summer', u'Daniel Abraham', u'Tor Fantasy', u'lrf,rar'],
        427: [u'Lonesome Dove', u'Larry McMurtry', None, u'lit,lrf'],
        440: [u'Ghost', u'John Ringo', u'Baen', u'lit,lrf'],
        441: [u'Kildar', u'John Ringo', u'Baen', u'lit,lrf'],
        443: [u'Hidden Empire ', u'Kevin J. Anderson', u'Aspect', u'lrf,rar'],
        444: [u'The Gun Seller', u'Hugh Laurie', u'Washington Square Press', u'lrf,rar']
    }

    tests = {
        'Dysfunction' : set([348]),
        'title:Dysfunction' : set([348]),
        'title:Dysfunction or author:Laurie': set([348, 444]),
        '(tag:txt or tag:pdf)': set([33, 258, 354, 305, 242, 51, 55, 56, 154]),
        '(tag:txt or tag:pdf) and author:Tolstoy': set([55, 56]),
        'Tolstoy txt': set([55, 56]),
        'Hamilton Amsterdam' : set([]),
        u'Beär' : set([91]),
        'dysfunc or tolstoy': set([348, 55, 56]),
        'tag:txt and not tolstoy': set([33, 258, 354, 305, 242, 154]),
        'not tag:lrf' : set([305]),
        'london:thames': set([13]),
        'publisher:london:thames': set([13]),
        '"(1977)"': set([13]),
    }
    fields = {'title':0, 'author':1, 'publisher':2, 'tag':3}

    _universal_set = set(texts.keys())

    def universal_set(self):
        return self._universal_set

    def get_matches(self, location, query):
        location = location.lower()
        if location in self.fields.keys():
            getter = operator.itemgetter(self.fields[location])
        elif location == 'all':
            getter = lambda y: ''.join(x if x else '' for x in y)
        else:
            getter = lambda x: ''

        if not query:
            return set([])
        query = query.lower()
        return set(key for key, val in self.texts.items() \
            if query and query in getattr(getter(val), 'lower', lambda : '')())



    def run_tests(self):
        failed = []
        for query in self.tests.keys():
            print 'Testing query:', query,
            res = self.parse(query)
            if res != self.tests[query]:
                print 'FAILED', 'Expected:', self.tests[query], 'Got:', res
                failed.append(query)
            else:
                print 'OK'
        return failed


def main(args=sys.argv):
    tester = Tester(test=True)
    failed = tester.run_tests()
    if tester._tests_failed or failed:
        print '>>>>>>>>>>>>>> Tests Failed <<<<<<<<<<<<<<<'
        return 1

    return 0

if __name__ == '__main__':
    sys.exit(main())
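As the module docstring above says, the parser is used by subclassing SearchQueryParser and implementing universal_set() and get_matches(); the Tester class in the new file is the commit's own example. A smaller sketch of the same pattern (the import path is taken from the file above, but the class name and book data here are invented for illustration):

    from calibre.utils.search_query_parser import SearchQueryParser

    class TinySearch(SearchQueryParser):

        # Invented sample data: book id -> {location: value}
        books = {
            1: {'author': u'isaac asimov', 'tag': u'read'},
            2: {'author': u'thomas hardy', 'tag': u'unread'},
        }

        def universal_set(self):
            # All known book ids
            return set(self.books.keys())

        def get_matches(self, location, query):
            # Substring match of the query against one location of each book
            query = query.lower()
            return set(book_id for book_id, fields in self.books.items()
                       if query in fields.get(location, u''))

    print TinySearch().parse('author:asimov or author:hardy')  # -> set([1, 2])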
@@ -9,6 +9,7 @@ __docformat__ = "restructuredtext en"

import logging, os, cStringIO, time, traceback, re, urlparse
from collections import defaultdict
from functools import partial

from calibre import browser, __appname__, iswindows, LoggingInterface, strftime
from calibre.ebooks.BeautifulSoup import BeautifulSoup, NavigableString, CData, Tag
@@ -812,6 +813,14 @@ class BasicNewsRecipe(object, LoggingInterface):
                strings.append(item['alt'])
        return u''.join(strings)

    @classmethod
    def soup(cls, raw):
        entity_replace = [(re.compile(ur'&(\S+?);'), partial(entity_to_unicode,
                                                             exceptions=[]))]
        nmassage = list(BeautifulSoup.MARKUP_MASSAGE)
        nmassage.extend(entity_replace)
        return BeautifulSoup(raw, markupMassage=nmassage)

class Profile2Recipe(BasicNewsRecipe):
    '''
    Used to migrate the old news Profiles to the new Recipes. Uses the settings
@@ -8,7 +8,7 @@ recipes = [
    'newsweek', 'atlantic', 'economist', 'portfolio',
    'nytimes', 'usatoday', 'outlook_india', 'bbc', 'greader', 'wsj',
    'wired', 'globe_and_mail', 'smh', 'espn', 'business_week',
    'ars_technica', 'upi',
    'ars_technica', 'upi', 'new_yorker',
    ]

import re, imp, inspect, time
src/calibre/web/feeds/recipes/new_yorker.py (new file, 77 lines)
@@ -0,0 +1,77 @@
#!/usr/bin/env python
__license__ = 'GPL v3'
__copyright__ = '2008, Kovid Goyal kovid@kovidgoyal.net'
__docformat__ = 'restructuredtext en'

import re, time
from calibre.web.feeds.news import BasicNewsRecipe
from calibre.ebooks.BeautifulSoup import NavigableString

class NewYorker(BasicNewsRecipe):

    title = 'The New Yorker'
    __author__ = 'Kovid Goyal'
    description = 'News and opinion'

    remove_tags = [
        dict(name='div', id=['printoptions', 'header', 'articleBottom']),
        dict(name='div', attrs={'class':['utils', 'icons']})
    ]


    def parse_index(self):
        toc_pat = re.compile(time.strftime(r'.+magazine/toc/%Y/%m/.+toc_%Y\d+'))
        soup = self.soup(self.browser.open('http://www.newyorker.com/').read())
        a = soup.find('a', href=toc_pat)
        if a is None:
            raise Exception('Could not find the current issue of The New Yorker')
        href = a['href']
        href = 'http://www.newyorker.com'+href[href.index('/magazine'):]
        soup = self.soup(self.browser.open(href).read())
        img = soup.find(id='inThisIssuePhoto')
        if img is not None:
            self.cover_url = 'http://www.newyorker.com'+img['src']
            alt = img.get('alt', None)
            if alt:
                self.timefmt = ' [%s]'%alt
        features = soup.findAll(attrs={'class':re.compile('feature')})

        category, sections, articles = None, [], []
        for feature in features:
            head = feature.find('img', alt=True, attrs={'class':'featurehed'})
            if head is None:
                continue
            if articles:
                sections.append((category, articles))
            category, articles = head['alt'], []
            if category in ('', 'AUDIO', 'VIDEO', 'BLOGS', 'GOINGS ON'):
                continue

            for a in feature.findAll('a', href=True):
                href = 'http://www.newyorker.com'+a['href']+'?printable=true'
                title, in_title, desc = '', True, ''
                for tag in a.contents:
                    if getattr(tag, 'name', None) == 'br':
                        in_title = False
                        continue
                    if isinstance(tag, NavigableString):
                        text = unicode(tag)
                        if in_title:
                            title += text
                        else:
                            desc += text
                if title and not 'Audio:' in title:
                    art = {
                        'title': title,
                        'desc': desc, 'content':'',
                        'url': href,
                        'date': time.strftime('%a, %d %b', time.localtime()),
                    }
                    articles.append(art)

        # from IPython.Shell import IPShellEmbed
        # ipshell = IPShellEmbed()
        # ipshell()
        # raise Exception()

        return sections
@@ -167,6 +167,8 @@ def curl_upload_file(stream, url):


def upload_installer(name):
    if not os.path.exists(name):
        return
    bname = os.path.basename(name)
    pat = re.compile(bname.replace(__version__, r'\d+\.\d+\.\d+'))
    for f in curl_list_dir():
@@ -539,7 +539,7 @@ def main():
        sys.argv[1:2] = ['py2exe']

    console = [dict(dest_base=basenames['console'][i], script=scripts['console'][i])
               for i in range(len(scripts['console']))]# if not 'parallel.py' in scripts['console'][i] ]
               for i in range(len(scripts['console']))]
    sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'src'))
    setup(
        cmdclass = {'py2exe': BuildEXE},
@@ -568,7 +568,8 @@ def main():
                    'calibre.ebooks.lrf.feeds.*',
                    'lxml', 'lxml._elementpath', 'genshi',
                    'path', 'pydoc', 'IPython.Extensions.*',
                    'calibre.web.feeds.recipes.*', 'PyQt4.QtWebKit',
                    'calibre.web.feeds.recipes.*',
                    'PyQt4.QtWebKit', 'PyQt4.QtNetwork',
                    ],
        'packages' : ['PIL'],
        'excludes' : ["Tkconstants", "Tkinter", "tcl",