Sync with main branch

Kovid Goyal 2008-07-18 21:02:05 -07:00
commit 1dc3d6ebdf
52 changed files with 17196 additions and 2844 deletions


@ -19,3 +19,4 @@ src/calibre/gui2/pictureflow/debug/
src/calibre/gui2/pictureflow/pictureflow_resource.rc src/calibre/gui2/pictureflow/pictureflow_resource.rc
src/calibre/gui2/pictureflow/release/ src/calibre/gui2/pictureflow/release/
src/calibre/translations/compiled.py src/calibre/translations/compiled.py
installer/windows/calibre/build.log


@ -0,0 +1,46 @@
#!/usr/bin/env python
__license__ = 'GPL v3'
__copyright__ = '2008, Kovid Goyal kovid@kovidgoyal.net'
__docformat__ = 'restructuredtext en'
'''
'''
import sys, time, subprocess, os
from calibre import __appname__, __version__
cmdline = [
'/usr/local/installjammer/installjammer',
'--build-dir', '/tmp/calibre-installjammer',
'-DAppName', __appname__,
'-DShortAppName', __appname__,
'-DApplicationURL', 'http://%s.kovidgoyal.net'%__appname__,
'-DCopyright', time.strftime('%Y Kovid Goyal'),
'-DPackageDescription', '%s is an e-book library manager. It can view, convert and catalog e-books in most of the major e-book formats. It can also talk to a few e-book reader devices. It can go out to the internet and fetch metadata for your books. It can download newspapers and convert them into e-books for convenient reading.'%__appname__,
'-DPackageSummary', '%s: E-book library management'%__appname__,
'-DVersion', __version__,
'-DInstallVersion', __version__ + '.0',
'-DLicense', open(os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'LICENSE')).read().replace('\n', '\r\n'),
'--output-dir', os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'dist'),
'--platform', 'Windows',
]
def run_install_jammer(installer_name='<%AppName%>-<%Version%><%Ext%>', build_for_release=True):
global cmdline
mpi = os.path.abspath(os.path.join(os.path.dirname(__file__), 'calibre', 'calibre.mpi'))
cmdline.extend(['-DWindows,Executable', installer_name])
compression = 'zlib'
if build_for_release:
cmdline += ['--build-for-release']
compression = 'lzma (solid)'
cmdline += ['-DCompressionMethod', compression]
cmdline += ['--build', mpi]
#print 'Running installjammer with cmdline:'
#print cmdline
subprocess.check_call(cmdline)
def main(args=sys.argv):
run_install_jammer(build_for_release=False)
return 0
if __name__ == '__main__':
sys.exit(main())
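
A note for orientation (the new script's path is not shown in this view): main() always produces a quick test installer, and a release build is the same call with the flag flipped, which switches InstallJammer's compression from zlib to 'lzma (solid)' and appends --build-for-release. A hypothetical release invocation, sketched rather than taken from the commit:
# Sketch only -- not part of the commit. cmdline is a module-level list that
# run_install_jammer() extends in place, so call it once per process.
run_install_jammer(build_for_release=True)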

File diff suppressed because it is too large.

installer/windows/freeze.py (new file, 207 lines added)

@ -0,0 +1,207 @@
#!/usr/bin/env python
__license__ = 'GPL v3'
__copyright__ = '2008, Kovid Goyal kovid@kovidgoyal.net'
__docformat__ = 'restructuredtext en'
'''
Freeze app into executable using py2exe.
'''
QT_DIR = 'C:\\Qt\\4.4.0'
DEVCON = 'C:\\devcon\\i386\\devcon.exe'
LIBUSB_DIR = 'C:\\libusb'
LIBUNRAR = 'C:\\Program Files\\UnrarDLL\\unrar.dll'
CLIT = 'C:\\clit\\clit.exe'
PDFTOHTML = 'C:\\pdftohtml\\pdftohtml.exe'
IMAGEMAGICK_DIR = 'C:\\ImageMagick'
FONTCONFIG_DIR = 'C:\\fontconfig'
import sys, os, py2exe, shutil, zipfile, glob, subprocess
from distutils.core import setup
from distutils.filelist import FileList
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
sys.path.insert(0, BASE_DIR)
from setup import VERSION, APPNAME, entry_points, scripts, basenames
sys.path.remove(BASE_DIR)
PY2EXE_DIR = os.path.join(BASE_DIR, 'build','py2exe')
class BuildEXE(py2exe.build_exe.py2exe):
manifest_resource_id = 0
MANIFEST_TEMPLATE = '''
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
<assemblyIdentity version="%(version)s"
processorArchitecture="x86"
name="net.kovidgoyal.%(prog)s"
type="win32"
/>
<description>Ebook management application</description>
<!-- Identify the application security requirements. -->
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v2">
<security>
<requestedPrivileges>
<requestedExecutionLevel
level="asInvoker"
uiAccess="false"/>
</requestedPrivileges>
</security>
</trustInfo>
</assembly>
'''
def build_plugins(self):
cwd = os.getcwd()
dd = os.path.join(cwd, self.dist_dir)
try:
os.chdir(os.path.join('src', 'calibre', 'gui2', 'pictureflow'))
if os.path.exists('.build'):
shutil.rmtree('.build')
os.mkdir('.build')
os.chdir('.build')
subprocess.check_call(['qmake', '../pictureflow.pro'])
subprocess.check_call(['mingw32-make', '-f', 'Makefile.Release'])
shutil.copyfile('release\\pictureflow0.dll', os.path.join(dd, 'pictureflow0.dll'))
os.chdir('..\\PyQt')
if not os.path.exists('.build'):
os.mkdir('.build')
os.chdir('.build')
subprocess.check_call(['python', '..\\configure.py'])
subprocess.check_call(['mingw32-make', '-f', 'Makefile'])
shutil.copyfile('pictureflow.pyd', os.path.join(dd, 'pictureflow.pyd'))
os.chdir('..')
shutil.rmtree('.build', True)
os.chdir('..')
shutil.rmtree('.build', True)
finally:
os.chdir(cwd)
def run(self):
if not os.path.exists(self.dist_dir):
os.makedirs(self.dist_dir)
print 'Building custom plugins...'
self.build_plugins()
py2exe.build_exe.py2exe.run(self)
qtsvgdll = None
for other in self.other_depends:
if 'qtsvg4.dll' in other.lower():
qtsvgdll = other
break
shutil.copyfile('LICENSE', os.path.join(self.dist_dir, 'LICENSE'))
print
if qtsvgdll:
print 'Adding', qtsvgdll
shutil.copyfile(qtsvgdll, os.path.join(self.dist_dir, os.path.basename(qtsvgdll)))
qtxmldll = os.path.join(os.path.dirname(qtsvgdll), 'QtXml4.dll')
print 'Adding', qtxmldll
shutil.copyfile(qtxmldll,
os.path.join(self.dist_dir, os.path.basename(qtxmldll)))
print 'Adding plugins...',
qt_prefix = QT_DIR
if qtsvgdll:
qt_prefix = os.path.dirname(os.path.dirname(qtsvgdll))
plugdir = os.path.join(qt_prefix, 'plugins')
for d in ('imageformats', 'codecs', 'iconengines'):
print d,
imfd = os.path.join(plugdir, d)
tg = os.path.join(self.dist_dir, d)
if os.path.exists(tg):
shutil.rmtree(tg)
shutil.copytree(imfd, tg)
print
print 'Adding main scripts'
f = zipfile.ZipFile(os.path.join(PY2EXE_DIR, 'library.zip'), 'a', zipfile.ZIP_DEFLATED)
for i in scripts['console'] + scripts['gui']:
f.write(i, i.partition('\\')[-1])
f.close()
print
print 'Adding third party dependencies'
print '\tAdding devcon'
tdir = os.path.join(PY2EXE_DIR, 'driver')
os.makedirs(tdir)
for pat in ('*.dll', '*.sys', '*.cat', '*.inf'):
for f in glob.glob(os.path.join(LIBUSB_DIR, pat)):
shutil.copyfile(f, os.path.join(tdir, os.path.basename(f)))
shutil.copyfile(DEVCON, os.path.join(tdir, os.path.basename(DEVCON)))
print '\tAdding unrar'
shutil.copyfile(LIBUNRAR, os.path.join(PY2EXE_DIR, os.path.basename(LIBUNRAR)))
print '\tAdding ConvertLIT'
shutil.copyfile(CLIT, os.path.join(PY2EXE_DIR, os.path.basename(CLIT)))
print '\tAdding pdftohtml'
shutil.copyfile(PDFTOHTML, os.path.join(PY2EXE_DIR, os.path.basename(PDFTOHTML)))
print '\tAdding ImageMagick'
shutil.copytree(IMAGEMAGICK_DIR, os.path.join(PY2EXE_DIR, 'ImageMagick'))
print '\tCopying fontconfig'
for f in glob.glob(os.path.join(FONTCONFIG_DIR, '*')):
tgt = os.path.join(PY2EXE_DIR, os.path.basename(f))
if os.path.isdir(f):
shutil.copytree(f, tgt)
else:
shutil.copyfile(f, tgt)
print
print 'Doing DLL redirection' # See http://msdn.microsoft.com/en-us/library/ms682600(VS.85).aspx
for f in glob.glob(os.path.join(PY2EXE_DIR, '*.exe')):
open(f + '.local', 'w').write('\n')
@classmethod
def manifest(cls, prog):
cls.manifest_resource_id += 1
return (24, cls.manifest_resource_id,
cls.MANIFEST_TEMPLATE % dict(prog=prog, version=VERSION+'.0'))
def main(args=sys.argv):
sys.argv[1:2] = ['py2exe']
if os.path.exists(PY2EXE_DIR):
shutil.rmtree(PY2EXE_DIR)
console = [dict(dest_base=basenames['console'][i], script=scripts['console'][i])
for i in range(len(scripts['console']))]
setup(
cmdclass = {'py2exe': BuildEXE},
windows = [
{'script' : scripts['gui'][0],
'dest_base' : APPNAME,
'icon_resources' : [(1, os.path.join(BASE_DIR, 'icons', 'library.ico'))],
'other_resources' : [BuildEXE.manifest(APPNAME)],
},
{'script' : scripts['gui'][1],
'dest_base' : 'lrfviewer',
'icon_resources' : [(1, os.path.join(BASE_DIR, 'icons', 'viewer.ico'))],
'other_resources' : [BuildEXE.manifest('lrfviewer')],
},
],
console = console,
options = { 'py2exe' : {'compressed': 1,
'optimize' : 2,
'dist_dir' : PY2EXE_DIR,
'includes' : [
'sip', 'pkg_resources', 'PyQt4.QtSvg',
'mechanize', 'ClientForm', 'wmi',
'win32file', 'pythoncom', 'rtf2xml',
'win32process', 'win32api', 'msvcrt',
'win32event', 'calibre.ebooks.lrf.any.*',
'calibre.ebooks.lrf.feeds.*',
'lxml', 'lxml._elementpath', 'genshi',
'path', 'pydoc', 'IPython.Extensions.*',
'calibre.web.feeds.recipes.*',
'PyQt4.QtWebKit', 'PyQt4.QtNetwork',
],
'packages' : ['PIL'],
'excludes' : ["Tkconstants", "Tkinter", "tcl",
"_imagingtk", "ImageTk", "FixTk"
],
'dll_excludes' : ['mswsock.dll'],
},
},
)
return 0
if __name__ == '__main__':
sys.exit(main())
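
For context on the manifest plumbing: py2exe's other_resources takes (resource_type, resource_id, data) tuples, and resource type 24 is RT_MANIFEST, so every target listed above gets the XML template embedded as its application manifest (requestedExecutionLevel="asInvoker" avoids a UAC elevation prompt on Vista). A small illustrative check, using the script's own names:
# Illustrative only -- what BuildEXE.manifest() hands to py2exe.
RT_MANIFEST = 24
rtype, res_id, xml = BuildEXE.manifest(APPNAME)
assert rtype == RT_MANIFEST and xml.lstrip().startswith('<?xml')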


@ -23,6 +23,12 @@ CALIBREPLUGINS = os.path.join(CALIBRESRC, 'calibre', 'plugins')
sys.path.insert(0, CALIBRESRC) sys.path.insert(0, CALIBRESRC)
from calibre import __version__ from calibre import __version__
from calibre.parallel import PARALLEL_FUNCS
from calibre.web.feeds.recipes import recipes
hiddenimports = map(lambda x: x[0], PARALLEL_FUNCS.values())
hiddenimports += ['lxml._elementpath', 'keyword', 'codeop', 'commands', 'shlex', 'pydoc']
hiddenimports += map(lambda x: x.__module__, recipes)
open(os.path.join(PYINSTALLER, 'hooks', 'hook-calibre.parallel.py'), 'wb').write('hiddenimports = %s'%repr(hiddenimports))
def run_pyinstaller(args=sys.argv): def run_pyinstaller(args=sys.argv):
subprocess.check_call(('/usr/bin/sudo', 'chown', '-R', 'kovid:users', glob.glob('/usr/lib/python*/site-packages/')[-1])) subprocess.check_call(('/usr/bin/sudo', 'chown', '-R', 'kovid:users', glob.glob('/usr/lib/python*/site-packages/')[-1]))
@ -60,18 +66,7 @@ excludes = ['gtk._gtk', 'gtk.glade', 'qt', 'matplotlib.nxutils', 'matplotlib._cn
'matplotlib._transforms', 'matplotlib._agg', 'matplotlib.backends._backend_agg', 'matplotlib._transforms', 'matplotlib._agg', 'matplotlib.backends._backend_agg',
'matplotlib.axes', 'matplotlib', 'matplotlib.pyparsing', 'matplotlib.axes', 'matplotlib', 'matplotlib.pyparsing',
'TKinter', 'atk', 'gobject._gobject', 'pango', 'PIL', 'Image', 'IPython'] 'TKinter', 'atk', 'gobject._gobject', 'pango', 'PIL', 'Image', 'IPython']
temp = ['keyword', 'codeop']
recipes = ['calibre', 'web', 'feeds', 'recipes']
prefix = '.'.join(recipes)+'.'
recipes_toc = []
extra_toc = [
('keyword', '/usr/lib/python2.5/keyword.pyo', 'PYSOURCE'),
('codeop', '/usr/lib/python2.5/codeop.pyo', 'PYSOURCE')
]
for f in glob.glob(os.path.join(CALIBRESRC, *(recipes+['*.py']))):
py_compile.compile(f, doraise=True)
recipes_toc.append((prefix + os.path.basename(f).partition('.')[0], f+'o', 'PYSOURCE'))
sys.path.insert(0, CALIBRESRC) sys.path.insert(0, CALIBRESRC)
from calibre.linux import entry_points from calibre.linux import entry_points
@ -90,9 +85,6 @@ analyses = [Analysis([os.path.join(HOMEPATH,'support/_mountzlib.py'), os.path.jo
pyz = TOC() pyz = TOC()
binaries = TOC() binaries = TOC()
pyz += extra_toc
pyz += recipes_toc
for a in analyses: for a in analyses:
pyz = a.pure + pyz pyz = a.pure + pyz
binaries = a.binaries + binaries binaries = a.binaries + binaries


@ -1,7 +1,7 @@
''' E-book management software''' ''' E-book management software'''
__license__ = 'GPL v3' __license__ = 'GPL v3'
__copyright__ = '2008, Kovid Goyal <kovid at kovidgoyal.net>' __copyright__ = '2008, Kovid Goyal <kovid at kovidgoyal.net>'
__version__ = '0.4.76' __version__ = '0.4.77'
__docformat__ = "epytext" __docformat__ = "epytext"
__author__ = "Kovid Goyal <kovid at kovidgoyal.net>" __author__ = "Kovid Goyal <kovid at kovidgoyal.net>"
__appname__ = 'calibre' __appname__ = 'calibre'
@ -30,6 +30,11 @@ islinux = not(iswindows or isosx)
try: try:
locale.setlocale(locale.LC_ALL, '') locale.setlocale(locale.LC_ALL, '')
except: except:
dl = locale.getdefaultlocale()
try:
if dl:
locale.setlocale(dl[0])
except:
pass pass
try: try:


@ -6,13 +6,51 @@ __copyright__ = '2008, Kovid Goyal <kovid at kovidgoyal.net>'
Embedded console for debugging. Embedded console for debugging.
''' '''
import sys import sys, os, re
from calibre import OptionParser, iswindows
from calibre.libunzip import update
def option_parser():
parser = OptionParser(usage='''\
%prog [options]
Run an embedded python interpreter.
''')
parser.add_option('--update-module', help='Update the specified module in the frozen library. '+
'Module specifications are of the form full.name.of.module,path_to_module.py', default=None
)
parser.add_option('-c', help='Run python code.', default=None, dest='command')
return parser
def update_zipfile(zipfile, mod, path):
pat = re.compile(mod.replace('.', '/')+r'\.py[co]*')
name = mod.replace('.', '/') + os.path.splitext(path)[-1]
update(zipfile, [pat], [path], [name])
def update_module(mod, path):
if not hasattr(sys, 'frozen'):
raise RuntimeError('Modules can only be updated in frozen installs.')
if True or iswindows:
zp = os.path.join(os.path.dirname(sys.executable), 'library.zip')
update_zipfile(zp, mod, path)
else:
raise ValueError('Updating modules is not supported on this platform.')
def main(args=sys.argv): def main(args=sys.argv):
opts, args = option_parser().parse_args(args)
if opts.update_module:
mod, path = opts.update_module.partition(',')[0], opts.update_module.partition(',')[-1]
update_module(mod, os.path.expanduser(path))
elif opts.command:
exec opts.command
else:
from IPython.Shell import IPShellEmbed from IPython.Shell import IPShellEmbed
ipshell = IPShellEmbed() ipshell = IPShellEmbed()
ipshell() ipshell()
return 0 return 0
if __name__ == '__main__': if __name__ == '__main__':
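
A usage sketch for the new option, assuming the console entry point for this module is calibre-debug (the module name and path below are made up): the option splits its argument on the first comma, then update_module() locates library.zip next to the frozen executable and replaces the named module's source inside it, raising RuntimeError outside a frozen install.
# Hypothetical, equivalent to:
#   calibre-debug --update-module calibre.ebooks.mobi.reader,/tmp/reader.py
update_module('calibre.ebooks.mobi.reader', '/tmp/reader.py')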


@ -15,6 +15,7 @@
# 02110-1301 USA # 02110-1301 USA
######################### END LICENSE BLOCK ######################### ######################### END LICENSE BLOCK #########################
__version__ = "1.0" __version__ = "1.0"
import re import re
@ -55,6 +56,9 @@ def xml_to_unicode(raw, verbose=False):
print 'WARNING: Encoding detection confidence %d%%'%(chardet['confidence']*100) print 'WARNING: Encoding detection confidence %d%%'%(chardet['confidence']*100)
CHARSET_ALIASES = { "macintosh" : "mac-roman", CHARSET_ALIASES = { "macintosh" : "mac-roman",
"x-sjis" : "shift-jis" } "x-sjis" : "shift-jis" }
if not encoding:
from calibre import preferred_encoding
encoding = preferred_encoding
if encoding: if encoding:
encoding = encoding.lower() encoding = encoding.lower()
if CHARSET_ALIASES.has_key(encoding): if CHARSET_ALIASES.has_key(encoding):


@ -3,15 +3,13 @@ __copyright__ = '2008, Anatoly Shipitsin <norguhtar at gmail.com>'
""" """
Convert .fb2 files to .lrf Convert .fb2 files to .lrf
""" """
import os, sys, tempfile, subprocess, shutil, logging, glob import os, sys, tempfile, shutil, logging
from base64 import b64decode
from calibre.ptempfile import PersistentTemporaryFile
from calibre.ebooks.lrf import option_parser as lrf_option_parser from calibre.ebooks.lrf import option_parser as lrf_option_parser
from calibre.ebooks.metadata.meta import get_metadata from calibre.ebooks.metadata.meta import get_metadata
from calibre.ebooks import ConversionError
from calibre.ebooks.lrf.html.convert_from import process_file as html_process_file from calibre.ebooks.lrf.html.convert_from import process_file as html_process_file
from calibre import setup_cli_handlers, __appname__ from calibre import setup_cli_handlers, __appname__
from calibre.ebooks.BeautifulSoup import BeautifulStoneSoup
from calibre.resources import fb2_xsl from calibre.resources import fb2_xsl
def option_parser(): def option_parser():
@ -22,25 +20,27 @@ _('''%prog [options] mybook.fb2
%prog converts mybook.fb2 to mybook.lrf''')) %prog converts mybook.fb2 to mybook.lrf'''))
parser.add_option('--debug-html-generation', action='store_true', default=False, parser.add_option('--debug-html-generation', action='store_true', default=False,
dest='debug_html_generation', help=_('Print generated HTML to stdout and quit.')) dest='debug_html_generation', help=_('Print generated HTML to stdout and quit.'))
parser.add_option('--keep-intermediate-files', action='store_true', default=False,
help=_('Keep generated HTML files after completing conversion to LRF.'))
return parser return parser
def extract_embedded_content(doc):
for elem in doc.xpath('./*'):
if 'binary' in elem.tag and elem.attrib.has_key('id'):
fname = elem.attrib['id']
data = b64decode(elem.text.strip())
open(fname, 'wb').write(data)
def generate_html(fb2file, encoding, logger): def generate_html(fb2file, encoding, logger):
from lxml import etree from lxml import etree
tdir = tempfile.mkdtemp(prefix=__appname__+'_') tdir = tempfile.mkdtemp(prefix=__appname__+'_fb2_')
ofile = os.path.join(tdir, 'index.xml')
cwd = os.getcwdu() cwd = os.getcwdu()
os.chdir(tdir) os.chdir(tdir)
try: try:
logger.info('Parsing XML...') logger.info('Parsing XML...')
parser = etree.XMLParser(recover=True, no_network=True) parser = etree.XMLParser(recover=True, no_network=True)
try:
doc = etree.parse(fb2file, parser) doc = etree.parse(fb2file, parser)
except: extract_embedded_content(doc)
raise
logger.info('Parsing failed. Trying to clean up XML...')
soup = BeautifulStoneSoup(open(fb2file, 'rb').read())
doc = etree.fromstring(str(soup))
logger.info('Converting XML to HTML...') logger.info('Converting XML to HTML...')
styledoc = etree.fromstring(fb2_xsl) styledoc = etree.fromstring(fb2_xsl)
@ -72,7 +72,7 @@ def process_file(path, options, logger=None):
options.output = os.path.abspath(os.path.basename(os.path.splitext(path)[0]) + ext) options.output = os.path.abspath(os.path.basename(os.path.splitext(path)[0]) + ext)
options.output = os.path.abspath(os.path.expanduser(options.output)) options.output = os.path.abspath(os.path.expanduser(options.output))
if not mi.title: if not mi.title:
mi.title = os.path.splitext(os.path.basename(rtf))[0] mi.title = os.path.splitext(os.path.basename(fb2))[0]
if (not options.title or options.title == 'Unknown'): if (not options.title or options.title == 'Unknown'):
options.title = mi.title options.title = mi.title
if (not options.author or options.author == 'Unknown') and mi.authors: if (not options.author or options.author == 'Unknown') and mi.authors:
@ -85,7 +85,7 @@ def process_file(path, options, logger=None):
html_process_file(htmlfile, options, logger) html_process_file(htmlfile, options, logger)
finally: finally:
os.chdir(cwd) os.chdir(cwd)
if hasattr(options, 'keep_intermediate_files') and options.keep_intermediate_files: if getattr(options, 'keep_intermediate_files', False):
logger.debug('Intermediate files in '+ tdir) logger.debug('Intermediate files in '+ tdir)
else: else:
shutil.rmtree(tdir) shutil.rmtree(tdir)
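
The new extract_embedded_content() step is what lets the XSL-generated HTML reference images: FB2 stores them as base64 text inside <binary id="..."> elements, and each one is decoded to a file named after its id in the temporary working directory. A minimal standalone sketch (the sample document and file name are illustrative; real FB2 elements are namespaced, which the 'binary' in elem.tag test tolerates):
from base64 import b64decode
from lxml import etree

doc = etree.fromstring(
    '<FictionBook><binary id="cover.jpg" content-type="image/jpeg">'
    'aGVsbG8=</binary></FictionBook>')
for elem in doc.xpath('./*'):
    if 'binary' in elem.tag and elem.get('id'):
        # decode the base64 payload and write it out as cover.jpg
        open(elem.get('id'), 'wb').write(b64decode(elem.text.strip()))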


@ -128,21 +128,40 @@
</xsl:template> </xsl:template>
<xsl:template match="fb:section"> <xsl:template match="fb:section">
<a name="TOC_{generate-id()}"></a> <xsl:variable name="section_has_title">
<xsl:choose>
<xsl:when test="./fb:title"><xsl:value-of select="generate-id()" /></xsl:when>
<xsl:otherwise>None</xsl:otherwise>
</xsl:choose>
</xsl:variable>
<xsl:if test="$section_has_title = 'None'">
<a name="TOC_{generate-id()}" />
<xsl:if test="@id"> <xsl:if test="@id">
<xsl:element name="a"> <xsl:element name="a">
<xsl:attribute name="name"><xsl:value-of select="@id"/></xsl:attribute> <xsl:attribute name="name"><xsl:value-of select="@id"/></xsl:attribute>
</xsl:element> </xsl:element>
</xsl:if> </xsl:if>
<xsl:apply-templates/> </xsl:if>
<xsl:apply-templates>
<xsl:with-param name="section_toc_id" select="$section_has_title" />
</xsl:apply-templates>
</xsl:template> </xsl:template>
<!-- section/title --> <!-- section/title -->
<xsl:template match="fb:section/fb:title|fb:poem/fb:title"> <xsl:template match="fb:section/fb:title|fb:poem/fb:title">
<xsl:param name="section_toc_id" />
<xsl:choose> <xsl:choose>
<xsl:when test="count(ancestor::node()) &lt; 9"> <xsl:when test="count(ancestor::node()) &lt; 9">
<xsl:element name="{concat('h',count(ancestor::node())-3)}"> <xsl:element name="{concat('h',count(ancestor::node())-3)}">
<xsl:if test="../@id">
<xsl:attribute name="id"><xsl:value-of select="../@id" /></xsl:attribute>
</xsl:if>
<xsl:if test="$section_toc_id != 'None'">
<xsl:element name="a">
<xsl:attribute name="name">TOC_<xsl:value-of select="$section_toc_id"/></xsl:attribute>
</xsl:element>
</xsl:if>
<a name="TOC_{generate-id()}"></a> <a name="TOC_{generate-id()}"></a>
<xsl:if test="@id"> <xsl:if test="@id">
<xsl:element name="a"> <xsl:element name="a">
@ -166,7 +185,9 @@
</xsl:template> </xsl:template>
<!-- section/title --> <!-- section/title -->
<xsl:template match="fb:body/fb:title"> <xsl:template match="fb:body/fb:title">
<h1><xsl:apply-templates mode="title"/></h1> <xsl:element name="h1">
<xsl:apply-templates mode="title"/>
</xsl:element>
</xsl:template> </xsl:template>
<xsl:template match="fb:title/fb:p"> <xsl:template match="fb:title/fb:p">


@ -222,6 +222,7 @@ class HTMLConverter(object, LoggingInterface):
self.memory = [] #: Used to ensure that duplicate CSS unhandled erros are not reported self.memory = [] #: Used to ensure that duplicate CSS unhandled erros are not reported
self.tops = {} #: element representing the top of each HTML file in the LRF file self.tops = {} #: element representing the top of each HTML file in the LRF file
self.previous_text = '' #: Used to figure out when to lstrip self.previous_text = '' #: Used to figure out when to lstrip
self.stripped_space = ''
self.preserve_block_style = False #: Used so that <p> tags in <blockquote> elements are handled properly self.preserve_block_style = False #: Used so that <p> tags in <blockquote> elements are handled properly
self.avoid_page_break = False self.avoid_page_break = False
self.current_page = book.create_page() self.current_page = book.create_page()
@ -864,6 +865,10 @@ class HTMLConverter(object, LoggingInterface):
if collapse_whitespace: if collapse_whitespace:
src = re.sub(r'\s{1,}', ' ', src) src = re.sub(r'\s{1,}', ' ', src)
if self.stripped_space and len(src) == len(src.lstrip(u' \n\r\t')):
src = self.stripped_space + src
src, orig = src.rstrip(u' \n\r\t'), src
self.stripped_space = orig[len(src):]
if len(self.previous_text) != len(self.previous_text.rstrip(u' \n\r\t')): if len(self.previous_text) != len(self.previous_text.rstrip(u' \n\r\t')):
src = src.lstrip(u' \n\r\t') src = src.lstrip(u' \n\r\t')
if len(src): if len(src):
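
The new stripped_space attribute keeps inter-word spaces from disappearing when whitespace is trimmed at text-chunk boundaries: trailing whitespace stripped from one chunk is remembered and prepended to the next chunk, unless that chunk already starts with whitespace of its own. A minimal sketch of just that hand-off, with an invented class name rather than the real HTMLConverter:
class SpaceCarrier(object):
    def __init__(self):
        self.stripped_space = ''

    def process(self, src):
        # Re-attach the space stripped from the previous chunk if this one
        # does not begin with whitespace itself.
        if self.stripped_space and len(src) == len(src.lstrip(u' \n\r\t')):
            src = self.stripped_space + src
        src, orig = src.rstrip(u' \n\r\t'), src
        self.stripped_space = orig[len(src):]
        return src

c = SpaceCarrier()
assert c.process(u'Hello ') == u'Hello'
assert c.process(u'world') == u' world'   # the stripped space reappears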


@ -5,7 +5,8 @@ __copyright__ = '2008, Anatoly Shipitsin <norguhtar at gmail.com>'
'''Read meta information from fb2 files''' '''Read meta information from fb2 files'''
import sys, os import sys, os, mimetypes
from base64 import b64decode
from calibre.ebooks.BeautifulSoup import BeautifulStoneSoup from calibre.ebooks.BeautifulSoup import BeautifulStoneSoup
from calibre.ebooks.metadata import MetaInformation from calibre.ebooks.metadata import MetaInformation
@ -18,15 +19,30 @@ def get_metadata(stream):
author= [firstname+" "+lastname] author= [firstname+" "+lastname]
title = soup.find("book-title").string title = soup.find("book-title").string
comments = soup.find("annotation") comments = soup.find("annotation")
cp = soup.find('coverpage')
cdata = None
if cp:
cimage = cp.find('image', attrs={'l:href':True})
if cimage:
id = cimage['l:href'].replace('#', '')
binary = soup.find('binary', id=id, attrs={'content-type':True})
if binary:
mt = binary['content-type']
exts = mimetypes.guess_all_extensions(mt)
if not exts:
exts = ['.jpg']
cdata = (exts[0][1:], b64decode(binary.string.strip()))
if comments and len(comments) > 1: if comments and len(comments) > 1:
comments = comments.p.contents[0] comments = comments.p.contents[0]
series = soup.find("sequence") series = soup.find("sequence")
# series_index = series.index
mi = MetaInformation(title, author) mi = MetaInformation(title, author)
mi.comments = comments mi.comments = comments
mi.author_sort = lastname+'; '+firstname
if series: if series:
mi.series = series.get('name', None) mi.series = series.get('name', None)
# mi.series_index = series_index if cdata:
mi.cover_data = cdata
return mi return mi
def main(args=sys.argv): def main(args=sys.argv):
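
On the extension guessing above: mimetypes.guess_all_extensions() returns dotted extensions in a platform-dependent order, so the code keeps the first one with its leading dot stripped and falls back to 'jpg' when the MIME type is unknown. For example:
import mimetypes

exts = mimetypes.guess_all_extensions('image/jpeg')   # e.g. ['.jpe', '.jpeg', '.jpg']
ext = (exts or ['.jpg'])[0][1:]                        # extension without the dot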


@ -70,7 +70,7 @@ class TOC(list):
break break
if toc is not None: if toc is not None:
if toc.lower() != 'ncx': if toc.lower() not in ('ncx', 'ncxtoc'):
toc = urlparse(unquote(toc))[2] toc = urlparse(unquote(toc))[2]
toc = toc.replace('/', os.sep) toc = toc.replace('/', os.sep)
if not os.path.isabs(toc): if not os.path.isabs(toc):
@ -88,6 +88,10 @@ class TOC(list):
traceback.print_exc(file=sys.stdout) traceback.print_exc(file=sys.stdout)
print 'Continuing anyway' print 'Continuing anyway'
else: else:
path = opfreader.manifest.item(toc.lower())
if path and os.access(path, os.R_OK):
self.read_ncx_toc(path)
return
cwd = os.path.abspath(self.base_path) cwd = os.path.abspath(self.base_path)
m = glob.glob(os.path.join(cwd, '*.ncx')) m = glob.glob(os.path.join(cwd, '*.ncx'))
if m: if m:


@ -32,8 +32,8 @@ class BitReader(object):
class HuffReader(object): class HuffReader(object):
def __init__(self, huffs, extra_flags): def __init__(self, huffs):
self.huffs, self.extra_flags = huffs, extra_flags self.huffs = huffs
if huffs[0][0:4] != 'HUFF' or huffs[0][4:8] != '\x00\x00\x00\x18': if huffs[0][0:4] != 'HUFF' or huffs[0][4:8] != '\x00\x00\x00\x18':
raise MobiError('Invalid HUFF header') raise MobiError('Invalid HUFF header')
@ -84,32 +84,10 @@ class HuffReader(object):
self._unpack(BitReader(data)) self._unpack(BitReader(data))
return self.r return self.r
def sizeof_trailing_entries(self, data):
def sizeof_trailing_entry(ptr, psize):
bitpos, result = 0, 0
while True:
v = ord(ptr[psize-1])
result |= (v & 0x7F) << bitpos
bitpos += 7
psize -= 1
if (v & 0x80) != 0 or (bitpos >= 28) or (psize == 0):
return result
num = 0
size = len(data)
flags = self.extra_flags >> 1
while flags:
if flags & 1:
num += sizeof_trailing_entry(data, size - num)
flags >>= 1
return num
def decompress(self, sections): def decompress(self, sections):
r = '' r = ''
for data in sections: for data in sections:
trail_size = self.sizeof_trailing_entries(data) r += self.unpack(data)
r += self.unpack(data[:len(data)-trail_size])
if r.endswith('#'): if r.endswith('#'):
r = r[:-1] r = r[:-1]
return r return r


@ -13,7 +13,7 @@ except ImportError:
import Image as PILImage import Image as PILImage
from calibre import __appname__ from calibre import __appname__
from calibre.ebooks.BeautifulSoup import BeautifulSoup from calibre.ebooks.BeautifulSoup import BeautifulSoup, Tag
from calibre.ebooks.mobi import MobiError from calibre.ebooks.mobi import MobiError
from calibre.ebooks.mobi.huffcdic import HuffReader from calibre.ebooks.mobi.huffcdic import HuffReader
from calibre.ebooks.mobi.palmdoc import decompress_doc from calibre.ebooks.mobi.palmdoc import decompress_doc
@ -89,7 +89,7 @@ class BookHeader(object):
print '[WARNING] Unknown codepage %d. Assuming cp-1252'%self.codepage print '[WARNING] Unknown codepage %d. Assuming cp-1252'%self.codepage
self.codec = 'cp1252' self.codec = 'cp1252'
if ident == 'TEXTREAD' or self.length != 0xE4: if ident == 'TEXTREAD' or self.length < 0xE4 or 0xE8 < self.length:
self.extra_flags = 0 self.extra_flags = 0
else: else:
self.extra_flags, = struct.unpack('>L', raw[0xF0:0xF4]) self.extra_flags, = struct.unpack('>L', raw[0xF0:0xF4])
@ -165,13 +165,14 @@ class MobiReader(object):
self.processed_html = self.processed_html.decode(self.book_header.codec, 'ignore') self.processed_html = self.processed_html.decode(self.book_header.codec, 'ignore')
self.extract_images(processed_records, output_dir) self.extract_images(processed_records, output_dir)
self.replace_page_breaks() self.replace_page_breaks()
self.cleanup() self.cleanup_html()
self.processed_html = re.compile('<head>', re.IGNORECASE).sub( self.processed_html = re.compile('<head>', re.IGNORECASE).sub(
'<head>\n<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />\n', '<head>\n<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />\n',
self.processed_html) self.processed_html)
soup = BeautifulSoup(self.processed_html.replace('> <', '>\n<')) soup = BeautifulSoup(self.processed_html.replace('> <', '>\n<'))
self.cleanup_soup(soup)
guide = soup.find('guide') guide = soup.find('guide')
for elem in soup.findAll(['metadata', 'guide']): for elem in soup.findAll(['metadata', 'guide']):
elem.extract() elem.extract()
@ -192,9 +193,30 @@ class MobiReader(object):
if ncx: if ncx:
open(os.path.splitext(htmlfile)[0]+'.ncx', 'wb').write(ncx) open(os.path.splitext(htmlfile)[0]+'.ncx', 'wb').write(ncx)
def cleanup(self): def cleanup_html(self):
self.processed_html = re.sub(r'<div height="0(pt|px|ex|em|%){0,1}"></div>', '', self.processed_html) self.processed_html = re.sub(r'<div height="0(pt|px|ex|em|%){0,1}"></div>', '', self.processed_html)
def cleanup_soup(self, soup):
for tag in soup.recursiveChildGenerator():
if not isinstance(tag, Tag): continue
styles = []
try:
styles.append(tag['style'])
except KeyError:
pass
try:
styles.append('margin-top: %s' % tag['height'])
del tag['height']
except KeyError:
pass
try:
styles.append('text-indent: %s' % tag['width'])
del tag['width']
except KeyError:
pass
if styles:
tag['style'] = '; '.join(styles)
def create_opf(self, htmlfile, guide=None): def create_opf(self, htmlfile, guide=None):
mi = self.book_header.exth.mi mi = self.book_header.exth.mi
opf = OPFCreator(os.path.dirname(htmlfile), mi) opf = OPFCreator(os.path.dirname(htmlfile), mi)
@ -232,8 +254,33 @@ class MobiReader(object):
return opf return opf
def sizeof_trailing_entries(self, data):
def sizeof_trailing_entry(ptr, psize):
bitpos, result = 0, 0
while True:
v = ord(ptr[psize-1])
result |= (v & 0x7F) << bitpos
bitpos += 7
psize -= 1
if (v & 0x80) != 0 or (bitpos >= 28) or (psize == 0):
return result
num = 0
size = len(data)
flags = self.book_header.extra_flags >> 1
while flags:
if flags & 1:
num += sizeof_trailing_entry(data, size - num)
flags >>= 1
return num
def text_section(self, index):
data = self.sections[index][0]
trail_size = self.sizeof_trailing_entries(data)
return data[:len(data)-trail_size]
def extract_text(self): def extract_text(self):
text_sections = [self.sections[i][0] for i in range(1, self.book_header.records+1)] text_sections = [self.text_section(i) for i in range(1, self.book_header.records+1)]
processed_records = list(range(0, self.book_header.records+1)) processed_records = list(range(0, self.book_header.records+1))
self.mobi_html = '' self.mobi_html = ''
@ -244,7 +291,7 @@ class MobiReader(object):
self.book_header.huff_offset+self.book_header.huff_number)] self.book_header.huff_offset+self.book_header.huff_number)]
processed_records += list(range(self.book_header.huff_offset, processed_records += list(range(self.book_header.huff_offset,
self.book_header.huff_offset+self.book_header.huff_number)) self.book_header.huff_offset+self.book_header.huff_number))
huff = HuffReader(huffs, self.book_header.extra_flags) huff = HuffReader(huffs)
self.mobi_html = huff.decompress(text_sections) self.mobi_html = huff.decompress(text_sections)
elif self.book_header.compression_type == '\x00\x02': elif self.book_header.compression_type == '\x00\x02':
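
The sizeof_trailing_entries() logic that moves here from HuffReader strips the MOBI "extra data" appended to each text record before decompression: for every set bit in extra_flags >> 1 there is one trailing entry, and, as the code reads it, each entry's total length is stored at its very end as a 7-bits-per-byte integer read backwards until a byte with the high bit set. A small self-contained sketch with illustrative sample bytes:
def sizeof_trailing_entry(data, size):
    # Read the length varint backwards from data[size-1]; a set high bit ends it.
    bitpos, result = 0, 0
    while True:
        v = ord(data[size - 1:size])
        result |= (v & 0x7F) << bitpos
        bitpos += 7
        size -= 1
        if (v & 0x80) or bitpos >= 28 or size == 0:
            return result

def sizeof_trailing_entries(data, extra_flags):
    num, flags = 0, extra_flags >> 1
    while flags:
        if flags & 1:
            num += sizeof_trailing_entry(data, len(data) - num)
        flags >>= 1
    return num

# One trailing entry of 3 bytes (2 payload bytes plus the size byte 0x83),
# signalled by bit 1 of extra_flags:
assert sizeof_trailing_entries(b'text\x00\x00\x83', 0b10) == 3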


@ -144,6 +144,7 @@ class MetadataSingleDialog(QDialog, Ui_MetadataSingleDialog):
self.edit_tags) self.edit_tags)
QObject.connect(self.remove_series_button, SIGNAL('clicked()'), QObject.connect(self.remove_series_button, SIGNAL('clicked()'),
self.remove_unused_series) self.remove_unused_series)
self.connect(self.swap_button, SIGNAL('clicked()'), self.swap_title_author)
self.timeout = float(Settings().get('network timeout', 5)) self.timeout = float(Settings().get('network timeout', 5))
self.title.setText(db.title(row)) self.title.setText(db.title(row))
isbn = db.isbn(self.id, index_is_id=True) isbn = db.isbn(self.id, index_is_id=True)
@ -193,6 +194,11 @@ class MetadataSingleDialog(QDialog, Ui_MetadataSingleDialog):
if not pm.isNull(): if not pm.isNull():
self.cover.setPixmap(pm) self.cover.setPixmap(pm)
def swap_title_author(self):
title = self.title.text()
self.title.setText(self.authors.text())
self.authors.setText(title)
self.author_sort.setText('')
def cover_dropped(self): def cover_dropped(self):
self.cover_changed = True self.cover_changed = True


@ -5,8 +5,8 @@
<rect> <rect>
<x>0</x> <x>0</x>
<y>0</y> <y>0</y>
<width>668</width> <width>796</width>
<height>663</height> <height>715</height>
</rect> </rect>
</property> </property>
<property name="sizePolicy" > <property name="sizePolicy" >
@ -41,7 +41,7 @@
<property name="title" > <property name="title" >
<string>Meta information</string> <string>Meta information</string>
</property> </property>
<layout class="QGridLayout" > <layout class="QGridLayout" name="gridLayout_3" >
<item row="0" column="0" > <item row="0" column="0" >
<widget class="QLabel" name="label" > <widget class="QLabel" name="label" >
<property name="text" > <property name="text" >
@ -62,6 +62,26 @@
</property> </property>
</widget> </widget>
</item> </item>
<item rowspan="2" row="0" column="2" >
<widget class="QToolButton" name="swap_button" >
<property name="toolTip" >
<string>Swap the author and title</string>
</property>
<property name="text" >
<string>...</string>
</property>
<property name="icon" >
<iconset resource="../images.qrc" >
<normaloff>:/images/swap.svg</normaloff>:/images/swap.svg</iconset>
</property>
<property name="iconSize" >
<size>
<width>16</width>
<height>16</height>
</size>
</property>
</widget>
</item>
<item row="1" column="0" > <item row="1" column="0" >
<widget class="QLabel" name="label_2" > <widget class="QLabel" name="label_2" >
<property name="text" > <property name="text" >
@ -95,7 +115,7 @@
</property> </property>
</widget> </widget>
</item> </item>
<item row="2" column="1" > <item row="2" column="1" colspan="2" >
<widget class="QLineEdit" name="author_sort" > <widget class="QLineEdit" name="author_sort" >
<property name="toolTip" > <property name="toolTip" >
<string>Specify how the author(s) of this book should be sorted. For example Charles Dickens should be sorted as Dickens, Charles.</string> <string>Specify how the author(s) of this book should be sorted. For example Charles Dickens should be sorted as Dickens, Charles.</string>
@ -115,7 +135,7 @@
</property> </property>
</widget> </widget>
</item> </item>
<item row="3" column="1" > <item row="3" column="1" colspan="2" >
<widget class="QSpinBox" name="rating" > <widget class="QSpinBox" name="rating" >
<property name="toolTip" > <property name="toolTip" >
<string>Rating of this book. 0-5 stars</string> <string>Rating of this book. 0-5 stars</string>
@ -147,7 +167,7 @@
</property> </property>
</widget> </widget>
</item> </item>
<item row="4" column="1" > <item row="4" column="1" colspan="2" >
<widget class="QLineEdit" name="publisher" > <widget class="QLineEdit" name="publisher" >
<property name="toolTip" > <property name="toolTip" >
<string>Change the publisher of this book</string> <string>Change the publisher of this book</string>
@ -167,7 +187,7 @@
</property> </property>
</widget> </widget>
</item> </item>
<item row="5" column="1" > <item row="5" column="1" colspan="2" >
<layout class="QHBoxLayout" > <layout class="QHBoxLayout" >
<item> <item>
<widget class="QLineEdit" name="tags" > <widget class="QLineEdit" name="tags" >
@ -208,7 +228,7 @@
</property> </property>
</widget> </widget>
</item> </item>
<item row="6" column="1" > <item row="6" column="1" colspan="2" >
<layout class="QHBoxLayout" > <layout class="QHBoxLayout" >
<property name="spacing" > <property name="spacing" >
<number>5</number> <number>5</number>
@ -254,7 +274,7 @@
</item> </item>
</layout> </layout>
</item> </item>
<item row="7" column="1" > <item row="7" column="1" colspan="2" >
<widget class="QSpinBox" name="series_index" > <widget class="QSpinBox" name="series_index" >
<property name="enabled" > <property name="enabled" >
<bool>false</bool> <bool>false</bool>
@ -289,7 +309,7 @@
</property> </property>
</widget> </widget>
</item> </item>
<item row="8" column="1" > <item row="8" column="1" colspan="2" >
<widget class="QLineEdit" name="isbn" /> <widget class="QLineEdit" name="isbn" />
</item> </item>
</layout> </layout>
@ -300,11 +320,16 @@
<property name="title" > <property name="title" >
<string>Comments</string> <string>Comments</string>
</property> </property>
<layout class="QGridLayout" > <widget class="QTextEdit" name="comments" >
<item row="0" column="0" > <property name="geometry" >
<widget class="QTextEdit" name="comments" /> <rect>
</item> <x>9</x>
</layout> <y>39</y>
<width>354</width>
<height>557</height>
</rect>
</property>
</widget>
</widget> </widget>
</item> </item>
<item> <item>
@ -316,7 +341,7 @@
</item> </item>
</layout> </layout>
</widget> </widget>
<widget class="QWidget" name="" > <widget class="QWidget" name="layoutWidget" >
<layout class="QVBoxLayout" name="verticalLayout_2" > <layout class="QVBoxLayout" name="verticalLayout_2" >
<item> <item>
<widget class="QGroupBox" name="af_group_box" > <widget class="QGroupBox" name="af_group_box" >
@ -364,7 +389,7 @@
</property> </property>
<property name="icon" > <property name="icon" >
<iconset resource="../images.qrc" > <iconset resource="../images.qrc" >
<normaloff>:/images/plus.svg</normaloff>:/images/plus.svg</iconset> <normaloff>:/images/add_book.svg</normaloff>:/images/add_book.svg</iconset>
</property> </property>
<property name="iconSize" > <property name="iconSize" >
<size> <size>
@ -398,9 +423,6 @@
</item> </item>
</layout> </layout>
<zorder></zorder> <zorder></zorder>
<zorder></zorder>
<zorder></zorder>
<zorder>groupBox_4</zorder>
</widget> </widget>
</item> </item>
<item> <item>

File diff suppressed because it is too large.

New image file (89 KiB)


@ -0,0 +1,722 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->
<svg
xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:cc="http://web.resource.org/cc/"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:svg="http://www.w3.org/2000/svg"
xmlns="http://www.w3.org/2000/svg"
xmlns:xlink="http://www.w3.org/1999/xlink"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
width="128"
height="128"
id="svg2606"
sodipodi:version="0.32"
inkscape:version="0.45.1"
version="1.0"
sodipodi:docname="edit-undo.svgz"
inkscape:output_extension="org.inkscape.output.svgz.inkscape"
sodipodi:docbase="/home/david/oxygen/trunk/scalable/actions"
inkscape:export-filename="edit-undo.png"
inkscape:export-xdpi="11.25"
inkscape:export-ydpi="11.25">
<defs
id="defs2608">
<linearGradient
inkscape:collect="always"
id="linearGradient3342">
<stop
style="stop-color:#000000;stop-opacity:1;"
offset="0"
id="stop3344" />
<stop
style="stop-color:#000000;stop-opacity:0;"
offset="1"
id="stop3347" />
</linearGradient>
<linearGradient
inkscape:collect="always"
id="linearGradient3326">
<stop
style="stop-color:#000000;stop-opacity:1;"
offset="0"
id="stop3328" />
<stop
style="stop-color:#000000;stop-opacity:0;"
offset="1"
id="stop3330" />
</linearGradient>
<linearGradient
id="linearGradient3825">
<stop
style="stop-color:#ffffff;stop-opacity:0;"
offset="0"
id="stop3827" />
<stop
id="stop3833"
offset="0.5"
style="stop-color:#ffffff;stop-opacity:0.18705036;" />
<stop
style="stop-color:#ffffff;stop-opacity:1;"
offset="1"
id="stop3829" />
</linearGradient>
<linearGradient
id="linearGradient3751">
<stop
style="stop-color:#beba2c;stop-opacity:1;"
offset="0"
id="stop3753" />
<stop
style="stop-color:#b6be2c;stop-opacity:0;"
offset="1"
id="stop3755" />
</linearGradient>
<linearGradient
id="linearGradient3528">
<stop
style="stop-color:#eaf209;stop-opacity:1;"
offset="0"
id="stop3530" />
<stop
style="stop-color:#c7c634;stop-opacity:0;"
offset="1"
id="stop3532" />
</linearGradient>
<linearGradient
id="linearGradient3295">
<stop
style="stop-color:#fffe63;stop-opacity:1;"
offset="0"
id="stop3297" />
<stop
style="stop-color:#ffffff;stop-opacity:0;"
offset="1"
id="stop3299" />
</linearGradient>
<linearGradient
id="linearGradient3202">
<stop
style="stop-color:#fcff9c;stop-opacity:1;"
offset="0"
id="stop3204" />
<stop
style="stop-color:#c1a965;stop-opacity:0;"
offset="1"
id="stop3206" />
</linearGradient>
<radialGradient
id="XMLID_4_"
cx="48"
cy="-0.2148"
r="55.148"
gradientTransform="matrix(0.9792,0,0,0.9725,133.0002,20.8762)"
gradientUnits="userSpaceOnUse">
<stop
offset="0"
style="stop-color:#cfd13d;stop-opacity:1;"
id="stop3082" />
<stop
offset="1"
style="stop-color:#db8900;stop-opacity:1;"
id="stop3090" />
</radialGradient>
<linearGradient
id="linearGradient3260"
inkscape:collect="always">
<stop
id="stop3262"
offset="0"
style="stop-color:#ffffff;stop-opacity:1;" />
<stop
id="stop3264"
offset="1"
style="stop-color:#ffffff;stop-opacity:0;" />
</linearGradient>
<linearGradient
id="linearGradient3344">
<stop
id="stop3346"
offset="0"
style="stop-color:#fdff63;stop-opacity:1;" />
<stop
id="stop3348"
offset="1"
style="stop-color:#ffffff;stop-opacity:0;" />
</linearGradient>
<linearGradient
id="linearGradient3449">
<stop
id="stop3451"
offset="0"
style="stop-color:#000000;stop-opacity:1;" />
<stop
id="stop3453"
offset="1"
style="stop-color:#000000;stop-opacity:0;" />
</linearGradient>
<linearGradient
inkscape:collect="always"
xlink:href="#XMLID_4_"
id="linearGradient3516"
x1="147.09375"
y1="33.40625"
x2="8.083992"
y2="123.90625"
gradientUnits="userSpaceOnUse" />
<radialGradient
inkscape:collect="always"
xlink:href="#XMLID_4_"
id="radialGradient3524"
cx="67.09375"
cy="116.90625"
fx="67.09375"
fy="116.90625"
r="56"
gradientUnits="userSpaceOnUse" />
<radialGradient
inkscape:collect="always"
xlink:href="#linearGradient3528"
id="radialGradient3535"
cx="99.726295"
cy="27.418272"
fx="64.689766"
fy="68.231934"
r="56"
gradientUnits="userSpaceOnUse"
gradientTransform="matrix(0.693735,5.8671246e-2,-3.6242796e-2,0.4285387,33.939389,26.8809)" />
<linearGradient
inkscape:collect="always"
xlink:href="#linearGradient3295"
id="linearGradient3548"
x1="75.09375"
y1="4.5317035"
x2="75.09375"
y2="80.172485"
gradientUnits="userSpaceOnUse" />
<linearGradient
inkscape:collect="always"
xlink:href="#linearGradient3260"
id="linearGradient3581"
gradientUnits="userSpaceOnUse"
gradientTransform="translate(4.2161108e-3,1.9705695e-3)"
x1="75.09375"
y1="4.5317035"
x2="75.09375"
y2="80.172485" />
<linearGradient
inkscape:collect="always"
xlink:href="#linearGradient3295"
id="linearGradient3613"
x1="208.59375"
y1="130.40625"
x2="208.59375"
y2="63.426777"
gradientUnits="userSpaceOnUse"
gradientTransform="translate(-137,0)" />
<filter
inkscape:collect="always"
id="filter3639">
<feGaussianBlur
inkscape:collect="always"
stdDeviation="1.0580524"
id="feGaussianBlur3641" />
</filter>
<radialGradient
inkscape:collect="always"
xlink:href="#linearGradient3449"
id="radialGradient3654"
cx="-10.165252"
cy="66.906013"
fx="-10.165252"
fy="66.906013"
r="59.995121"
gradientTransform="matrix(0.4582893,-2.1035589e-8,4.5903973e-8,1.0000813,20.447953,-5.1974351e-3)"
gradientUnits="userSpaceOnUse" />
<radialGradient
inkscape:collect="always"
xlink:href="#linearGradient3449"
id="radialGradient3658"
gradientUnits="userSpaceOnUse"
gradientTransform="matrix(0.2676699,0.521376,-0.7037472,0.3612977,108.99386,-36.062981)"
cx="167.67001"
cy="80.404922"
fx="167.67001"
fy="80.404922"
r="59.995121" />
<linearGradient
inkscape:collect="always"
xlink:href="#linearGradient3260"
id="linearGradient3676"
x1="120.0625"
y1="12.569496"
x2="125.30366"
y2="14.444496"
gradientUnits="userSpaceOnUse"
spreadMethod="reflect" />
<filter
inkscape:collect="always"
x="-0.15096202"
width="1.301924"
y="-0.13732364"
height="1.2746473"
id="filter3738">
<feGaussianBlur
inkscape:collect="always"
stdDeviation="0.39257441"
id="feGaussianBlur3740" />
</filter>
<radialGradient
inkscape:collect="always"
xlink:href="#linearGradient3449"
id="radialGradient3744"
gradientUnits="userSpaceOnUse"
gradientTransform="matrix(0.982366,1.671718e-2,-3.5801148e-3,0.2103843,-18.56344,30.477792)"
cx="72.684891"
cy="48.228905"
fx="74.871155"
fy="26.862719"
r="59.995121" />
<radialGradient
inkscape:collect="always"
xlink:href="#linearGradient3751"
id="radialGradient3757"
cx="66.01458"
cy="126.69183"
fx="66.01458"
fy="126.69183"
r="59.99512"
gradientTransform="matrix(0.675025,0,0,0.3583625,19.527377,41.004647)"
gradientUnits="userSpaceOnUse" />
<radialGradient
inkscape:collect="always"
xlink:href="#linearGradient3260"
id="radialGradient3767"
cx="64.088867"
cy="7.4108429"
fx="64.088867"
fy="7.4108429"
r="59.995121"
gradientTransform="matrix(0.3093869,0,0,0.4779247,44.260611,3.8644223)"
gradientUnits="userSpaceOnUse" />
<linearGradient
inkscape:collect="always"
xlink:href="#linearGradient3344"
id="linearGradient3771"
gradientUnits="userSpaceOnUse"
spreadMethod="reflect"
x1="120.0625"
y1="12.569496"
x2="125.30366"
y2="14.444496" />
<filter
inkscape:collect="always"
id="filter3438">
<feGaussianBlur
inkscape:collect="always"
stdDeviation="1.3342697"
id="feGaussianBlur3440" />
</filter>
<filter
inkscape:collect="always"
id="filter3630">
<feGaussianBlur
inkscape:collect="always"
stdDeviation="0.89883985"
id="feGaussianBlur3632" />
</filter>
<radialGradient
inkscape:collect="always"
xlink:href="#linearGradient3825"
id="radialGradient2361"
gradientUnits="userSpaceOnUse"
gradientTransform="matrix(0.6484284,0.1017206,-3.1257154e-2,0.1992521,-4.56257,53.15916)"
cx="-112.17241"
cy="118.60459"
fx="-113.14772"
fy="59.708473"
r="59.99512" />
<linearGradient
inkscape:collect="always"
xlink:href="#linearGradient3326"
id="linearGradient2363"
gradientUnits="userSpaceOnUse"
x1="-151.43935"
y1="37.68198"
x2="-152.26776"
y2="57.25" />
<linearGradient
inkscape:collect="always"
xlink:href="#linearGradient3326"
id="linearGradient2365"
gradientUnits="userSpaceOnUse"
x1="-132.51041"
y1="39.803303"
x2="-158.92462"
y2="72.881729" />
<linearGradient
inkscape:collect="always"
xlink:href="#linearGradient3326"
id="linearGradient2367"
gradientUnits="userSpaceOnUse"
x1="-83.012932"
y1="44.753052"
x2="-158.92462"
y2="72.881729" />
<linearGradient
inkscape:collect="always"
xlink:href="#linearGradient3342"
id="linearGradient3349"
x1="-73"
y1="105.625"
x2="-163"
y2="86.125"
gradientUnits="userSpaceOnUse" />
<filter
inkscape:collect="always"
x="-0.087741371"
width="1.1754827"
y="-0.10211017"
height="1.2042203"
id="filter3363">
<feGaussianBlur
inkscape:collect="always"
stdDeviation="3.0526685"
id="feGaussianBlur3365" />
</filter>
<linearGradient
inkscape:collect="always"
xlink:href="#linearGradient3342"
id="linearGradient3372"
gradientUnits="userSpaceOnUse"
gradientTransform="translate(-40.5,-1.5)"
x1="-83.593941"
y1="137.13324"
x2="-138.0043"
y2="92.603989" />
<linearGradient
inkscape:collect="always"
xlink:href="#linearGradient3342"
id="linearGradient3376"
gradientUnits="userSpaceOnUse"
gradientTransform="translate(-40.5,-1.5)"
x1="-61.802711"
y1="99.979607"
x2="-136.51074"
y2="112.70422" />
<radialGradient
inkscape:collect="always"
xlink:href="#linearGradient3825"
id="radialGradient3388"
gradientUnits="userSpaceOnUse"
gradientTransform="matrix(0.6484284,0.1017206,-3.1257154e-2,0.1992521,-4.56257,53.15916)"
cx="-112.17241"
cy="118.60459"
fx="-113.14772"
fy="59.708473"
r="59.99512" />
<linearGradient
inkscape:collect="always"
xlink:href="#linearGradient3326"
id="linearGradient3390"
gradientUnits="userSpaceOnUse"
x1="-151.43935"
y1="37.68198"
x2="-152.26776"
y2="57.25" />
<linearGradient
inkscape:collect="always"
xlink:href="#linearGradient3326"
id="linearGradient3392"
gradientUnits="userSpaceOnUse"
x1="-132.51041"
y1="39.803303"
x2="-158.92462"
y2="72.881729" />
<linearGradient
inkscape:collect="always"
xlink:href="#linearGradient3326"
id="linearGradient3394"
gradientUnits="userSpaceOnUse"
x1="-83.012932"
y1="44.753052"
x2="-158.92462"
y2="72.881729" />
<linearGradient
inkscape:collect="always"
xlink:href="#linearGradient3342"
id="linearGradient3396"
gradientUnits="userSpaceOnUse"
x1="-73"
y1="105.625"
x2="-163"
y2="86.125" />
<linearGradient
inkscape:collect="always"
xlink:href="#linearGradient3342"
id="linearGradient3398"
gradientUnits="userSpaceOnUse"
gradientTransform="translate(-40.5,-1.5)"
x1="-83.593941"
y1="137.13324"
x2="-138.0043"
y2="92.603989" />
<linearGradient
inkscape:collect="always"
xlink:href="#linearGradient3342"
id="linearGradient3400"
gradientUnits="userSpaceOnUse"
gradientTransform="translate(-40.5,-1.5)"
x1="-61.802711"
y1="99.979607"
x2="-136.51074"
y2="112.70422" />
<radialGradient
inkscape:collect="always"
xlink:href="#linearGradient3825"
id="radialGradient3422"
gradientUnits="userSpaceOnUse"
gradientTransform="matrix(0.6484284,0.1017206,-3.1257154e-2,0.1992521,-4.56257,53.15916)"
cx="-112.17241"
cy="118.60459"
fx="-113.14772"
fy="59.708473"
r="59.99512" />
<linearGradient
inkscape:collect="always"
xlink:href="#linearGradient3326"
id="linearGradient3424"
gradientUnits="userSpaceOnUse"
x1="-151.43935"
y1="37.68198"
x2="-152.26776"
y2="57.25" />
<linearGradient
inkscape:collect="always"
xlink:href="#linearGradient3326"
id="linearGradient3426"
gradientUnits="userSpaceOnUse"
x1="-132.51041"
y1="39.803303"
x2="-158.92462"
y2="72.881729" />
<linearGradient
inkscape:collect="always"
xlink:href="#linearGradient3326"
id="linearGradient3428"
gradientUnits="userSpaceOnUse"
x1="-83.012932"
y1="44.753052"
x2="-158.92462"
y2="72.881729" />
<linearGradient
inkscape:collect="always"
xlink:href="#linearGradient3342"
id="linearGradient3430"
gradientUnits="userSpaceOnUse"
x1="-73"
y1="105.625"
x2="-163"
y2="86.125" />
<linearGradient
inkscape:collect="always"
xlink:href="#linearGradient3342"
id="linearGradient3432"
gradientUnits="userSpaceOnUse"
gradientTransform="translate(-40.5,-1.5)"
x1="-83.593941"
y1="137.13324"
x2="-138.0043"
y2="92.603989" />
<linearGradient
inkscape:collect="always"
xlink:href="#linearGradient3342"
id="linearGradient3434"
gradientUnits="userSpaceOnUse"
gradientTransform="translate(-40.5,-1.5)"
x1="-61.802711"
y1="99.979607"
x2="-136.51074"
y2="112.70422" />
<mask
maskUnits="userSpaceOnUse"
id="mask3402">
<g
id="g3404"
transform="translate(167.50257,-3.755156e-3)">
<g
id="g3406"
transform="translate(80.51637,30.885255)">
<path
style="opacity:1;fill:url(#radialGradient3422);fill-opacity:1;stroke:none;stroke-width:2;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dashoffset:1.08779998;stroke-opacity:1"
d="M -184.42232,-32.47243 C -217.54751,-32.47243 -248.42232,-0.097625 -248.42232,33.02757 C -248.42232,66.15276 -217.04751,97.02757 -183.92232,97.02757 C -153.6332,97.02757 -128.58571,70.81131 -124.51607,41.68382 L -159.54732,36.65257 C -161.172,48.6137 -171.47739,57.62132 -183.92232,57.62132 C -197.49395,57.62132 -206.01607,46.0992 -206.01607,32.52757 C -206.01607,18.955936 -199.99395,12.43382 -186.42232,12.43382 C -179.6365,12.433819 -176.50103,10.198864 -172.04732,14.65257 L -176.8745,25.979749 C -178.93037,28.035619 -179.11822,29.285529 -178.55411,30.595278 C -178.04554,31.776057 -177.03338,33.12132 -174.34438,33.12132 L -130.39107,33.12132 C -126.54518,33.12132 -123.93208,30.466941 -123.93208,26.871189 L -124.00095,-17.206829 C -124.00095,-19.687584 -124.90346,-21.050058 -126.18242,-21.556444 C -127.49674,-22.076829 -129.21563,-21.679122 -131.28951,-19.605244 L -141.48482,-19.40993 C -152.34417,-30.269281 -167.85972,-32.47243 -184.42232,-32.47243 z "
id="path3408"
sodipodi:nodetypes="cssccsssccsccccsccc" />
<path
sodipodi:nodetypes="ccccc"
style="fill:url(#linearGradient3424);fill-opacity:1;fill-rule:evenodd;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;filter:url(#filter3438)"
d="M -161.5,34.5 C -162,37 -180,54 -180,54 L -132.5,80 L -112,38.5 L -161.5,34.5 z "
id="path3410" />
<path
sodipodi:nodetypes="ccccc"
id="path3412"
d="M -161.5,34.5 C -162,37 -181.27817,54.389087 -181.27817,54.389087 L -151.62742,97.591883 L -112,38.5 L -161.5,34.5 z "
style="fill:url(#linearGradient3426);fill-opacity:1;fill-rule:evenodd;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;filter:url(#filter3438)" />
<path
style="fill:url(#linearGradient3428);fill-opacity:1;fill-rule:evenodd;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;filter:url(#filter3438)"
d="M -161.5,34.5 C -162,37 -181.27817,54.389087 -181.27817,54.389087 L -151.62742,97.591883 L -112,38.5 L -161.5,34.5 z "
id="path3414"
sodipodi:nodetypes="ccccc" />
</g>
<rect
y="69.75"
x="-119"
height="71.75"
width="83.5"
id="rect3416"
style="opacity:1;fill:url(#linearGradient3430);fill-opacity:1;stroke:none;stroke-width:1;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:1.08779998;stroke-opacity:1;filter:url(#filter3363)" />
<path
sodipodi:nodetypes="ccccc"
id="path3418"
d="M -159.5,68.25 L -39.138259,55.983708 L -93.453327,162.55286 L -197.79465,128.96507 L -159.5,68.25 z "
style="fill:url(#linearGradient3432);fill-opacity:1;stroke:none;stroke-width:1;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dashoffset:1.08779998;stroke-opacity:1;filter:url(#filter3363)"
transform="matrix(0.6393762,0.7688941,-0.7688941,0.6393762,37.597642,128.08723)" />
<path
transform="matrix(0.6393762,0.7688941,-0.7688941,0.6393762,37.597642,128.08723)"
style="fill:url(#linearGradient3434);fill-opacity:1;stroke:none;stroke-width:1;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dashoffset:1.08779998;stroke-opacity:1;filter:url(#filter3363)"
d="M -159.5,68.25 L -39.138259,55.983708 L -93.453327,162.55286 L -197.79465,128.96507 L -159.5,68.25 z "
id="path3420"
sodipodi:nodetypes="ccccc" />
</g>
</mask>
<linearGradient
inkscape:collect="always"
xlink:href="#linearGradient3751"
id="linearGradient3565"
x1="-267.47665"
y1="18.103027"
x2="-33.476654"
y2="18.103027"
gradientUnits="userSpaceOnUse" />
<linearGradient
inkscape:collect="always"
xlink:href="#linearGradient3295"
id="linearGradient3567"
gradientUnits="userSpaceOnUse"
gradientTransform="translate(-137,0)"
x1="208.59375"
y1="130.40625"
x2="208.59375"
y2="63.426777" />
</defs>
<sodipodi:namedview
id="base"
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1.0"
inkscape:pageopacity="0.0"
inkscape:pageshadow="2"
inkscape:zoom="4.6484375"
inkscape:cx="64"
inkscape:cy="64"
inkscape:document-units="px"
inkscape:current-layer="layer1"
width="128px"
height="128px"
gridspacingx="4px"
gridspacingy="4px"
gridempspacing="2"
showgrid="false"
inkscape:grid-points="true"
showguides="true"
inkscape:guide-bbox="true"
inkscape:window-width="748"
inkscape:window-height="681"
inkscape:window-x="526"
inkscape:window-y="51" />
<metadata
id="metadata2611">
<rdf:RDF>
<cc:Work
rdf:about="">
<dc:format>image/svg+xml</dc:format>
<dc:type
rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
</cc:Work>
</rdf:RDF>
</metadata>
<g
inkscape:label="Livello 1"
inkscape:groupmode="layer"
id="layer1">
<g
id="g3835"
mask="url(#mask3402)"
transform="matrix(-1,0,0,1,128.17774,0)">
<path
sodipodi:nodetypes="cssccsssccsccccsccc"
id="rect3204"
d="M 64.09375,3.90625 C 30.968558,3.9062499 4.0937499,30.781055 4.09375,63.90625 C 4.0937501,97.031442 30.96856,123.90625 64.09375,123.90625 C 94.382866,123.90625 119.43036,101.68999 123.5,72.5625 L 88.46875,67.53125 C 86.844066,79.492379 76.538676,88.5 64.09375,88.5 C 50.522122,88.499999 39.5,77.477881 39.5,63.90625 C 39.500001,50.334616 50.522119,39.3125 64.09375,39.3125 C 70.879568,39.312499 77.015044,42.077544 81.46875,46.53125 L 71.141571,56.858429 C 69.085701,58.914299 68.897846,60.164209 69.461963,61.473958 C 69.970531,62.654737 70.982695,64 73.671688,64 L 117.625,64 C 121.47089,64 124.08399,61.345621 124.08399,57.749869 L 124.01512,13.671851 C 124.01512,11.191096 123.11261,9.8286218 121.83365,9.3222363 C 120.51933,8.8018514 118.80044,9.1995576 116.72656,11.273436 L 106.53125,21.46875 C 95.671902,10.609399 80.656349,3.90625 64.09375,3.90625 z "
style="opacity:1;fill:url(#linearGradient3516);fill-opacity:1;stroke:none;stroke-width:2;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dashoffset:1.08779998;stroke-opacity:1" />
<path
style="opacity:0.79775277;fill:url(#radialGradient3757);fill-opacity:1;stroke:none;stroke-width:2;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dashoffset:1.08779998;stroke-opacity:1"
d="M 64.09375,3.90625 C 30.968558,3.9062499 4.0937499,30.781055 4.09375,63.90625 C 4.0937501,97.031442 30.96856,123.90625 64.09375,123.90625 C 94.382866,123.90625 119.43036,101.68999 123.5,72.5625 L 88.46875,67.53125 C 86.844066,79.492379 76.538676,88.5 64.09375,88.5 C 50.522122,88.499999 39.5,77.477881 39.5,63.90625 C 39.500001,50.334616 50.522119,39.3125 64.09375,39.3125 C 70.879568,39.312499 77.015044,42.077544 81.46875,46.53125 L 71.141571,56.858429 C 69.085701,58.914299 68.897846,60.164209 69.461963,61.473958 C 69.970531,62.654737 70.982695,64 73.671688,64 L 117.625,64 C 121.47089,64 124.08399,61.345621 124.08399,57.749869 L 124.01512,13.671851 C 124.01512,11.191096 123.11261,9.8286218 121.83365,9.3222363 C 120.51933,8.8018514 118.80044,9.1995576 116.72656,11.273436 L 106.53125,21.46875 C 95.671902,10.609399 80.656349,3.90625 64.09375,3.90625 z "
id="path3749"
sodipodi:nodetypes="cssccsssccsccccsccc" />
<path
sodipodi:nodetypes="cssccsssccsccccsccc"
id="path3656"
d="M 64.09375,3.90625 C 30.968558,3.9062499 4.0937499,30.781055 4.09375,63.90625 C 4.0937501,97.031442 30.96856,123.90625 64.09375,123.90625 C 94.382866,123.90625 119.43036,101.68999 123.5,72.5625 L 88.46875,67.53125 C 86.844066,79.492379 76.538676,88.5 64.09375,88.5 C 50.522122,88.499999 39.5,77.477881 39.5,63.90625 C 39.500001,50.334616 50.522119,39.3125 64.09375,39.3125 C 70.879568,39.312499 77.015044,42.077544 81.46875,46.53125 L 71.141571,56.858429 C 69.085701,58.914299 68.897846,60.164209 69.461963,61.473958 C 69.970531,62.654737 70.982695,64 73.671688,64 L 117.625,64 C 121.47089,64 124.08399,61.345621 124.08399,57.749869 L 124.01512,13.671851 C 124.01512,11.191096 123.11261,9.8286218 121.83365,9.3222363 C 120.51933,8.8018514 118.80044,9.1995576 116.72656,11.273436 L 106.53125,21.46875 C 95.671902,10.609399 80.656349,3.90625 64.09375,3.90625 z "
style="opacity:0.68913861;fill:url(#radialGradient3658);fill-opacity:1;stroke:none;stroke-width:2;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dashoffset:1.08779998;stroke-opacity:1" />
<path
style="opacity:1;fill:url(#radialGradient3654);fill-opacity:1;stroke:none;stroke-width:2;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dashoffset:1.08779998;stroke-opacity:1"
d="M 64.09375,3.90625 C 30.968558,3.9062499 4.0937499,30.781055 4.09375,63.90625 C 4.0937501,97.031442 30.96856,123.90625 64.09375,123.90625 C 94.382866,123.90625 119.43036,101.68999 123.5,72.5625 L 88.46875,67.53125 C 86.844066,79.492379 76.538676,88.5 64.09375,88.5 C 50.522122,88.499999 39.5,77.477881 39.5,63.90625 C 39.500001,50.334616 50.522119,39.3125 64.09375,39.3125 C 70.879568,39.312499 77.015044,42.077544 81.46875,46.53125 L 71.141571,56.858429 C 69.085701,58.914299 68.897846,60.164209 69.461963,61.473958 C 69.970531,62.654737 70.982695,64 73.671688,64 L 117.625,64 C 121.47089,64 124.08399,61.345621 124.08399,57.749869 L 124.01512,13.671851 C 124.01512,11.191096 123.11261,9.8286218 121.83365,9.3222363 C 120.51933,8.8018514 118.80044,9.1995576 116.72656,11.273436 L 106.53125,21.46875 C 95.671902,10.609399 80.656349,3.90625 64.09375,3.90625 z "
id="path3643"
sodipodi:nodetypes="cssccsssccsccccsccc" />
<path
sodipodi:nodetypes="cssccsssccsccccsccc"
id="path3742"
d="M 64.09375,3.90625 C 30.968558,3.9062499 4.0937499,30.781055 4.09375,63.90625 C 4.0937501,97.031442 30.96856,123.90625 64.09375,123.90625 C 94.382866,123.90625 119.43036,101.68999 123.5,72.5625 L 88.46875,67.53125 C 86.844066,79.492379 76.538676,88.5 64.09375,88.5 C 50.522122,88.499999 39.5,77.477881 39.5,63.90625 C 39.500001,50.334616 50.522119,39.3125 64.09375,39.3125 C 70.879568,39.312499 77.015044,42.077544 81.46875,46.53125 L 71.141571,56.858429 C 69.085701,58.914299 68.897846,60.164209 69.461963,61.473958 C 69.970531,62.654737 70.982695,64 73.671688,64 L 117.625,64 C 121.47089,64 124.08399,61.345621 124.08399,57.749869 L 124.01512,13.671851 C 124.01512,11.191096 123.11261,9.8286218 121.83365,9.3222363 C 120.51933,8.8018514 118.80044,9.1995576 116.72656,11.273436 L 106.53125,21.46875 C 95.671902,10.609399 80.656349,3.90625 64.09375,3.90625 z "
style="opacity:0.79775277;fill:url(#radialGradient3744);fill-opacity:1;stroke:none;stroke-width:2;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dashoffset:1.08779998;stroke-opacity:1" />
<path
style="opacity:0.74531836;fill:url(#radialGradient3767);fill-opacity:1;stroke:none;stroke-width:2;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dashoffset:1.08779998;stroke-opacity:1;filter:url(#filter3630)"
d="M 64.09375,4.20625 C 30.968558,4.2062499 4.0937499,30.781055 4.09375,63.90625 C 4.0937501,97.031442 30.96856,123.90625 64.09375,123.90625 C 94.382866,123.90625 119.43036,101.68999 123.5,72.5625 L 88.46875,67.53125 C 86.844066,79.492379 76.538676,88.5 64.09375,88.5 C 50.522122,88.499999 39.5,77.477881 39.5,63.90625 C 39.500001,50.334616 50.522119,39.3125 64.09375,39.3125 C 70.879568,39.312499 77.015044,42.077544 81.46875,46.53125 L 71.141571,56.858429 C 69.085701,58.914299 68.897846,60.164209 69.461963,61.473958 C 69.970531,62.654737 70.982695,64 73.671688,64 L 117.625,64 C 121.47089,64 124.08399,61.345621 124.08399,57.749869 L 124.01512,13.671851 C 124.01512,11.191096 123.11261,9.8286218 121.83365,9.3222363 C 120.51933,8.8018514 118.80044,9.1995576 116.72656,11.273436 L 106.53125,21.46875 C 95.671902,10.609399 80.656349,4.20625 64.09375,4.20625 z "
id="path3759"
sodipodi:nodetypes="cssccsssccsccccsccc" />
<path
sodipodi:nodetypes="cccczc"
id="path3660"
d="M 117.6875,10.75 L 118.625,15.125 L 119.875,16 L 123.875,13.375 C 124.12188,11.651249 123.52383,10.027571 121.9375,9.3749999 C 120.35116,8.7224285 118.77622,9.5017032 117.6875,10.75 z "
style="opacity:0.82022472;fill:url(#linearGradient3676);fill-opacity:1;fill-rule:evenodd;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;filter:url(#filter3738)" />
<path
transform="matrix(0,1,1,0,60.363582,-60.363586)"
style="opacity:0.82022472;fill:url(#linearGradient3771);fill-opacity:1;fill-rule:evenodd;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;filter:url(#filter3738)"
d="M 117.6875,10.75 L 119.875,13.875 L 120.375,13.75 L 123.875,13.375 C 124.12188,11.651249 123.52383,10.027571 121.9375,9.3749999 C 120.35116,8.7224285 118.77622,9.5017032 117.6875,10.75 z "
id="path3769"
sodipodi:nodetypes="cccczc" />
<path
id="path3494"
d="M 64.09375,7.90625 C 33.132052,7.9062499 8.0937499,32.944549 8.09375,63.90625 C 8.0937501,94.867948 33.132054,119.90625 64.09375,119.90625 C 91.026646,119.90625 113.21548,101.0995 118.625,75.90625 L 91.5,72.03125 C 88.061436,83.928551 77.059621,92.5 64.09375,92.5 C 48.356404,92.499999 35.5,79.643599 35.5,63.90625 C 35.500001,48.168899 48.356402,35.3125 64.09375,35.3125 C 71.966166,35.312499 79.145304,38.520304 84.3125,43.6875 C 85.071964,44.438909 85.499997,45.462886 85.5,46.53125 C 85.5,47.599614 85.071964,48.623591 84.3125,49.375 L 73.6875,60 L 117.625,60 C 119.63039,60 120.09375,59.407836 120.09375,57.75 L 120,13.65625 L 109.375,24.3125 C 108.62359,25.071964 107.59961,25.5 106.53125,25.5 C 105.46289,25.5 104.43891,25.071964 103.6875,24.3125 C 93.549835,14.174833 79.577106,7.90625 64.09375,7.90625 z "
style="opacity:1;fill:url(#radialGradient3524);fill-opacity:1;stroke:none;stroke-width:4;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:1.08779998;stroke-opacity:1" />
<path
style="opacity:1;fill:url(#radialGradient3535);fill-opacity:1;stroke:none;stroke-width:4;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:1.08779998;stroke-opacity:1"
d="M 64.09375,7.90625 C 33.132052,7.9062499 8.0937499,32.944549 8.09375,63.90625 C 8.0937501,94.867948 33.132054,119.90625 64.09375,119.90625 C 91.026646,119.90625 113.21548,101.0995 118.625,75.90625 L 91.5,72.03125 C 88.061436,83.928551 77.059621,92.5 64.09375,92.5 C 48.356404,92.499999 35.5,79.643599 35.5,63.90625 C 35.500001,48.168899 48.356402,35.3125 64.09375,35.3125 C 71.966166,35.312499 79.145304,38.520304 84.3125,43.6875 C 85.071964,44.438909 85.499997,45.462886 85.5,46.53125 C 85.5,47.599614 85.071964,48.623591 84.3125,49.375 L 73.6875,60 L 117.625,60 C 119.63039,60 120.09375,59.407836 120.09375,57.75 L 120,13.65625 L 109.375,24.3125 C 108.62359,25.071964 107.59961,25.5 106.53125,25.5 C 105.46289,25.5 104.43891,25.071964 103.6875,24.3125 C 93.549835,14.174833 79.577106,7.90625 64.09375,7.90625 z "
id="path3526" />
<path
sodipodi:nodetypes="csccssccccccscc"
id="path3537"
d="M 64.09375,7.90625 C 33.132052,7.9062499 8.0937499,32.944549 8.09375,63.90625 C 8.09375,64.474122 8.1082724,65.029981 8.125,65.59375 C 14.11447,66.271402 20.266218,66.74388 26.53125,67 C 26.260548,56.540958 30.202859,46.025084 38.34375,38.21875 C 53.683067,23.509813 78.072313,24.004431 92.78125,39.34375 C 95.545099,42.226046 97.537852,45.032117 99.34375,48.59375 L 78.84375,59 L 98,59 C 105.9282,56.973373 113.18621,55.563033 120.09375,52.8125 L 120,13.65625 L 109.375,24.3125 C 108.62359,25.071964 107.59961,25.5 106.53125,25.5 C 105.46289,25.5 104.43891,25.071964 103.6875,24.3125 C 93.549835,14.174833 79.577106,7.90625 64.09375,7.90625 z "
style="opacity:1;fill:url(#linearGradient3548);fill-opacity:1;stroke:none;stroke-width:8;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:1.08779998;stroke-opacity:1" />
<path
sodipodi:nodetypes="cscscscccccccccccssssssccscscc"
id="path3553"
d="M 64.099866,7.9087646 C 33.138176,7.9087644 8.0998661,32.947063 8.0998661,63.908764 C 8.0998761,64.087476 8.0973761,64.263059 8.0998661,64.440014 C 8.3531061,33.696509 33.295846,8.9087645 64.099866,8.9087646 C 79.583236,8.9087645 93.555946,15.177347 103.69361,25.315014 C 104.44503,26.074479 105.469,26.502514 106.53736,26.502514 C 107.60573,26.502515 108.6297,26.074478 109.38111,25.315014 L 119.50611,15.158764 L 119.99986,52.708764 C 113.09232,55.459294 105.43431,56.569624 97.506116,58.596264 L 78.849866,59.002514 L 98.006116,59.002514 C 105.93431,56.975884 113.19232,55.565544 120.09986,52.815014 L 120.00611,14.658764 L 120.00611,13.658764 L 119.50611,14.158764 L 109.38111,24.315014 C 108.62971,25.074479 107.60572,25.502514 106.53736,25.502514 C 105.46901,25.502515 104.44502,25.074478 103.69361,24.315014 C 103.68314,24.304548 103.67283,24.294222 103.66236,24.283764 C 103.60999,24.231473 103.55869,24.179598 103.50611,24.127514 C 102.93231,23.559643 102.35524,23.012364 101.75611,22.471264 C 101.67459,22.397145 101.58807,22.326157 101.50611,22.252514 C 91.590066,13.342335 78.496526,7.9087646 64.099866,7.9087646 z M 63.443616,27.127514 C 54.205446,27.378034 45.040176,30.920194 37.849866,37.815014 C 30.217786,45.133448 26.722316,55.187931 26.537366,65.033764 C 26.777246,55.231884 30.717786,45.539698 38.349866,38.221264 C 51.665996,25.452364 71.803196,24.123207 86.506116,34.033764 C 79.627056,29.22869 71.518656,26.908534 63.443616,27.127514 z "
style="opacity:1;fill:url(#linearGradient3581);fill-opacity:1;stroke:none;stroke-width:8;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:1.08779998;stroke-opacity:1" />
<path
id="path3603"
d="M 63.59375,7.90625 C 32.63205,7.9062499 7.59375,32.944549 7.59375,63.90625 C 7.59375,94.867948 32.63205,119.90625 63.59375,119.90625 C 90.52665,119.90625 112.71548,101.0995 118.125,75.90625 L 91,72.03125 C 87.56144,83.928551 76.55962,92.5 63.59375,92.5 C 47.8564,92.499999 35,79.643599 35,63.90625 C 35,48.168899 47.8564,35.3125 63.59375,35.3125 C 71.46617,35.312499 78.6453,38.520304 83.8125,43.6875 C 84.57196,44.438909 85,45.462886 85,46.53125 C 85,47.599614 84.57196,48.623591 83.8125,49.375 L 73.1875,60 L 117.125,60 C 119.13039,60 119.59375,59.407836 119.59375,57.75 L 119.5,13.65625 L 108.875,24.3125 C 108.12359,25.071964 107.09961,25.5 106.03125,25.5 C 104.96289,25.5 103.93891,25.071964 103.1875,24.3125 C 93.04984,14.174833 79.07711,7.90625 63.59375,7.90625 z "
style="opacity:1;fill:none;fill-opacity:1;stroke:url(#linearGradient3567);stroke-width:1;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:1.08779998;stroke-opacity:1;filter:url(#filter3639)" />
</g>
</g>
</svg>


View File

@@ -690,7 +690,7 @@ class DeviceBooksModel(BooksModel):
             dt = item.datetime
             dt = datetime(*dt[0:6])
             dt = dt - timedelta(seconds=time.timezone) + timedelta(hours=time.daylight)
-            data[_('Timestamp')] = dt.ctime()
+            data[_('Timestamp')] = dt.strftime('%a %b %d %H:%M:%S %Y')
             data[_('Tags')] = ', '.join(item.tags)
             self.emit(SIGNAL('new_bookdisplay_data(PyQt_PyObject)'), data)

View File

@@ -13,7 +13,8 @@
    <string>Configure Viewer</string>
   </property>
   <property name="windowIcon" >
-   <iconset resource="../images.qrc" >:/images/config.svg</iconset>
+   <iconset resource="../images.qrc" >
+    <normaloff>:/images/config.svg</normaloff>:/images/config.svg</iconset>
   </property>
   <layout class="QGridLayout" >
    <item row="0" column="0" colspan="2" >
@@ -39,20 +40,17 @@
       <enum>Qt::Horizontal</enum>
      </property>
      <property name="standardButtons" >
-      <set>QDialogButtonBox::Cancel|QDialogButtonBox::NoButton|QDialogButtonBox::Ok</set>
+      <set>QDialogButtonBox::Cancel|QDialogButtonBox::Ok</set>
      </property>
     </widget>
    </item>
    <item row="2" column="0" colspan="2" >
    <widget class="QLabel" name="label" >
-    <property name="windowModality" >
-     <enum>Qt::WindowModal</enum>
-    </property>
     <property name="frameShape" >
      <enum>QFrame::Box</enum>
     </property>
     <property name="text" >
-     <string>&lt;b>Changes will only take affect after a restart.</string>
+     <string>&lt;b&gt;Changes will only take effect after a restart.&lt;/b&gt;</string>
     </property>
     <property name="textFormat" >
      <enum>Qt::RichText</enum>

View File

@@ -60,6 +60,9 @@ class Main(MainWindow, Ui_MainWindow):
     def __init__(self, single_instance, opts, parent=None):
         MainWindow.__init__(self, opts, parent)
+        # Initialize fontconfig in a separate thread as this can be a lengthy
+        # process if run for the first time on this machine
+        self.fc = __import__('calibre.utils.fontconfig', fromlist=1)
         self.single_instance = single_instance
         if self.single_instance is not None:
             self.connect(self.single_instance, SIGNAL('message_received(PyQt_PyObject)'),
@@ -646,7 +649,7 @@ class Main(MainWindow, Ui_MainWindow):
         metadata = iter(metadata)
         _files = self.library_view.model().get_preferred_formats(rows,
                             self.device_manager.device_class.FORMATS, paths=True)
-        files = [f.name for f in _files]
+        files = [getattr(f, 'name', None) for f in _files]
         bad, good, gf, names = [], [], [], []
         for f in files:
             mi = metadata.next()
@@ -1280,12 +1283,12 @@ path_to_ebook to the database.
 if __name__ == '__main__':
     try:
         sys.exit(main())
-    except:
+    except Exception, err:
         if not iswindows: raise
+        tb = traceback.format_exc()
         from PyQt4.QtGui import QErrorMessage
         logfile = os.path.join(os.path.expanduser('~'), 'calibre.log')
         if os.path.exists(logfile):
-            log = open(logfile).read()
-            if log.strip():
-                d = QErrorMessage()
-                d.showMessage(log)
+            log = open(logfile).read().decode('utf-8', 'ignore')
+            d = QErrorMessage('<b>Error:</b>%s<br><b>Traceback:</b><br>%s<b>Log:</b><br>'%(unicode(err), unicode(tb), log))
+            d.exec_()

View File

@@ -440,7 +440,7 @@
  <action name="action_add" >
   <property name="icon" >
    <iconset resource="images.qrc" >
-    <normaloff>:/images/plus.svg</normaloff>:/images/plus.svg</iconset>
+    <normaloff>:/images/add_book.svg</normaloff>:/images/add_book.svg</iconset>
   </property>
   <property name="text" >
    <string>Add books</string>

View File

@@ -172,6 +172,7 @@ def do_add(db, paths, one_book_per_directory, recurse, add_duplicates):
         for mi, formats in dir_dups:
             db.import_book(mi, formats)
     else:
+        if dir_dups or file_duplicates:
         print >>sys.stderr, _('The following books were not added as they already exist in the database (see --duplicates option):')
         for mi, formats in dir_dups:
             title = mi.title

View File

@@ -5,7 +5,7 @@ This module provides a thin ctypes based wrapper around libunrar.
 See ftp://ftp.rarlabs.com/rar/unrarsrc-3.7.5.tar.gz
 """
-import os, ctypes
+import os, ctypes, sys
 from ctypes import Structure, c_char_p, c_uint, c_void_p, POINTER, \
                    byref, c_wchar_p, c_int, c_char, c_wchar
 from StringIO import StringIO
@@ -18,6 +18,8 @@ if iswindows:
     Structure._pack_ = 1
     _librar_name = 'unrar'
     cdll = ctypes.windll
+if hasattr(sys, 'frozen') and iswindows:
+    _libunrar = cdll.LoadLibrary(os.path.join(os.path.dirname(sys.executable), 'unrar.dll'))
 _libunrar = load_library(_librar_name, cdll)
 RAR_OM_LIST = 0

View File

@@ -1,8 +1,41 @@
+#!/usr/bin/env python
 __license__ = 'GPL v3'
-__copyright__ = '2008, Kovid Goyal <kovid at kovidgoyal.net>'
+__copyright__ = '2008, Kovid Goyal kovid@kovidgoyal.net'
+__docformat__ = 'restructuredtext en'
 
-import os, zipfile
+import os
 from cStringIO import StringIO
+from calibre.utils import zipfile
+
+def update(pathtozip, patterns, filepaths, names, compression=zipfile.ZIP_DEFLATED, verbose=True):
+    '''
+    Update files in the zip file at `pathtozip` matching the given
+    `patterns` with the given `filepaths`. If more than
+    one file matches, all of the files are replaced.
+
+    :param patterns:    A list of compiled regular expressions
+    :param filepaths:   A list of paths to the replacement files. Must have the
+                        same length as `patterns`.
+    :param names:       A list of archive names for each file in filepaths.
+                        A name can be `None` in which case the name of the existing
+                        file in the archive is used.
+    :param compression: The compression to use when replacing files. Can be
+                        either `zipfile.ZIP_DEFLATED` or `zipfile.ZIP_STORED`.
+    '''
+    assert len(patterns) == len(filepaths) == len(names)
+    z = zipfile.ZipFile(pathtozip, mode='a')
+    for name in z.namelist():
+        for pat, fname, new_name in zip(patterns, filepaths, names):
+            if pat.search(name):
+                if verbose:
+                    print 'Updating %s with %s' % (name, fname)
+                if new_name is None:
+                    z.replace(fname, arcname=name, compress_type=compression)
+                else:
+                    z.delete(name)
+                    z.write(fname, new_name, compress_type=compression)
+                break
+    z.close()
+
 def extract(filename, dir):
     """

View File

@@ -167,7 +167,9 @@ def setup_completion(fatal_errors):
         f = open_file('/etc/bash_completion.d/libprs500')
         f.close()
         os.remove(f.name)
+        manifest = []
         f = open_file('/etc/bash_completion.d/calibre')
+        manifest.append(f.name)
         f.write('# calibre Bash Shell Completion\n')
         f.write(opts_and_exts('html2lrf', htmlop,
@@ -275,18 +277,22 @@ complete -o nospace -F _prs500 prs500
         print 'failed'
         import traceback
         traceback.print_exc()
+    return manifest
 
 def setup_udev_rules(group_file, reload, fatal_errors):
     print 'Trying to setup udev rules...'
+    manifest = []
     sys.stdout.flush()
     groups = open(group_file, 'rb').read()
     group = 'plugdev' if 'plugdev' in groups else 'usb'
     udev = open_file('/etc/udev/rules.d/95-calibre.rules')
+    manifest.append(udev.name)
     udev.write('''# Sony Reader PRS-500\n'''
                '''BUS=="usb", SYSFS{idProduct}=="029b", SYSFS{idVendor}=="054c", MODE="660", GROUP="%s"\n'''%(group,)
               )
     udev.close()
     fdi = open_file('/usr/share/hal/fdi/policy/20thirdparty/10-calibre.fdi')
+    manifest.append(fdi.name)
     fdi.write('<?xml version="1.0" encoding="UTF-8"?>\n\n<deviceinfo version="0.2">\n')
     for cls in DEVICES:
         fdi.write(\
@@ -326,6 +332,7 @@ def setup_udev_rules(group_file, reload, fatal_errors):
             if fatal_errors:
                 raise Exception("Couldn't reload udev, you may have to reboot")
             print >>sys.stderr, "Couldn't reload udev, you may have to reboot"
+    return manifest
 
 def option_parser():
     from optparse import OptionParser
@@ -340,6 +347,8 @@ def option_parser():
                       help='If set, do not check if we are root.')
     parser.add_option('--make-errors-fatal', action='store_true', default=False,
                       dest='fatal_errors', help='If set die on errors.')
+    parser.add_option('--save-manifest-to', default=None,
+                      help='Save a manifest of all installed files to the specified location')
     return parser
 
 def install_man_pages(fatal_errors):
@@ -350,6 +359,7 @@ def install_man_pages(fatal_errors):
     f = open_file('/tmp/man_extra', 'wb')
     f.write('[see also]\nhttp://%s.kovidgoyal.net\n'%__appname__)
     f.close()
+    manifest = []
     for src in entry_points['console_scripts']:
         prog = src[:src.index('=')].strip()
         if prog in ('prs500', 'pdf-meta', 'epub-meta', 'lit-meta',
@@ -360,6 +370,7 @@ def install_man_pages(fatal_errors):
                     '--section', '1', '--no-info', '--include',
                     f.name, '--manual', __appname__)
         manfile = os.path.join(manpath, prog+'.1'+__appname__+'.bz2')
+        print '\tInstalling MAN page for', prog
         try:
             p = subprocess.Popen(help2man, stdout=subprocess.PIPE)
         except OSError, err:
@@ -372,10 +383,10 @@ def install_man_pages(fatal_errors):
         if not raw.strip():
             print 'Unable to create MAN page for', prog
             continue
-        open_file(manfile).write(compress(raw))
+        f2 = open_file(manfile)
+        manifest.append(f2.name)
+        f2.write(compress(raw))
+    return manifest
 
 def post_install():
     parser = option_parser()
@@ -387,19 +398,21 @@ def post_install():
     global use_destdir
     use_destdir = opts.destdir
+    manifest = []
-    setup_udev_rules(opts.group_file, not opts.dont_reload, opts.fatal_errors)
-    setup_completion(opts.fatal_errors)
+    manifest += setup_udev_rules(opts.group_file, not opts.dont_reload, opts.fatal_errors)
+    manifest += setup_completion(opts.fatal_errors)
     setup_desktop_integration(opts.fatal_errors)
-    install_man_pages(opts.fatal_errors)
+    manifest += install_man_pages(opts.fatal_errors)
     try:
         from PyQt4 import Qt
-        if Qt.PYQT_VERSION < int('0x40301', 16):
-            print 'WARNING: You need PyQt >= 4.3.1 for the GUI. You have', Qt.PYQT_VERSION_STR, '\nYou may experience crashes or other strange behavior.'
+        if Qt.PYQT_VERSION < int('0x40402', 16):
+            print 'WARNING: You need PyQt >= 4.4.2 for the GUI. You have', Qt.PYQT_VERSION_STR, '\nYou may experience crashes or other strange behavior.'
     except ImportError:
         print 'WARNING: You do not have PyQt4 installed. The GUI will not work.'
+    if opts.save_manifest_to:
+        open(opts.save_manifest_to, 'wb').write('\n'.join(manifest)+'\n')
 
 VIEWER = '''\
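
Taken together, these changes make each setup_* function return the list of absolute paths it created, and --save-manifest-to dumps that list one path per line so a binary installer can later build an uninstaller from it. An illustrative (not exhaustive) manifest written this way might contain:

    /etc/udev/rules.d/95-calibre.rules
    /usr/share/hal/fdi/policy/20thirdparty/10-calibre.fdi
    /etc/bash_completion.d/calibre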

View File

@@ -221,6 +221,7 @@ def extract_tarball(tar, destdir):
     tarfile.open(tar, 'r').extractall(destdir)
 
 def create_launchers(destdir, bindir='/usr/bin'):
+    manifest = []
     for launcher in open(os.path.join(destdir, 'manifest')).readlines():
         if 'postinstall' in launcher:
             continue
@@ -229,24 +230,37 @@ def create_launchers(destdir, bindir='/usr/bin'):
         print 'Creating', lp
         open(lp, 'wb').write(LAUNCHER%(destdir, launcher))
         os.chmod(lp, stat.S_IXUSR|stat.S_IXOTH|stat.S_IXGRP|stat.S_IREAD|stat.S_IWRITE|stat.S_IRGRP|stat.S_IROTH)
+        manifest.append(lp)
+    return manifest
 
 def do_postinstall(destdir):
     cwd = os.getcwd()
+    t = tempfile.NamedTemporaryFile()
     try:
         os.chdir(destdir)
         os.environ['LD_LIBRARY_PATH'] = destdir+':'+os.environ.get('LD_LIBRARY_PATH', '')
-        subprocess.call((os.path.join(destdir, 'calibre_postinstall'),))
+        subprocess.call((os.path.join(destdir, 'calibre_postinstall'), '--save-manifest-to', t.name))
     finally:
         os.chdir(cwd)
+    t.seek(0)
+    return list(t.readlines())
 
 def download_tarball():
+    try:
         pb = ProgressBar(TerminalController(sys.stdout), 'Downloading calibre...')
+    except ValueError:
+        print 'Downloading calibre...'
+        pb = None
     src = urllib2.urlopen(MOBILEREAD+'calibre-%version-i686.tar.bz2')
     size = int(src.info()['content-length'])
     f = tempfile.NamedTemporaryFile()
     while f.tell() < size:
         f.write(src.read(4*1024))
-        pb.update(f.tell()/float(size))
+        percent = f.tell()/float(size)
+        if pb is not None:
+            pb.update(percent)
+        else:
+            print '%d%%, '%int(percent*100),
     f.seek(0)
     return f
@@ -263,8 +277,37 @@ def main(args=sys.argv):
     print 'Extracting...'
     extract_tarball(f, destdir)
-    create_launchers(destdir)
-    do_postinstall(destdir)
+    manifest = create_launchers(destdir)
+    manifest += do_postinstall(destdir)
+    manifest += ['/usr/bin/calibre-uninstall']
+    UNINSTALLER = '''\
+#!/usr/bin/env python
+import os, sys
+if os.geteuid() != 0:
+    print 'You must run this uninstaller as root'
+    sys.exit(0)
+manifest = %s
+failures = []
+for path in manifest:
+    print 'Deleting', path
+    try:
+        os.unlink(path)
+    except:
+        failures.append(path)
+print 'Uninstalling complete.'
+if failures:
+    print 'Failed to remove the following files:'
+    for f in failures: print f
+'''%repr(manifest)
+    open('/usr/bin/calibre-uninstall', 'wb').write(UNINSTALLER)
+    os.chmod('/usr/bin/calibre-uninstall',
+             stat.S_IXUSR|stat.S_IXOTH|stat.S_IXGRP|stat.S_IREAD|stat.S_IWRITE|stat.S_IRGRP|stat.S_IROTH)
+    print 'You can uninstall calibre by running sudo calibre-uninstall'
     return 0

View File

@@ -131,7 +131,14 @@ Why does |app| show only some of my fonts on OS X?
 The graphical user interface of |app| is not starting on Windows?
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-If you've never used the graphical user interface before, try deleting the file library1.db (it will be somewhere under :file:`C:\\Documents and Settings` on Windows XP and :file:`C:\\Users` on Windows Vista. If that doesn't fix the problem, locate the file calibre.log (in the same places as library1.db) and post its contents in a help message on the `Forums <http://calibre.kovidgoyal.net/discussion>`_.
+If you've never used the graphical user interface before, try deleting the file library1.db (it will be somewhere under :file:`C:\\Documents and Settings` on Windows XP and :file:`C:\\Users` on Windows Vista). If that doesn't fix the problem, locate the file calibre.log (in the same places as library1.db) and post its contents in a help message on the `Forums <http://calibre.kovidgoyal.net/discussion>`_. If you can't find either file, try using the Windows find feature to search for them. If the files don't exist on your system, try the following:
+
+Start a command prompt (press the Windows key and R and type cmd.exe in the run dialog). At the command prompt type the command `calibre-debug` and press enter. You will see a new, green prompt. At this prompt, type the following two lines::
+
+    from calibre.gui2.main import main
+    main()
+
+Post any output you see when asking for help.
 
 I want some feature added to |app|. What can I do?
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

View File

@@ -121,6 +121,7 @@ class WorkerMother(object):
     def __init__(self):
         ext = 'windows' if iswindows else 'osx' if isosx else 'linux'
         self.os = os # Needed incase cleanup called when interpreter is shutting down
+        self.env = {}
         if iswindows:
             self.executable = os.path.join(os.path.dirname(sys.executable),
                    'calibre-parallel.exe' if isfrozen else 'Scripts\\calibre-parallel.exe')
@@ -135,13 +136,14 @@
             self.prefix += 'import sys; sys.frameworks_dir = "%s"; sys.frozen = "macosx_app"; '%fd
             self.prefix += 'sys.path.insert(0, %s); '%repr(sp)
-            self.env = {}
             if fd not in os.environ['PATH']:
                 self.env['PATH'] = os.environ['PATH']+':'+fd
             self.env['PYTHONHOME'] = resources
         else:
             self.executable = os.path.join(getattr(sys, 'frozen_path'), 'calibre-parallel') \
                                 if isfrozen else 'calibre-parallel'
+            if isfrozen:
+                self.env['LD_LIBRARY_PATH'] = getattr(sys, 'frozen_path') + ':' + os.environ.get('LD_LIBRARY_PATH', '')
 
         self.spawn_worker_windows = lambda arg : self.spawn_free_spirit_windows(arg, type='worker')
         self.spawn_worker_linux = lambda arg : self.spawn_free_spirit_linux(arg, type='worker')
@@ -176,6 +178,7 @@
     def get_env(self):
         env = dict(os.environ)
         env['CALIBRE_WORKER'] = '1'
+        env['ORIGWD'] = os.path.abspath(os.getcwd())
         if hasattr(self, 'env'):
             env.update(self.env)
         return env
@@ -189,7 +192,8 @@
     def spawn_free_spirit_linux(self, arg, type='free_spirit'):
         cmdline = [self.executable, arg]
-        child = WorkerStatus(subprocess.Popen(cmdline, env=self.get_env()))
+        child = WorkerStatus(subprocess.Popen(cmdline,
+                        env=self.get_env(), cwd=getattr(sys, 'frozen_path', None)))
         atexit.register(self.cleanup_child_linux, child)
         return child
@@ -607,7 +611,7 @@ class BufferedSender(object):
         self.wbuf.append(msg)
 
     def send(self):
-        if select([self.socket], [], [], 0)[0]:
+        if callable(select) and select([self.socket], [], [], 0)[0]:
             msg = read(self.socket)
             if msg == 'PING:':
                 write(self.socket, 'OK')

View File

@@ -0,0 +1,117 @@
#!/usr/bin/env python
__license__ = 'GPL v3'
__copyright__ = '2008, Kovid Goyal kovid@kovidgoyal.net'
__docformat__ = 'restructuredtext en'
'''
Plugin to make the commit command automatically close bugs when the commit
message contains `Fix #number` or `Implement #number`. Also updates the commit
message with the summary of the closed bug. It also set the `--fixes` metadata
appropriately. Currently only works with a Trac bug repository with the XMLRPC
plugin enabled.
To use copy this file into `~/.bazaar/plugins` and add the following to branch.conf
in the working tree you want to use it with::
trac_reponame_url = <url>
trac_reponame_username = <username>
trac_reponame_password = <password>
'''
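# An illustrative branch.conf entry for a Trac instance nicknamed 'calibre'
# (all values here are hypothetical; the nick between 'trac_' and '_url' is
# what ends up in the --fixes metadata, e.g. calibre:1234):
#
#   trac_calibre_url = https://bugs.example.com/trac
#   trac_calibre_username = kovid
#   trac_calibre_password = secret
#
# With this in place, committing with a message like "Fix #1234" looks up the
# ticket summary, rewrites the message to "Fix #1234 (<summary>)" and closes
# the ticket once the commit succeeds.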
import os, re, xmlrpclib
from bzrlib.builtins import cmd_commit as _cmd_commit, tree_files
from bzrlib import branch
import bzrlib
class cmd_commit(_cmd_commit):
@classmethod
def trac_url(self, username, password, url):
return url.replace('//', '//%s:%s@'%(username, password))+'/login/xmlrpc'
def get_trac_summary(self, bug, url):
print 'Getting bug summary for bug #%s'%bug
server = xmlrpclib.ServerProxy(url)
try:
attributes = server.ticket.get(int(bug))[-1]
return attributes['summary']
except:
raise
pass
def expand_bug(self, msg, nick, config, bug_tracker, type='trac'):
prefix = '%s_%s_'%(type, nick)
username = config.get_user_option(prefix+'username')
password = config.get_user_option(prefix+'password')
close_bug = config.get_user_option(prefix+'pattern')
if close_bug is None:
close_bug = r'(Fix|Implement)\s+#(\d+)'
close_bug_pat = re.compile(close_bug, re.IGNORECASE)
match = close_bug_pat.search(msg)
if not match:
return msg, None, None, None
action, bug = match.group(1), match.group(2)
summary = ''
if type == 'trac':
url = self.trac_url(username, password, bug_tracker)
summary = self.get_trac_summary(bug, url)
if summary:
msg = msg.replace('#%s'%bug, '#%s (%s)'%(bug, summary))
return msg, bug, url, action
def get_bugtracker(self, basedir, type='trac'):
config = os.path.join(basedir, '.bzr', 'branch', 'branch.conf')
bugtracker, nick = None, None
if os.access(config, os.R_OK):
for line in open(config).readlines():
match = re.search(r'%s_(\S+)_url\s*=\s*(\S+)'%type, line)
if match:
nick, bugtracker = match.group(1), match.group(2)
break
return nick, bugtracker
def expand_message(self, msg, tree):
nick, bugtracker = self.get_bugtracker(tree.basedir, type='trac')
if not bugtracker:
return msg
config = branch.Branch.open(tree.basedir).get_config()
msg, bug, url, action = self.expand_bug(msg, nick, config, bugtracker)
return msg, bug, url, action, nick, config
def run(self, message=None, file=None, verbose=False, selected_list=None,
unchanged=False, strict=False, local=False, fixes=None,
author=None, show_diff=False):
if message:
message, bug, url, action, nick, config = \
self.expand_message(message, tree_files(selected_list)[0])
if nick and bug and not fixes:
fixes = [nick+':'+bug]
ret = _cmd_commit.run(self, message=message, file=file, verbose=verbose,
selected_list=selected_list, unchanged=unchanged,
strict=strict, local=local, fixes=fixes,
author=author, show_diff=show_diff)
if message and bug and action and nick and config:
self.close_bug(bug, action, url, config)
return ret
def close_bug(self, bug, action, url, config):
print 'Closing bug #%s'% bug
nick = config.get_nickname()
suffix = config.get_user_option('bug_close_comment')
if suffix is None:
suffix = 'The fix will be in the next release.'
action = action+'ed'
msg = '%s in branch %s. %s'%(action, nick, suffix)
server = xmlrpclib.ServerProxy(url)
server.ticket.update(int(bug), msg,
{'status':'closed', 'resolution':'fixed'},
False)
bzrlib.commands.register_command(cmd_commit)

View File

@@ -1,7 +1,7 @@
 '''
 Trac Macro to generate an end use Changelog from the svn logs.
 '''
-import re, collections
+import re, collections, time
 
 from bzrlib import log as blog, branch
@@ -33,7 +33,8 @@ class ChangelogFormatter(blog.LogFormatter):
         if match:
             if self.current_entry is not None:
                 self.entries.append((self.current_entry, set(self.messages)))
-            self.current_entry = match.group(1)
+            timestamp = r.rev.timezone + r.rev.timestamp
+            self.current_entry = match.group(1) + time.strftime(' (%d %b, %Y)', time.gmtime(timestamp))
             self.messages = collections.deque()
         else:
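
For reference, the strftime call added above turns a revision's Unix timestamp into a parenthesised date suffix, e.g. (timestamp value illustrative, C locale month names):

    import time
    timestamp = 1216440000                                   # some revision time, in seconds
    time.strftime(' (%d %b, %Y)', time.gmtime(timestamp))    # -> ' (19 Jul, 2008)'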

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -128,30 +128,49 @@ lib.FcConfigParseAndLoad.restype = c_int
 lib.FcConfigBuildFonts.argtypes = [c_void_p]
 lib.FcConfigBuildFonts.restype = c_int
 
-# Initialize the fontconfig library. This has to be done manually
-# for the OS X bundle as it has its own private fontconfig.
-if hasattr(sys, 'frameworks_dir'):
-    config_dir = os.path.join(os.path.dirname(getattr(sys, 'frameworks_dir')), 'Resources', 'fonts')
-    if isinstance(config_dir, unicode):
-        config_dir = config_dir.encode(sys.getfilesystemencoding())
-    config = lib.FcConfigCreate()
-    if not lib.FcConfigParseAndLoad(config, os.path.join(config_dir, 'fonts.conf'), 1):
-        raise RuntimeError('Could not parse the fontconfig configuration')
-    if not lib.FcConfigBuildFonts(config):
-        raise RuntimeError('Could not build fonts')
-    if not lib.FcConfigSetCurrent(config):
-        raise RuntimeError('Could not set font config')
-elif not lib.FcInit():
-    raise RuntimeError(_('Could not initialize the fontconfig library'))
-
-def find_font_families(allowed_extensions=['ttf']):
+_init_error = None
+_initialized = False
+from threading import Timer
+
+def _do_init():
+    # Initialize the fontconfig library. This has to be done manually
+    # for the OS X bundle as it may have its own private fontconfig.
+    if hasattr(sys, 'frameworks_dir'):
+        config_dir = os.path.join(os.path.dirname(getattr(sys, 'frameworks_dir')), 'Resources', 'fonts')
+        if isinstance(config_dir, unicode):
+            config_dir = config_dir.encode(sys.getfilesystemencoding())
+        config = lib.FcConfigCreate()
+        if not lib.FcConfigParseAndLoad(config, os.path.join(config_dir, 'fonts.conf'), 1):
+            _init_error = 'Could not parse the fontconfig configuration'
+            return
+        if not lib.FcConfigBuildFonts(config):
+            _init_error = 'Could not build fonts'
+            return
+        if not lib.FcConfigSetCurrent(config):
+            _init_error = 'Could not set font config'
+            return
+    elif not lib.FcInit():
+        _init_error = _('Could not initialize the fontconfig library')
+        return
+    global _initialized
+    _initialized = True
+
+_init_timer = Timer(0.1, _do_init)
+_init_timer.start()
+
+def join():
+    _init_timer.join()
+    if _init_error is not None:
+        raise RuntimeError(_init_error)
+
+def find_font_families(allowed_extensions=['ttf', 'otf']):
     '''
     Return an alphabetically sorted list of font families available on the system.
 
     `allowed_extensions`: A list of allowed extensions for font file types. Defaults to
-    `['ttf']`. If it is empty, it is ignored.
+    `['ttf', 'otf']`. If it is empty, it is ignored.
     '''
+    join()
     allowed_extensions = [i.lower() for i in allowed_extensions]
 
     empty_pattern = lib.FcPatternCreate()
@@ -193,6 +212,7 @@ def files_for_family(family, normalize=True):
     they are a tuple (slant, weight) otherwise they are strings from the set
     `('normal', 'bold', 'italic', 'bi', 'light', 'li')`
     '''
+    join()
     if isinstance(family, unicode):
         family = family.encode(preferred_encoding)
     family_pattern = lib.FcPatternBuild(0, 'family', FcTypeString, family, 0)
@@ -268,6 +288,7 @@ def match(name, sort=False, verbose=False):
     decreasing closeness of matching.
     `verbose`: If `True` print debugging information to stdout
     '''
+    join()
     if isinstance(name, unicode):
         name = name.encode(preferred_encoding)
     pat = lib.FcNameParse(name)
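
The pattern introduced above is worth spelling out on its own: the slow fontconfig initialisation runs on a background threading.Timer started at import time, and every public function calls join() first, so callers only block if they need the library before it is ready. A stripped-down sketch of the same idea (expensive_setup() is a placeholder, not calibre code):

    from threading import Timer

    _init_error = None

    def _do_init():
        global _init_error
        try:
            expensive_setup()            # placeholder for the FcInit()/config work
        except Exception, err:
            _init_error = str(err)

    _init_timer = Timer(0.1, _do_init)   # fire ~100ms after import
    _init_timer.start()

    def join():
        # Called at the top of every public API function.
        _init_timer.join()
        if _init_error is not None:
            raise RuntimeError(_init_error)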

File diff suppressed because it is too large

View File

@@ -0,0 +1,523 @@
#!/usr/bin/env python
# encoding: utf-8
__license__ = 'GPL v3'
__copyright__ = '2008, Kovid Goyal kovid@kovidgoyal.net'
__docformat__ = 'restructuredtext en'
'''
A parser for search queries with a syntax very similar to that used by
the Google search engine.
For details on the search query syntax see :class:`SearchQueryParser`.
To use the parser, subclass :class:`SearchQueryParser` and implement the
methods :method:`SearchQueryParser.universal_set` and
:method:`SearchQueryParser.get_matches`. See for example :class:`Tester`.
If this module is run, it will perform a series of unit tests.
'''
import sys, string, operator
from calibre.utils.pyparsing import Keyword, Group, Forward, CharsNotIn, Suppress, \
OneOrMore, oneOf, CaselessLiteral, Optional, NoMatch
class SearchQueryParser(object):
'''
Parses a search query.
A search query consists of tokens. The tokens can be combined using
the `or`, `and` and `not` operators as well as grouped using parentheses.
When no operator is specified between two tokens, `and` is assumed.
Each token is a string of the form `location:query`. `location` is a string
from :member:`LOCATIONS`. It is optional. If it is omitted, it is assumed to
be `all`. `query` is an arbitrary string that must not contain parentheses.
If it contains whitespace, it should be quoted by enclosing it in `"` marks.
Examples::
* `Asimov` [search for the string "Asimov" in location `all`]
* `comments:"This is a good book"` [search for "This is a good book" in `comments`]
* `author:Asimov tag:unread` [search for books by Asimov that have been tagged as unread]
* `author:Asimov or author:Hardy` [search for books by Asimov or Hardy]
* `(author:Asimov or author:Hardy) and not tag:read` [search for unread books by Asimov or Hardy]
'''
LOCATIONS = [
'tag',
'title',
'author',
'publisher',
'series',
'comments',
'format',
'all',
]
@staticmethod
def run_tests(parser, result, tests):
failed = []
for test in tests:
print '\tTesting:', test[0],
res = parser.parseString(test[0])
if list(res.get(result, None)) == test[1]:
print 'OK'
else:
print 'FAILED:', 'Expected:', test[1], 'Got:', list(res.get(result, None))
failed.append(test[0])
return failed
def __init__(self, test=False):
self._tests_failed = False
# Define a token
locations = map(lambda x : CaselessLiteral(x)+Suppress(':'),
self.LOCATIONS)
location = NoMatch()
for l in locations:
location |= l
location = Optional(location, default='all')
word_query = CharsNotIn(string.whitespace + '()')
quoted_query = Suppress('"')+CharsNotIn('"')+Suppress('"')
query = quoted_query | word_query
Token = Group(location + query).setResultsName('token')
if test:
print 'Testing Token parser:'
failed = SearchQueryParser.run_tests(Token, 'token',
(
('tag:asd', ['tag', 'asd']),
('ddsä', ['all', 'ddsä']),
('"one two"', ['all', 'one two']),
('title:"one two"', ['title', 'one two']),
)
)
Or = Forward()
Parenthesis = Group(
Suppress('(') + Or + Suppress(')')
).setResultsName('parenthesis') | Token
Not = Forward()
Not << (Group(
Suppress(Keyword("not", caseless=True)) + Not
).setResultsName("not") | Parenthesis)
And = Forward()
And << (Group(
Not + Suppress(Keyword("and", caseless=True)) + And
).setResultsName("and") | Group(
Not + OneOrMore(~oneOf("and or") + And)
).setResultsName("and") | Not)
Or << (Group(
And + Suppress(Keyword("or", caseless=True)) + Or
).setResultsName("or") | And)
if test:
Or.validate()
self._tests_failed = bool(failed)
self._parser = Or
#self._parser.setDebug(True)
self.parse('(tolstoy)')
self._parser.setDebug(False)
def parse(self, query):
res = self._parser.parseString(query)[0]
return self.evaluate(res)
def method(self, group_name):
return getattr(self, 'evaluate_'+group_name)
def evaluate(self, parse_result):
return self.method(parse_result.getName())(parse_result)
def evaluate_and(self, argument):
return self.evaluate(argument[0]).intersection(self.evaluate(argument[1]))
def evaluate_or(self, argument):
return self.evaluate(argument[0]).union(self.evaluate(argument[1]))
def evaluate_not(self, argument):
return self.universal_set().difference(self.evaluate(argument[0]))
def evaluate_parenthesis(self, argument):
return self.evaluate(argument[0])
def evaluate_token(self, argument):
return self.get_matches(argument[0], argument[1])
def get_matches(self, location, query):
'''
Should return the set of matches for :param:'location` and :param:`query`.
:param:`location` is one of the items in :member:`SearchQueryParser.LOCATIONS`.
:param:`query` is a string literal.
'''
return set([])
def universal_set(self):
'''
Should return the set of all matches.
'''
return set([])
class Tester(SearchQueryParser):
texts = {
1: [u'Eugenie Grandet', u'Honor\xe9 de Balzac', u'manybooks.net', u'lrf'],
2: [u'Fanny Hill', u'John Cleland', u'manybooks.net', u'lrf'],
3: [u'Persuasion', u'Jane Austen', u'manybooks.net', u'lrf'],
4: [u'Psmith, Journalist', u'P. G. Wodehouse', u'Some Publisher', u'lrf'],
5: [u'The Complete Works of William Shakespeare',
u'William Shakespeare',
u'manybooks.net',
u'lrf'],
6: [u'The History of England, Volume I',
u'David Hume',
u'manybooks.net',
u'lrf'],
7: [u'Someone Comes to Town, Someone Leaves Town',
u'Cory Doctorow',
u'Tor Books',
u'lrf'],
8: [u'Stalky and Co.', u'Rudyard Kipling', u'manybooks.net', u'lrf'],
9: [u'A Game of Thrones', u'George R. R. Martin', None, u'lrf,rar'],
10: [u'A Clash of Kings', u'George R. R. Martin', None, u'lrf,rar'],
11: [u'A Storm of Swords', u'George R. R. Martin', None, u'lrf,rar'],
12: [u'Biggles - Pioneer Air Fighter', u'W. E. Johns', None, u'lrf,rtf'],
13: [u'Biggles of the Camel Squadron',
u'W. E. Johns',
u'London:Thames, (1977)',
u'lrf,rtf'],
14: [u'A Feast for Crows', u'George R. R. Martin', None, u'lrf,rar'],
15: [u'Cryptonomicon', u'Neal Stephenson', None, u'lrf,rar'],
16: [u'Quicksilver', u'Neal Stephenson', None, u'lrf,zip'],
17: [u'The Comedies of William Shakespeare',
u'William Shakespeare',
None,
u'lrf'],
18: [u'The Histories of William Shakespeare',
u'William Shakespeare',
None,
u'lrf'],
19: [u'The Tragedies of William Shakespeare',
u'William Shakespeare',
None,
u'lrf'],
20: [u'An Ideal Husband', u'Oscar Wilde', u'manybooks.net', u'lrf'],
21: [u'Flight of the Nighthawks', u'Raymond E. Feist', None, u'lrf,rar'],
22: [u'Into a Dark Realm', u'Raymond E. Feist', None, u'lrf,rar'],
23: [u'The Sundering', u'Walter Jon Williams', None, u'lrf,rar'],
24: [u'The Praxis', u'Walter Jon Williams', None, u'lrf,rar'],
25: [u'Conventions of War', u'Walter Jon Williams', None, u'lrf,rar'],
26: [u'Banewreaker', u'Jacqueline Carey', None, u'lrf,rar'],
27: [u'Godslayer', u'Jacqueline Carey', None, u'lrf,rar'],
28: [u"Kushiel's Scion", u'Jacqueline Carey', None, u'lrf,rar'],
29: [u'Underworld', u'Don DeLillo', None, u'lrf,rar'],
30: [u'Genghis Khan and The Making of the Modern World',
u'Jack Weatherford',
u'Three Rivers Press',
u'lrf,zip'],
31: [u'The Best and the Brightest',
u'David Halberstam',
u'Modern Library',
u'lrf,zip'],
32: [u'The Killer Angels', u'Michael Shaara', None, u'html,lrf'],
33: [u'Band Of Brothers', u'Stephen E Ambrose', None, u'lrf,txt'],
34: [u'The Gates of Rome', u'Conn Iggulden', None, u'lrf,rar'],
35: [u'The Death of Kings', u'Conn Iggulden', u'Bantam Dell', u'lit,lrf'],
36: [u'The Field of Swords', u'Conn Iggulden', None, u'lrf,rar'],
37: [u'Masterman Ready', u'Marryat, Captain Frederick', None, u'lrf'],
38: [u'With the Lightnings',
u'David Drake',
u'Baen Publishing Enterprises',
u'lit,lrf'],
39: [u'Lt. Leary, Commanding',
u'David Drake',
u'Baen Publishing Enterprises',
u'lit,lrf'],
40: [u'The Far Side of The Stars',
u'David Drake',
u'Baen Publishing Enterprises',
u'lrf,rar'],
41: [u'The Way to Glory',
u'David Drake',
u'Baen Publishing Enterprises',
u'lrf,rar'],
42: [u'Some Golden Harbor', u'David Drake', u'Baen Books', u'lrf,rar'],
43: [u'Harry Potter And The Half-Blood Prince',
u'J. K. Rowling',
None,
u'lrf,rar'],
44: [u'Harry Potter and the Order of the Phoenix',
u'J. K. Rowling',
None,
u'lrf,rtf'],
45: [u'The Stars at War', u'David Weber , Steve White', None, u'lrf,rtf'],
46: [u'The Stars at War II',
u'Steve White',
u'Baen Publishing Enterprises',
u'lrf,rar'],
47: [u'Exodus', u'Steve White,Shirley Meier', u'Baen Books', u'lrf,rar'],
48: [u'Harry Potter and the Goblet of Fire',
u'J. K. Rowling',
None,
u'lrf,rar'],
49: [u'Harry Potter and the Prisoner of Azkaban',
u'J. K. Rowling',
None,
u'lrf,rtf'],
50: [u'Harry Potter and the Chamber of Secrets',
u'J. K. Rowling',
None,
u'lit,lrf'],
51: [u'Harry Potter and the Deathly Hallows',
u'J.K. Rowling',
None,
u'lit,lrf,pdf'],
52: [u"His Majesty's Dragon", u'Naomi Novik', None, u'lrf,rar'],
53: [u'Throne of Jade', u'Naomi Novik', u'Del Rey', u'lit,lrf'],
54: [u'Black Powder War', u'Naomi Novik', u'Del Rey', u'lrf,rar'],
55: [u'War and Peace', u'Leo Tolstoy', u'gutenberg.org', u'lrf,txt'],
56: [u'Anna Karenina', u'Leo Tolstoy', u'gutenberg.org', u'lrf,txt'],
57: [u'A Shorter History of Rome',
u'Eugene Lawrence,Sir William Smith',
u'gutenberg.org',
u'lrf,zip'],
58: [u'The Name of the Rose', u'Umberto Eco', None, u'lrf,rar'],
71: [u"Wind Rider's Oath", u'David Weber', u'Baen', u'lrf'],
74: [u'Rally Cry', u'William R Forstchen', None, u'htm,lrf'],
86: [u'Empire of Ivory', u'Naomi Novik', None, u'lrf,rar'],
87: [u"Renegade's Magic", u'Robin Hobb', None, u'lrf,rar'],
89: [u'Master and commander',
u"Patrick O'Brian",
u'Fontana,\n1971',
u'lit,lrf'],
91: [u'A Companion to Wolves',
u'Sarah Monette,Elizabeth Beär',
None,
u'lrf,rar'],
92: [u'The Lions of al-Rassan', u'Guy Gavriel Kay', u'Eos', u'lit,lrf'],
93: [u'Gardens of the Moon', u'Steven Erikson', u'Tor Fantasy', u'lit,lrf'],
95: [u'The Master and Margarita',
u'Mikhail Bulgakov',
u'N.Y. : Knopf, 1992.',
u'lrf,rtf'],
120: [u'Deadhouse Gates',
u'Steven Erikson',
u'London : Bantam Books, 2001.',
u'lit,lrf'],
121: [u'Memories of Ice', u'Steven Erikson', u'Bantam Books', u'lit,lrf'],
123: [u'House of Chains', u'Steven Erikson', u'Bantam Books', u'lit,lrf'],
125: [u'Midnight Tides', u'Steven Erikson', u'Bantam Books', u'lit,lrf'],
126: [u'The Bonehunters', u'Steven Erikson', u'Bantam Press', u'lit,lrf'],
129: [u'Guns, germs, and steel: the fates of human societies',
u'Jared Diamond',
u'New York : W.W. Norton, c1997.',
u'lit,lrf'],
136: [u'Wildcards', u'George R. R. Martin', None, u'html,lrf'],
138: [u'Off Armageddon Reef', u'David Weber', u'Tor Books', u'lit,lrf'],
144: [u'Atonement',
u'Ian McEwan',
u'New York : Nan A. Talese/Doubleday, 2002.',
u'lrf,rar'],
146: [u'1632', u'Eric Flint', u'Baen Books', u'lit,lrf'],
147: [u'1633', u'David Weber,Eric Flint,Dru Blair', u'Baen', u'lit,lrf'],
148: [u'1634: The Baltic War',
u'David Weber,Eric Flint',
u'Baen',
u'lit,lrf'],
150: [u'The Dragonbone Chair', u'Tad Williams', u'DAW Trade', u'lrf,rtf'],
152: [u'The Little Book That Beats the Market',
u'Joel Greenblatt',
u'Wiley',
u'epub,lrf'],
153: [u'Pride of Carthage', u'David Anthony Durham', u'Anchor', u'lit,lrf'],
154: [u'Stone of farewell',
u'Tad Williams',
u'New York : DAW Books, 1990.',
u'lrf,txt'],
166: [u'American Gods', u'Neil Gaiman', u'HarperTorch', u'lit,lrf'],
176: [u'Pillars of the Earth',
u'Ken Follett',
u'New American Library',
u'lit,lrf'],
182: [u'The Eye of the world',
u'Robert Jordan',
u'New York : T. Doherty Associates, c1990.',
u'lit,lrf'],
188: [u'The Great Hunt', u'Robert Jordan', u'ATOM', u'lrf,zip'],
189: [u'The Dragon Reborn', u'Robert Jordan', None, u'lit,lrf'],
190: [u'The Shadow Rising', u'Robert Jordan', None, u'lit,lrf'],
191: [u'The Fires of Heaven',
u'Robert Jordan',
u'Time Warner Books Uk',
u'lit,lrf'],
216: [u'Lord of chaos',
u'Robert Jordan',
u'New York : TOR, c1994.',
u'lit,lrf'],
217: [u'A Crown of Swords', u'Robert Jordan', None, u'lit,lrf'],
236: [u'The Path of Daggers', u'Robert Jordan', None, u'lit,lrf'],
238: [u'The Client',
u'John Grisham',
u'New York : Island, 1994, c1993.',
u'lit,lrf'],
240: [u"Winter's Heart", u'Robert Jordan', None, u'lit,lrf'],
242: [u'In the Beginning was the Command Line',
u'Neal Stephenson',
None,
u'lrf,txt'],
249: [u'Crossroads of Twilight', u'Robert Jordan', None, u'lit,lrf'],
251: [u'Caves of Steel', u'Isaac Asimov', u'Del Rey', u'lrf,zip'],
253: [u"Hunter's Run",
u'George R. R. Martin,Gardner Dozois,Daniel Abraham',
u'Eos',
u'lrf,rar'],
257: [u'Knife of Dreams', u'Robert Jordan', None, u'lit,lrf'],
258: [u'Saturday',
u'Ian McEwan',
u'London : Jonathan Cape, 2005.',
u'lrf,txt'],
259: [u'My name is Red',
u'Orhan Pamuk; translated from the Turkish by Erda\u011f G\xf6knar',
u'New York : Alfred A. Knopf, 2001.',
u'lit,lrf'],
265: [u'Harbinger', u'David Mack', u'Star Trek', u'lit,lrf'],
267: [u'Summon the Thunder',
u'Dayton Ward,Kevin Dilmore',
u'Pocket Books',
u'lit,lrf'],
268: [u'Shalimar the Clown',
u'Salman Rushdie',
u'New York : Random House, 2005.',
u'lit,lrf'],
269: [u'Reap the Whirlwind', u'David Mack', u'Star Trek', u'lit,lrf'],
272: [u'Mistborn', u'Brandon Sanderson', u'Tor Fantasy', u'lrf,rar'],
273: [u'The Thousandfold Thought',
u'R. Scott Bakker',
u'Overlook TP',
u'lrf,rtf'],
276: [u'Elantris',
u'Brandon Sanderson',
u'New York : Tor, 2005.',
u'lrf,rar'],
291: [u'Sundiver',
u'David Brin',
u'New York : Bantam Books, 1995.',
u'lit,lrf'],
299: [u'Imperium', u'Robert Harris', u'Arrow', u'lrf,rar'],
300: [u'Startide Rising', u'David Brin', u'Bantam', u'htm,lrf'],
301: [u'The Uplift War', u'David Brin', u'Spectra', u'lit,lrf'],
304: [u'Brightness Reef', u'David Brin', u'Orbit', u'lrf,rar'],
305: [u"Infinity's Shore", u'David Brin', u'Spectra', u'txt'],
306: [u"Heaven's Reach", u'David Brin', u'Spectra', u'lrf,rar'],
325: [u"Foundation's Triumph", u'David Brin', u'Easton Press', u'lit,lrf'],
327: [u'I am Charlotte Simmons', u'Tom Wolfe', u'Vintage', u'htm,lrf'],
335: [u'The Currents of Space', u'Isaac Asimov', None, u'lit,lrf'],
340: [u'The Other Boleyn Girl',
u'Philippa Gregory',
u'Touchstone',
u'lit,lrf'],
341: [u"Old Man's War", u'John Scalzi', u'Tor', u'htm,lrf'],
342: [u'The Ghost Brigades',
u'John Scalzi',
u'Tor Science Fiction',
u'html,lrf'],
343: [u'The Last Colony', u'John Scalzi', u'Tor Books', u'html,lrf'],
344: [u'Gossip Girl', u'Cecily von Ziegesar', u'Warner Books', u'lrf,rtf'],
347: [u'Little Brother', u'Cory Doctorow', u'Tor Teen', u'lrf'],
348: [u'The Reality Dysfunction',
u'Peter F. Hamilton',
u'Pan MacMillan',
u'lit,lrf'],
353: [u'A Thousand Splendid Suns',
u'Khaled Hosseini',
u'Center Point Large Print',
u'lit,lrf'],
354: [u'Amsterdam', u'Ian McEwan', u'Anchor', u'lrf,txt'],
355: [u'The Neutronium Alchemist',
u'Peter F. Hamilton',
u'Aspect',
u'lit,lrf'],
356: [u'The Naked God', u'Peter F. Hamilton', u'Aspect', u'lit,lrf'],
421: [u'A Shadow in Summer', u'Daniel Abraham', u'Tor Fantasy', u'lrf,rar'],
427: [u'Lonesome Dove', u'Larry McMurtry', None, u'lit,lrf'],
440: [u'Ghost', u'John Ringo', u'Baen', u'lit,lrf'],
441: [u'Kildar', u'John Ringo', u'Baen', u'lit,lrf'],
443: [u'Hidden Empire ', u'Kevin J. Anderson', u'Aspect', u'lrf,rar'],
444: [u'The Gun Seller',
u'Hugh Laurie',
u'Washington Square Press',
u'lrf,rar']
}
tests = {
'Dysfunction' : set([348]),
'title:Dysfunction' : set([348]),
'title:Dysfunction or author:Laurie': set([348, 444]),
'(tag:txt or tag:pdf)': set([33, 258, 354, 305, 242, 51, 55, 56, 154]),
'(tag:txt or tag:pdf) and author:Tolstoy': set([55, 56]),
'Tolstoy txt': set([55, 56]),
'Hamilton Amsterdam' : set([]),
u'Beär' : set([91]),
'dysfunc or tolstoy': set([348, 55, 56]),
'tag:txt and not tolstoy': set([33, 258, 354, 305, 242, 154]),
'not tag:lrf' : set([305]),
'london:thames': set([13]),
'publisher:london:thames': set([13]),
'"(1977)"': set([13]),
}
fields = {'title':0, 'author':1, 'publisher':2, 'tag':3}
_universal_set = set(texts.keys())
def universal_set(self):
return self._universal_set
def get_matches(self, location, query):
location = location.lower()
if location in self.fields.keys():
getter = operator.itemgetter(self.fields[location])
elif location == 'all':
getter = lambda y: ''.join(x if x else '' for x in y)
else:
getter = lambda x: ''
if not query:
return set([])
query = query.lower()
return set(key for key, val in self.texts.items() \
if query and query in getattr(getter(val), 'lower', lambda : '')())
def run_tests(self):
failed = []
for query in self.tests.keys():
print 'Testing query:', query,
res = self.parse(query)
if res != self.tests[query]:
print 'FAILED', 'Expected:', self.tests[query], 'Got:', res
failed.append(query)
else:
print 'OK'
return failed
def main(args=sys.argv):
tester = Tester(test=True)
failed = tester.run_tests()
if tester._tests_failed or failed:
print '>>>>>>>>>>>>>> Tests Failed <<<<<<<<<<<<<<<'
return 1
return 0
if __name__ == '__main__':
sys.exit(main())
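A quick way to poke at the query grammar exercised by the tests dict above, outside of run_tests(), is a sketch along these lines (the import path is an assumption, not something this diff establishes):

# Hedged sketch; adjust the import to wherever this module ends up on your path.
from calibre.utils.search_query_parser import Tester  # assumed location

tester = Tester(test=True)
print tester.parse('title:Dysfunction or author:Laurie')   # expected: set([348, 444])
print tester.parse('tag:txt and not tolstoy')              # expected: set([33, 258, 354, 305, 242, 154])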

View File

@@ -0,0 +1,976 @@
"""
Read and write ZIP files. Modified by Kovid Goyal to allow replacing of files
in the ZIP archive.
"""
import struct, os, time, sys
import binascii, cStringIO
try:
import zlib # We may need its compression method
except ImportError:
zlib = None
__all__ = ["BadZipfile", "error", "ZIP_STORED", "ZIP_DEFLATED", "is_zipfile",
"ZipInfo", "ZipFile", "PyZipFile", "LargeZipFile" ]
class BadZipfile(Exception):
pass
class LargeZipFile(Exception):
"""
Raised when writing a zipfile, the zipfile requires ZIP64 extensions
and those extensions are disabled.
"""
error = BadZipfile # The exception raised by this module
ZIP64_LIMIT= (1 << 31) - 1
# constants for Zip file compression methods
ZIP_STORED = 0
ZIP_DEFLATED = 8
# Other ZIP compression methods not supported
# Here are some struct module formats for reading headers
structEndArchive = "<4s4H2LH" # 9 items, end of archive, 22 bytes
stringEndArchive = "PK\005\006" # magic number for end of archive record
structCentralDir = "<4s4B4HlLL5HLL"# 19 items, central directory, 46 bytes
stringCentralDir = "PK\001\002" # magic number for central directory
structFileHeader = "<4s2B4HlLL2H" # 12 items, file header record, 30 bytes
stringFileHeader = "PK\003\004" # magic number for file header
structEndArchive64Locator = "<4slql" # 4 items, locate Zip64 header, 20 bytes
stringEndArchive64Locator = "PK\x06\x07" # magic token for locator header
structEndArchive64 = "<4sqhhllqqqq" # 10 items, end of archive (Zip64), 56 bytes
stringEndArchive64 = "PK\x06\x06" # magic token for Zip64 header
# indexes of entries in the central directory structure
_CD_SIGNATURE = 0
_CD_CREATE_VERSION = 1
_CD_CREATE_SYSTEM = 2
_CD_EXTRACT_VERSION = 3
_CD_EXTRACT_SYSTEM = 4 # is this meaningful?
_CD_FLAG_BITS = 5
_CD_COMPRESS_TYPE = 6
_CD_TIME = 7
_CD_DATE = 8
_CD_CRC = 9
_CD_COMPRESSED_SIZE = 10
_CD_UNCOMPRESSED_SIZE = 11
_CD_FILENAME_LENGTH = 12
_CD_EXTRA_FIELD_LENGTH = 13
_CD_COMMENT_LENGTH = 14
_CD_DISK_NUMBER_START = 15
_CD_INTERNAL_FILE_ATTRIBUTES = 16
_CD_EXTERNAL_FILE_ATTRIBUTES = 17
_CD_LOCAL_HEADER_OFFSET = 18
# indexes of entries in the local file header structure
_FH_SIGNATURE = 0
_FH_EXTRACT_VERSION = 1
_FH_EXTRACT_SYSTEM = 2 # is this meaningful?
_FH_GENERAL_PURPOSE_FLAG_BITS = 3
_FH_COMPRESSION_METHOD = 4
_FH_LAST_MOD_TIME = 5
_FH_LAST_MOD_DATE = 6
_FH_CRC = 7
_FH_COMPRESSED_SIZE = 8
_FH_UNCOMPRESSED_SIZE = 9
_FH_FILENAME_LENGTH = 10
_FH_EXTRA_FIELD_LENGTH = 11
def is_zipfile(filename):
"""Quickly see if file is a ZIP file by checking the magic number."""
try:
fpin = open(filename, "rb")
endrec = _EndRecData(fpin)
fpin.close()
if endrec:
return True # file has correct magic number
except IOError:
pass
return False
def _EndRecData64(fpin, offset, endrec):
"""
Read the ZIP64 end-of-archive records and use that to update endrec
"""
locatorSize = struct.calcsize(structEndArchive64Locator)
fpin.seek(offset - locatorSize, 2)
data = fpin.read(locatorSize)
sig, diskno, reloff, disks = struct.unpack(structEndArchive64Locator, data)
if sig != stringEndArchive64Locator:
return endrec
if diskno != 0 or disks != 1:
raise BadZipfile("zipfiles that span multiple disks are not supported")
# Assume no 'zip64 extensible data'
endArchiveSize = struct.calcsize(structEndArchive64)
fpin.seek(offset - locatorSize - endArchiveSize, 2)
data = fpin.read(endArchiveSize)
sig, sz, create_version, read_version, disk_num, disk_dir, \
dircount, dircount2, dirsize, diroffset = \
struct.unpack(structEndArchive64, data)
if sig != stringEndArchive64:
return endrec
# Update the original endrec using data from the ZIP64 record
endrec[1] = disk_num
endrec[2] = disk_dir
endrec[3] = dircount
endrec[4] = dircount2
endrec[5] = dirsize
endrec[6] = diroffset
return endrec
def _EndRecData(fpin):
"""Return data from the "End of Central Directory" record, or None.
The data is a list of the nine items in the ZIP "End of central dir"
record followed by a tenth item, the file seek offset of this record."""
fpin.seek(-22, 2) # Assume no archive comment.
filesize = fpin.tell() + 22 # Get file size
data = fpin.read()
if data[0:4] == stringEndArchive and data[-2:] == "\000\000":
endrec = struct.unpack(structEndArchive, data)
endrec = list(endrec)
endrec.append("") # Append the archive comment
endrec.append(filesize - 22) # Append the record start offset
if endrec[-4] == -1 or endrec[-4] == 0xffffffff:
return _EndRecData64(fpin, -22, endrec)
return endrec
# Search the last END_BLOCK bytes of the file for the record signature.
# The comment is appended to the ZIP file and has a 16 bit length.
# So the comment may be up to 64K long. We limit the search for the
# signature to a few Kbytes at the end of the file for efficiency.
# also, the signature must not appear in the comment.
END_BLOCK = min(filesize, 1024 * 4)
fpin.seek(filesize - END_BLOCK, 0)
data = fpin.read()
start = data.rfind(stringEndArchive)
if start >= 0: # Correct signature string was found
endrec = struct.unpack(structEndArchive, data[start:start+22])
endrec = list(endrec)
comment = data[start+22:]
if endrec[7] == len(comment): # Comment length checks out
# Append the archive comment and start offset
endrec.append(comment)
endrec.append(filesize - END_BLOCK + start)
if endrec[-4] == -1 or endrec[-4] == 0xffffffff:
return _EndRecData64(fpin, - END_BLOCK + start, endrec)
return endrec
return # Error, return None
class ZipInfo (object):
"""Class with attributes describing each file in the ZIP archive."""
__slots__ = (
'orig_filename',
'filename',
'date_time',
'compress_type',
'comment',
'extra',
'create_system',
'create_version',
'extract_version',
'reserved',
'flag_bits',
'volume',
'internal_attr',
'external_attr',
'header_offset',
'CRC',
'compress_size',
'file_size',
'file_offset',
)
def __init__(self, filename="NoName", date_time=(1980,1,1,0,0,0)):
self.orig_filename = filename # Original file name in archive
# Terminate the file name at the first null byte. Null bytes in file
# names are used as tricks by viruses in archives.
null_byte = filename.find(chr(0))
if null_byte >= 0:
filename = filename[0:null_byte]
# This is used to ensure paths in generated ZIP files always use
# forward slashes as the directory separator, as required by the
# ZIP format specification.
if os.sep != "/" and os.sep in filename:
filename = filename.replace(os.sep, "/")
self.filename = filename # Normalized file name
self.date_time = date_time # year, month, day, hour, min, sec
# Standard values:
self.compress_type = ZIP_STORED # Type of compression for the file
self.comment = "" # Comment for each file
self.extra = "" # ZIP extra data
if sys.platform == 'win32':
self.create_system = 0 # System which created ZIP archive
else:
# Assume everything else is unix-y
self.create_system = 3 # System which created ZIP archive
self.create_version = 20 # Version which created ZIP archive
self.extract_version = 20 # Version needed to extract archive
self.reserved = 0 # Must be zero
self.flag_bits = 0 # ZIP flag bits
self.volume = 0 # Volume number of file header
self.internal_attr = 0 # Internal attributes
self.external_attr = 0 # External file attributes
self.file_offset = 0
# Other attributes are set by class ZipFile:
# header_offset Byte offset to the file header
# CRC CRC-32 of the uncompressed file
# compress_size Size of the compressed file
# file_size Size of the uncompressed file
def FileHeader(self):
"""Return the per-file header as a string."""
dt = self.date_time
dosdate = (dt[0] - 1980) << 9 | dt[1] << 5 | dt[2]
dostime = dt[3] << 11 | dt[4] << 5 | (dt[5] // 2)
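# Illustrative worked example (values chosen arbitrarily, not part of the original code):
# for date_time = (2008, 7, 18, 21, 2, 4)
#   dosdate = (2008 - 1980) << 9 | 7 << 5 | 18 = 14336 | 224 | 18 = 14578
#   dostime = 21 << 11 | 2 << 5 | (4 // 2)    = 43008 | 64 | 2   = 43074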
if self.flag_bits & 0x08:
# Set these to zero because we write them after the file data
CRC = compress_size = file_size = 0
else:
CRC = self.CRC
compress_size = self.compress_size
file_size = self.file_size
extra = self.extra
if file_size > ZIP64_LIMIT or compress_size > ZIP64_LIMIT:
# File is larger than what fits into a 4 byte integer,
# fall back to the ZIP64 extension
fmt = '<hhqq'
extra = extra + struct.pack(fmt,
1, struct.calcsize(fmt)-4, file_size, compress_size)
file_size = 0xffffffff # -1
compress_size = 0xffffffff # -1
self.extract_version = max(45, self.extract_version)
self.create_version = max(45, self.extract_version)
header = struct.pack(structFileHeader, stringFileHeader,
self.extract_version, self.reserved, self.flag_bits,
self.compress_type, dostime, dosdate, CRC,
compress_size, file_size,
len(self.filename), len(extra))
return header + self.filename + extra
def _decodeExtra(self):
# Try to decode the extra field.
extra = self.extra
unpack = struct.unpack
while extra:
tp, ln = unpack('<hh', extra[:4])
if tp == 1:
if ln >= 24:
counts = unpack('<qqq', extra[4:28])
elif ln == 16:
counts = unpack('<qq', extra[4:20])
elif ln == 8:
counts = unpack('<q', extra[4:12])
elif ln == 0:
counts = ()
else:
raise RuntimeError, "Corrupt extra field %s"%(ln,)
idx = 0
# ZIP64 extension (large files and/or large archives)
if self.file_size == -1 or self.file_size == 0xFFFFFFFFL:
self.file_size = counts[idx]
idx += 1
if self.compress_size == -1 or self.compress_size == 0xFFFFFFFFL:
self.compress_size = counts[idx]
idx += 1
if self.header_offset == -1 or self.header_offset == 0xffffffffL:
self.header_offset = counts[idx]
idx+=1
extra = extra[ln+4:]
class ZipFile:
""" Class with methods to open, read, write, close, list and update
zip files.
z = ZipFile(file, mode="r", compression=ZIP_STORED, allowZip64=True)
file: Either the path to the file, or a file-like object.
If it is a path, the file will be opened and closed by ZipFile.
mode: The mode can be either read "r", write "w" or append "a".
compression: ZIP_STORED (no compression) or ZIP_DEFLATED (requires zlib).
allowZip64: if True ZipFile will create files with ZIP64 extensions when
needed, otherwise it will raise an exception when this would
be necessary.
"""
fp = None # Set here since __del__ checks it
def __init__(self, file, mode="r", compression=ZIP_STORED, allowZip64=False):
"""Open the ZIP file with mode read "r", write "w" or append "a"."""
self._allowZip64 = allowZip64
self._didModify = False
if compression == ZIP_STORED:
pass
elif compression == ZIP_DEFLATED:
if not zlib:
raise RuntimeError,\
"Compression requires the (missing) zlib module"
else:
raise RuntimeError, "That compression method is not supported"
self.debug = 0 # Level of printing: 0 through 3
self.NameToInfo = {} # Find file info given name
self.filelist = [] # List of ZipInfo instances for archive
self.compression = compression # Method of compression
self.mode = key = mode.replace('b', '')[0]
# Check if we were passed a file-like object
if isinstance(file, basestring):
self._filePassed = 0
self.filename = file
modeDict = {'r' : 'rb', 'w': 'wb', 'a' : 'r+b'}
self.fp = open(file, modeDict[mode])
else:
self._filePassed = 1
self.fp = file
self.filename = getattr(file, 'name', None)
if key == 'r':
self._GetContents()
elif key == 'w':
pass
elif key == 'a':
try: # See if file is a zip file
self._RealGetContents()
self._calculate_file_offsets()
# seek to start of directory and overwrite
self.fp.seek(self.start_dir, 0)
except BadZipfile: # file is not a zip file, just append
self.fp.seek(0, 2)
else:
if not self._filePassed:
self.fp.close()
self.fp = None
raise RuntimeError, 'Mode must be "r", "w" or "a"'
def _GetContents(self):
"""Read the directory, making sure we close the file if the format
is bad."""
try:
self._RealGetContents()
except BadZipfile:
if not self._filePassed:
self.fp.close()
self.fp = None
raise
def _RealGetContents(self):
"""Read in the table of contents for the ZIP file."""
fp = self.fp
endrec = _EndRecData(fp)
if not endrec:
raise BadZipfile, "File is not a zip file"
if self.debug > 1:
print endrec
size_cd = endrec[5] # bytes in central directory
offset_cd = endrec[6] # offset of central directory
self.comment = endrec[8] # archive comment
# endrec[9] is the offset of the "End of Central Dir" record
if endrec[9] > ZIP64_LIMIT:
x = endrec[9] - size_cd - 56 - 20
else:
x = endrec[9] - size_cd
# "concat" is zero, unless zip was concatenated to another file
concat = x - offset_cd
if self.debug > 2:
print "given, inferred, offset", offset_cd, x, concat
# self.start_dir: Position of start of central directory
self.start_dir = offset_cd + concat
fp.seek(self.start_dir, 0)
data = fp.read(size_cd)
fp = cStringIO.StringIO(data)
total = 0
while total < size_cd:
centdir = fp.read(46)
total = total + 46
if centdir[0:4] != stringCentralDir:
raise BadZipfile, "Bad magic number for central directory"
centdir = struct.unpack(structCentralDir, centdir)
if self.debug > 2:
print centdir
filename = fp.read(centdir[_CD_FILENAME_LENGTH])
# Create ZipInfo instance to store file information
x = ZipInfo(filename)
x.extra = fp.read(centdir[_CD_EXTRA_FIELD_LENGTH])
x.comment = fp.read(centdir[_CD_COMMENT_LENGTH])
total = (total + centdir[_CD_FILENAME_LENGTH]
+ centdir[_CD_EXTRA_FIELD_LENGTH]
+ centdir[_CD_COMMENT_LENGTH])
x.header_offset = centdir[_CD_LOCAL_HEADER_OFFSET]
(x.create_version, x.create_system, x.extract_version, x.reserved,
x.flag_bits, x.compress_type, t, d,
x.CRC, x.compress_size, x.file_size) = centdir[1:12]
x.volume, x.internal_attr, x.external_attr = centdir[15:18]
# Convert date/time code to (year, month, day, hour, min, sec)
x.date_time = ( (d>>9)+1980, (d>>5)&0xF, d&0x1F,
t>>11, (t>>5)&0x3F, (t&0x1F) * 2 )
x._decodeExtra()
x.header_offset = x.header_offset + concat
self.filelist.append(x)
self.NameToInfo[x.filename] = x
if self.debug > 2:
print "total", total
def _calculate_file_offsets(self):
for zip_info in self.filelist:
self.fp.seek(zip_info.header_offset, 0)
fheader = self.fp.read(30)
if fheader[0:4] != stringFileHeader:
raise BadZipfile, "Bad magic number for file header"
fheader = struct.unpack(structFileHeader, fheader)
# file_offset is computed here, since the extra field for
# the central directory and for the local file header
# refer to different fields, and they can have different
# lengths
file_offset = (zip_info.header_offset + 30
+ fheader[_FH_FILENAME_LENGTH]
+ fheader[_FH_EXTRA_FIELD_LENGTH])
fname = self.fp.read(fheader[_FH_FILENAME_LENGTH])
if fname != zip_info.orig_filename:
raise RuntimeError, \
'File name in directory "%s" and header "%s" differ.' % (
zip_info.orig_filename, fname)
zip_info.file_offset = file_offset
def replace(self, filename, arcname=None, compress_type=None):
"""Delete arcname, and put the bytes from filename into the
archive under the name arcname."""
deleteName = arcname
if deleteName is None:
deleteName = filename
self.delete(deleteName)
self.write(filename, arcname, compress_type)
def replacestr(self, zinfo, bytes):
"""Delete zinfo.filename, and write a new file into the archive. The
contents is the string 'bytes'."""
self.delete(zinfo.filename)
self.writestr(zinfo, bytes)
def delete(self, name):
"""Delete the file from the archive. If it appears multiple
times only the first instance will be deleted."""
for i in range (0, len(self.filelist)):
if self.filelist[i].filename == name:
if self.debug:
print "Removing", name
deleted_offset = self.filelist[i].header_offset
deleted_size = (self.filelist[i].file_offset - self.filelist[i].header_offset) + self.filelist[i].compress_size
zinfo_size = struct.calcsize(structCentralDir) + len(self.filelist[i].filename) + len(self.filelist[i].extra)
# Remove the file's data from the archive.
current_offset = self.fp.tell()
self.fp.seek(0, 2)
archive_size = self.fp.tell()
self.fp.seek(deleted_offset + deleted_size)
buf = self.fp.read()
self.fp.seek(deleted_offset)
self.fp.write(buf)
self.fp.truncate(archive_size - deleted_size - zinfo_size)
if current_offset > deleted_offset + deleted_size:
current_offset -= deleted_size
elif current_offset > deleted_offset:
current_offset = deleted_offset
self.fp.seek(current_offset, 0)
# Remove file from central directory.
del self.filelist[i]
# Adjust the remaining offsets in the central directory.
for j in range (i, len(self.filelist)):
if self.filelist[j].header_offset > deleted_offset:
self.filelist[j].header_offset -= deleted_size
if self.filelist[j].file_offset > deleted_offset:
self.filelist[j].file_offset -= deleted_size
return
if self.debug:
print name, "not in archive"
def namelist(self):
"""Return a list of file names in the archive."""
l = []
for data in self.filelist:
l.append(data.filename)
return l
def infolist(self):
"""Return a list of class ZipInfo instances for files in the
archive."""
return self.filelist
def printdir(self):
"""Print a table of contents for the zip file."""
print "%-46s %19s %12s" % ("File Name", "Modified ", "Size")
for zinfo in self.filelist:
date = "%d-%02d-%02d %02d:%02d:%02d" % zinfo.date_time[:6]
print "%-46s %s %12d" % (zinfo.filename, date, zinfo.file_size)
def testzip(self):
"""Read all the files and check the CRC."""
for zinfo in self.filelist:
try:
self.read(zinfo.filename) # Check CRC-32
except BadZipfile:
return zinfo.filename
def getinfo(self, name):
"""Return the instance of ZipInfo given 'name'."""
return self.NameToInfo[name]
def read(self, name):
"""Return file bytes (as a string) for name."""
if self.mode not in ("r", "a"):
raise RuntimeError, 'read() requires mode "r" or "a"'
if not self.fp:
raise RuntimeError, \
"Attempt to read ZIP archive that was already closed"
zinfo = self.getinfo(name)
filepos = self.fp.tell()
self.fp.seek(zinfo.header_offset, 0)
# Skip the file header:
fheader = self.fp.read(30)
if fheader[0:4] != stringFileHeader:
raise BadZipfile, "Bad magic number for file header"
fheader = struct.unpack(structFileHeader, fheader)
fname = self.fp.read(fheader[_FH_FILENAME_LENGTH])
if fheader[_FH_EXTRA_FIELD_LENGTH]:
self.fp.read(fheader[_FH_EXTRA_FIELD_LENGTH])
if fname != zinfo.orig_filename:
raise BadZipfile, \
'File name in directory "%s" and header "%s" differ.' % (
zinfo.orig_filename, fname)
bytes = self.fp.read(zinfo.compress_size)
self.fp.seek(filepos, 0)
if zinfo.compress_type == ZIP_STORED:
pass
elif zinfo.compress_type == ZIP_DEFLATED:
if not zlib:
raise RuntimeError, \
"De-compression requires the (missing) zlib module"
# zlib compress/decompress code by Jeremy Hylton of CNRI
dc = zlib.decompressobj(-15)
bytes = dc.decompress(bytes)
# need to feed in unused pad byte so that zlib won't choke
ex = dc.decompress('Z') + dc.flush()
if ex:
bytes = bytes + ex
else:
raise BadZipfile, \
"Unsupported compression method %d for file %s" % \
(zinfo.compress_type, name)
crc = binascii.crc32(bytes)
if crc != zinfo.CRC:
raise BadZipfile, "Bad CRC-32 for file %s" % name
return bytes
def _writecheck(self, zinfo):
"""Check for errors before writing a file to the archive."""
if zinfo.filename in self.NameToInfo:
if self.debug: # Warning for duplicate names
print "Duplicate name:", zinfo.filename
if self.mode not in ("w", "a"):
raise RuntimeError, 'write() requires mode "w" or "a"'
if not self.fp:
raise RuntimeError, \
"Attempt to write ZIP archive that was already closed"
if zinfo.compress_type == ZIP_DEFLATED and not zlib:
raise RuntimeError, \
"Compression requires the (missing) zlib module"
if zinfo.compress_type not in (ZIP_STORED, ZIP_DEFLATED):
raise RuntimeError, \
"That compression method is not supported"
if zinfo.file_size > ZIP64_LIMIT:
if not self._allowZip64:
raise LargeZipFile("Filesize would require ZIP64 extensions")
if zinfo.header_offset > ZIP64_LIMIT:
if not self._allowZip64:
raise LargeZipFile("Zipfile size would require ZIP64 extensions")
def write(self, filename, arcname=None, compress_type=None):
"""Put the bytes from filename into the archive under the name
arcname."""
st = os.stat(filename)
mtime = time.localtime(st.st_mtime)
date_time = mtime[0:6]
# Create ZipInfo instance to store file information
if arcname is None:
arcname = filename
arcname = os.path.normpath(os.path.splitdrive(arcname)[1])
while arcname[0] in (os.sep, os.altsep):
arcname = arcname[1:]
zinfo = ZipInfo(arcname, date_time)
zinfo.external_attr = (st[0] & 0xFFFF) << 16L # Unix attributes
if compress_type is None:
zinfo.compress_type = self.compression
else:
zinfo.compress_type = compress_type
zinfo.file_size = st.st_size
zinfo.flag_bits = 0x00
zinfo.header_offset = self.fp.tell() # Start of header bytes
self._writecheck(zinfo)
self._didModify = True
fp = open(filename, "rb")
# Must overwrite CRC and sizes with correct data later
zinfo.CRC = CRC = 0
zinfo.compress_size = compress_size = 0
zinfo.file_size = file_size = 0
self.fp.write(zinfo.FileHeader())
if zinfo.compress_type == ZIP_DEFLATED:
cmpr = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION,
zlib.DEFLATED, -15)
else:
cmpr = None
while 1:
buf = fp.read(1024 * 8)
if not buf:
break
file_size = file_size + len(buf)
CRC = binascii.crc32(buf, CRC)
if cmpr:
buf = cmpr.compress(buf)
compress_size = compress_size + len(buf)
self.fp.write(buf)
fp.close()
if cmpr:
buf = cmpr.flush()
compress_size = compress_size + len(buf)
self.fp.write(buf)
zinfo.compress_size = compress_size
else:
zinfo.compress_size = file_size
zinfo.CRC = CRC
zinfo.file_size = file_size
# Seek backwards and write CRC and file sizes
position = self.fp.tell() # Preserve current position in file
self.fp.seek(zinfo.header_offset + 14, 0)
self.fp.write(struct.pack("<lLL", zinfo.CRC, zinfo.compress_size,
zinfo.file_size))
self.fp.seek(position, 0)
self.filelist.append(zinfo)
self.NameToInfo[zinfo.filename] = zinfo
def writestr(self, zinfo_or_arcname, bytes):
"""Write a file into the archive. The contents is the string
'bytes'. 'zinfo_or_arcname' is either a ZipInfo instance or
the name of the file in the archive."""
if not isinstance(zinfo_or_arcname, ZipInfo):
zinfo = ZipInfo(filename=zinfo_or_arcname,
date_time=time.localtime(time.time())[:6])
zinfo.compress_type = self.compression
else:
zinfo = zinfo_or_arcname
zinfo.file_size = len(bytes) # Uncompressed size
zinfo.header_offset = self.fp.tell() # Start of header bytes
self._writecheck(zinfo)
self._didModify = True
zinfo.CRC = binascii.crc32(bytes) # CRC-32 checksum
if zinfo.compress_type == ZIP_DEFLATED:
co = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION,
zlib.DEFLATED, -15)
bytes = co.compress(bytes) + co.flush()
zinfo.compress_size = len(bytes) # Compressed size
else:
zinfo.compress_size = zinfo.file_size
zinfo.header_offset = self.fp.tell() # Start of header bytes
self.fp.write(zinfo.FileHeader())
self.fp.write(bytes)
self.fp.flush()
if zinfo.flag_bits & 0x08:
# Write CRC and file sizes after the file data
self.fp.write(struct.pack("<lLL", zinfo.CRC, zinfo.compress_size,
zinfo.file_size))
self.filelist.append(zinfo)
self.NameToInfo[zinfo.filename] = zinfo
def __del__(self):
"""Call the "close()" method in case the user forgot."""
self.close()
def close(self):
"""Close the file, and for mode "w" and "a" write the ending
records."""
if self.fp is None:
return
if self.mode in ("w", "a") and self._didModify: # write ending records
count = 0
pos1 = self.fp.tell()
for zinfo in self.filelist: # write central directory
count = count + 1
dt = zinfo.date_time
dosdate = (dt[0] - 1980) << 9 | dt[1] << 5 | dt[2]
dostime = dt[3] << 11 | dt[4] << 5 | (dt[5] // 2)
extra = []
if zinfo.file_size > ZIP64_LIMIT \
or zinfo.compress_size > ZIP64_LIMIT:
extra.append(zinfo.file_size)
extra.append(zinfo.compress_size)
file_size = 0xffffffff #-1
compress_size = 0xffffffff #-1
else:
file_size = zinfo.file_size
compress_size = zinfo.compress_size
if zinfo.header_offset > ZIP64_LIMIT:
extra.append(zinfo.header_offset)
header_offset = -1 # struct "l" format: 32 one bits
else:
header_offset = zinfo.header_offset
extra_data = zinfo.extra
if extra:
# Append a ZIP64 field to the extra's
extra_data = struct.pack(
'<hh' + 'q'*len(extra),
1, 8*len(extra), *extra) + extra_data
extract_version = max(45, zinfo.extract_version)
create_version = max(45, zinfo.create_version)
else:
extract_version = zinfo.extract_version
create_version = zinfo.create_version
centdir = struct.pack(structCentralDir,
stringCentralDir, create_version,
zinfo.create_system, extract_version, zinfo.reserved,
zinfo.flag_bits, zinfo.compress_type, dostime, dosdate,
zinfo.CRC, compress_size, file_size,
len(zinfo.filename), len(extra_data), len(zinfo.comment),
0, zinfo.internal_attr, zinfo.external_attr,
header_offset)
self.fp.write(centdir)
self.fp.write(zinfo.filename)
self.fp.write(extra_data)
self.fp.write(zinfo.comment)
pos2 = self.fp.tell()
# Write end-of-zip-archive record
if pos1 > ZIP64_LIMIT:
# Need to write the ZIP64 end-of-archive records
zip64endrec = struct.pack(
structEndArchive64, stringEndArchive64,
44, 45, 45, 0, 0, count, count, pos2 - pos1, pos1)
self.fp.write(zip64endrec)
zip64locrec = struct.pack(
structEndArchive64Locator,
stringEndArchive64Locator, 0, pos2, 1)
self.fp.write(zip64locrec)
endrec = struct.pack(structEndArchive, stringEndArchive,
0, 0, count, count, pos2 - pos1, -1, 0)
self.fp.write(endrec)
else:
endrec = struct.pack(structEndArchive, stringEndArchive,
0, 0, count, count, pos2 - pos1, pos1, 0)
self.fp.write(endrec)
self.fp.flush()
if not self._filePassed:
self.fp.close()
self.fp = None
class PyZipFile(ZipFile):
"""Class to create ZIP archives with Python library files and packages."""
def writepy(self, pathname, basename = ""):
"""Add all files from "pathname" to the ZIP archive.
If pathname is a package directory, search the directory and
all package subdirectories recursively for all *.py and enter
the modules into the archive. If pathname is a plain
directory, listdir *.py and enter all modules. Else, pathname
must be a Python *.py file and the module will be put into the
archive. Added modules are always module.pyo or module.pyc.
This method will compile the module.py into module.pyc if
necessary.
"""
dir, name = os.path.split(pathname)
if os.path.isdir(pathname):
initname = os.path.join(pathname, "__init__.py")
if os.path.isfile(initname):
# This is a package directory, add it
if basename:
basename = "%s/%s" % (basename, name)
else:
basename = name
if self.debug:
print "Adding package in", pathname, "as", basename
fname, arcname = self._get_codename(initname[0:-3], basename)
if self.debug:
print "Adding", arcname
self.write(fname, arcname)
dirlist = os.listdir(pathname)
dirlist.remove("__init__.py")
# Add all *.py files and package subdirectories
for filename in dirlist:
path = os.path.join(pathname, filename)
ext = os.path.splitext(filename)[-1]
if os.path.isdir(path):
if os.path.isfile(os.path.join(path, "__init__.py")):
# This is a package directory, add it
self.writepy(path, basename) # Recursive call
elif ext == ".py":
fname, arcname = self._get_codename(path[0:-3],
basename)
if self.debug:
print "Adding", arcname
self.write(fname, arcname)
else:
# This is NOT a package directory, add its files at top level
if self.debug:
print "Adding files from directory", pathname
for filename in os.listdir(pathname):
path = os.path.join(pathname, filename)
ext = os.path.splitext(filename)[-1]
if ext == ".py":
fname, arcname = self._get_codename(path[0:-3],
basename)
if self.debug:
print "Adding", arcname
self.write(fname, arcname)
else:
if pathname[-3:] != ".py":
raise RuntimeError, \
'Files added with writepy() must end with ".py"'
fname, arcname = self._get_codename(pathname[0:-3], basename)
if self.debug:
print "Adding file", arcname
self.write(fname, arcname)
def _get_codename(self, pathname, basename):
"""Return (filename, archivename) for the path.
Given a module name path, return the correct file path and
archive name, compiling if necessary. For example, given
/python/lib/string, return (/python/lib/string.pyc, string).
"""
file_py = pathname + ".py"
file_pyc = pathname + ".pyc"
file_pyo = pathname + ".pyo"
if os.path.isfile(file_pyo) and \
os.stat(file_pyo).st_mtime >= os.stat(file_py).st_mtime:
fname = file_pyo # Use .pyo file
elif not os.path.isfile(file_pyc) or \
os.stat(file_pyc).st_mtime < os.stat(file_py).st_mtime:
import py_compile
if self.debug:
print "Compiling", file_py
try:
py_compile.compile(file_py, file_pyc, None, True)
except py_compile.PyCompileError,err:
print err.msg
fname = file_pyc
else:
fname = file_pyc
archivename = os.path.split(fname)[1]
if basename:
archivename = "%s/%s" % (basename, archivename)
return (fname, archivename)
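# Usage sketch for the class above (illustrative only; paths are hypothetical):
#   zf = PyZipFile('pylib.zip', 'w')
#   zf.writepy('/path/to/mypackage')   # recursively adds the package's modules as .pyc/.pyo
#   zf.close()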
def main(args = None):
import textwrap
USAGE=textwrap.dedent("""\
Usage:
zipfile.py -l zipfile.zip # Show listing of a zipfile
zipfile.py -t zipfile.zip # Test if a zipfile is valid
zipfile.py -e zipfile.zip target # Extract zipfile into target dir
zipfile.py -c zipfile.zip src ... # Create zipfile from sources
""")
if args is None:
args = sys.argv[1:]
if not args or args[0] not in ('-l', '-c', '-e', '-t'):
print USAGE
sys.exit(1)
if args[0] == '-l':
if len(args) != 2:
print USAGE
sys.exit(1)
zf = ZipFile(args[1], 'r')
zf.printdir()
zf.close()
elif args[0] == '-t':
if len(args) != 2:
print USAGE
sys.exit(1)
zf = ZipFile(args[1], 'r')
zf.testzip()
print "Done testing"
elif args[0] == '-e':
if len(args) != 3:
print USAGE
sys.exit(1)
zf = ZipFile(args[1], 'r')
out = args[2]
for path in zf.namelist():
if path.startswith('./'):
tgt = os.path.join(out, path[2:])
else:
tgt = os.path.join(out, path)
tgtdir = os.path.dirname(tgt)
if not os.path.exists(tgtdir):
os.makedirs(tgtdir)
fp = open(tgt, 'wb')
fp.write(zf.read(path))
fp.close()
zf.close()
elif args[0] == '-c':
if len(args) < 3:
print USAGE
sys.exit(1)
def addToZip(zf, path, zippath):
if os.path.isfile(path):
zf.write(path, zippath, ZIP_DEFLATED)
elif os.path.isdir(path):
for nm in os.listdir(path):
addToZip(zf,
os.path.join(path, nm), os.path.join(zippath, nm))
# else: ignore
zf = ZipFile(args[1], 'w', allowZip64=True)
for src in args[2:]:
addToZip(zf, src, os.path.basename(src))
zf.close()
if __name__ == "__main__":
main()
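The delete(), replace() and replacestr() methods are the local additions that justify carrying this copy of the module. A minimal usage sketch, with hypothetical archive and entry names and an assumed import path, would be:

# Hedged sketch; the names below are made up for illustration.
from calibre.utils.zipfile import ZipFile, ZipInfo   # assumed import path for this modified copy

zf = ZipFile('mybook.epub', 'a')    # append mode; the archive must already exist so per-entry offsets can be computed
zf.replace('metadata.opf')          # delete the old entry, then write the local file back under the same name
zf.replacestr(ZipInfo('mimetype'), 'application/epub+zip')
zf.delete('stale.css')              # later entries are shifted down and their offsets adjusted
zf.close()                          # rewrites the central directory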

View File

@@ -4,7 +4,7 @@ sys.path.append('src')
import subprocess
from subprocess import check_call as _check_call
from functools import partial
#from pyvix.vix import Host, VIX_SERVICEPROVIDER_VMWARE_WORKSTATION
def get_ip_address(ifname):
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
return socket.inet_ntoa(fcntl.ioctl(
@@ -66,12 +66,19 @@ def start_vm(vm, ssh_host, build_script, sleep=75):
def build_windows(shutdown=True):
installer = installer_name('exe')
vm = '/vmware/Windows XP/Windows XP Professional.vmx'
- start_vm(vm, 'windows', BUILD_SCRIPT%('python setup.py develop', 'python','windows_installer.py'))
+ start_vm(vm, 'windows', BUILD_SCRIPT%('python setup.py develop', 'python','installer\\\\windows\\\\freeze.py'))
- subprocess.check_call(('scp', 'windows:build/%s/dist/*.exe'%PROJECT, 'dist'))
+ subprocess.check_call(('scp', '-rp', 'windows:build/%s/build/py2exe'%PROJECT, 'build'))
- if not os.path.exists(installer):
+ if not os.path.exists('build/py2exe'):
- raise Exception('Failed to build installer '+installer)
+ raise Exception('Failed to run py2exe')
if shutdown:
subprocess.Popen(('ssh', 'windows', 'shutdown', '-s', '-t', '0'))
+ ibp = os.path.abspath('installer/windows')
+ sys.path.insert(0, ibp)
+ import build_installer
+ sys.path.remove(ibp)
+ build_installer.run_install_jammer(installer_name=os.path.basename(installer))
+ if not os.path.exists(installer):
+ raise Exception('Failed to run installjammer')
return os.path.basename(installer)
def build_osx(shutdown=True):

View File

@@ -538,7 +538,7 @@ def main():
sys.argv[1:2] = ['py2exe']
console = [dict(dest_base=basenames['console'][i], script=scripts['console'][i])
- for i in range(len(scripts['console']))]# if not 'parallel.py' in scripts['console'][i] ]
+ for i in range(len(scripts['console']))]
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'src'))
setup(
cmdclass = {'py2exe': BuildEXE},
@@ -567,7 +567,8 @@ def main():
'calibre.ebooks.lrf.feeds.*',
'lxml', 'lxml._elementpath', 'genshi',
'path', 'pydoc', 'IPython.Extensions.*',
- 'calibre.web.feeds.recipes.*', 'PyQt4.QtWebKit',
+ 'calibre.web.feeds.recipes.*',
+ 'PyQt4.QtWebKit', 'PyQt4.QtNetwork',
],
'packages' : ['PIL'],
'excludes' : ["Tkconstants", "Tkinter", "tcl",