mirror of
https://github.com/kovidgoyal/calibre.git
synced 2026-02-11 05:54:14 -05:00
Describe your change here
This commit is contained in:
commit
269c330512
@ -5,9 +5,5 @@
|
||||
<pydev_property name="org.python.pydev.PYTHON_PROJECT_VERSION">python 2.5</pydev_property>
|
||||
<pydev_pathproperty name="org.python.pydev.PROJECT_SOURCE_PATH">
|
||||
<path>/calibre/src</path>
|
||||
<path>/calibre/devices</path>
|
||||
<path>/calibre/libprs500.devices.prs500</path>
|
||||
<path>/calibre/prs500</path>
|
||||
<path>/calibre/gui2</path>
|
||||
</pydev_pathproperty>
|
||||
</pydev_project>
|
||||
|
||||
244
installer/cx_Freeze/HISTORY.txt
Normal file
244
installer/cx_Freeze/HISTORY.txt
Normal file
@ -0,0 +1,244 @@
|
||||
Changes from 4.0 to 4.0.1
|
||||
1) Added support for Python 2.6. On Windows a manifest file is now required
|
||||
because of the switch to using the new Microsoft C runtime.
|
||||
2) Ensure that hooks are run for builtin modules.
|
||||
|
||||
Changes from 4.0b1 to 4.0
|
||||
1) Added support for copying files to the target directory.
|
||||
2) Added support for a hook that runs when a module is missing.
|
||||
3) Added support for binary path includes as well as excludes; use sequences
|
||||
rather than dictionaries as a more convenient API; exclude the standard
|
||||
locations for 32-bit and 64-bit libraries in multi-architecture systems.
|
||||
4) Added support for searching zip files (egg files) for modules.
|
||||
5) Added support for handling system exit exceptions similarly to what Python
|
||||
does itself as requested by Sylvain.
|
||||
6) Added code to wait for threads to shut down like the normal Python
|
||||
interpreter does. Thanks to Mariano Disanzo for discovering this
|
||||
discrepancy.
|
||||
7) Hooks added or modified based on feedback from many people.
|
||||
8) Don't include the version name in the display name of the MSI.
|
||||
9) Use the OS dependent path normalization routines rather than simply use the
|
||||
lowercase value as on Unix case is important; thanks to Artie Eoff for
|
||||
pointing this out.
|
||||
10) Include a version attribute in the cx_Freeze package and display it in the
|
||||
output for the --version option to the script.
|
||||
11) Include build instructions as requested by Norbert Sebok.
|
||||
12) Add support for copying files when modules are included which require data
|
||||
files to operate properly; add support for copying the necessary files for
|
||||
the Tkinter and matplotlib modules.
|
||||
13) Handle deferred imports recursively as needed; ensure that from lists do
|
||||
not automatically indicate that they are part of the module or the deferred
|
||||
import processing doesn't actually work!
|
||||
14) Handle the situation where a module imports everything from a package and
|
||||
the __all__ variable has been defined but the package has not actually
|
||||
imported everything in the __all__ variable during initialization.
|
||||
15) Modified license text to more closely match the Python Software Foundation
|
||||
license as was intended.
|
||||
16) Added sample script for freezing an application using matplotlib.
|
||||
17) Renamed freeze to cxfreeze to avoid conflict with another package that uses
|
||||
that executable as requested by Siegfried Gevatter.
|
||||
|
||||
Changes from 3.0.3 to 4.0b1
|
||||
1) Added support for placing modules in library.zip or in a separate zip file
|
||||
for each executable that is produced.
|
||||
2) Added support for copying binary dependent files (DLLs and shared
|
||||
libraries)
|
||||
3) Added support for including all submodules in a package
|
||||
4) Added support for including icons in Windows executables
|
||||
5) Added support for constants module which can be used for determining
|
||||
certain build constants at runtime
|
||||
6) Added support for relative imports available in Python 2.5 and up
|
||||
7) Added support for building Windows installers (Python 2.5 and up) and
|
||||
RPM packages
|
||||
8) Added support for distutils configuration scripts
|
||||
9) Added support for hooks which can force inclusion or exclusion of modules
|
||||
when certain modules are included
|
||||
10) Added documentation and samples
|
||||
11) Added setup.py for building the cx_Freeze package instead of a script
|
||||
used to build only the frozen bases
|
||||
12) FreezePython renamed to a script called freeze in the Python distribution
|
||||
13) On Linux and other platforms that support it set LD_RUN_PATH to include
|
||||
the directory in which the executable is located
|
||||
|
||||
Changes from 3.0.2 to 3.0.3
|
||||
1) In Common.c, used MAXPATHLEN defined in the Python OS independent include
|
||||
file rather than the PATH_MAX define which is OS dependent and is not
|
||||
available on IRIX as noted by Andrew Jones.
|
||||
2) In the initscript ConsoleSetLibPath.py, added lines from initscript
|
||||
Console.py that should have been there since the only difference between
|
||||
that script and this one is the automatic re-execution of the executable.
|
||||
3) Added an explicit "import encodings" to the initscripts in order to handle
|
||||
Unicode encodings a little better. Thanks to Ralf Schmitt for pointing out
|
||||
the problem and its solution.
|
||||
4) Generated a meaningful name for the extension loader script so that it is
|
||||
clear which particular extension module is being loaded when an exception
|
||||
is being raised.
|
||||
5) In MakeFrozenBases.py, use distutils to figure out a few more
|
||||
platform-dependent linker flags as suggested by Ralf Schmitt.
|
||||
|
||||
Changes from 3.0.1 to 3.0.2
|
||||
1) Add support for compressing the byte code in the zip files that are
|
||||
produced.
|
||||
2) Add better support for the win32com package as requested by Barry Scott.
|
||||
3) Prevent deletion of target file if it happens to be identical to the
|
||||
source file.
|
||||
4) Include additional flags for local modifications to a Python build as
|
||||
suggested by Benjamin Rutt.
|
||||
5) Expanded instructions for building cx_Freeze from source based on a
|
||||
suggestion from Gregg Lind.
|
||||
6) Fix typo in help string.
|
||||
|
||||
Changes from 3.0 to 3.0.1
|
||||
1) Added option --default-path which is used to specify the path used when
|
||||
finding modules. This is particularly useful when performing cross
|
||||
compilations (such as for building a frozen executable for Windows CE).
|
||||
2) Added option --shared-lib-name which can be used to specify the name of
|
||||
the shared library (DLL) implementing the Python runtime that is required
|
||||
for the frozen executable to work. This option is also particularly useful
|
||||
when cross compiling since the normal method for determining this
|
||||
information cannot be used.
|
||||
3) Added option --zip-include which allows for additional files to be added
|
||||
to the zip file that contains the modules that implement the Python
|
||||
script. Thanks to Barry Warsaw for providing the initial patch.
|
||||
4) Added support for handling read-only files properly. Thanks to Peter
|
||||
Grayson for pointing out the problem and providing a solution.
|
||||
5) Added support for a frozen executable to be a symbolic link. Thanks to
|
||||
Robert Kiendl for providing the initial patch.
|
||||
6) Enhanced the support for running a frozen executable that uses an existing
|
||||
Python installation to locate modules it requires. This is primarily of
|
||||
use for embedding Python where the interface is C but the ability to run
|
||||
from source is still desired.
|
||||
7) Modified the documentation to indicate that building from source on
|
||||
Windows currently requires the mingw compiler (http://www.mingw.org).
|
||||
8) Workaround the problem in Python 2.3 (fixed in Python 2.4) which causes a
|
||||
broken module to be left in sys.modules if an ImportError takes place
|
||||
during the execution of the code in that module. Thanks to Roger Binns
|
||||
for pointing this out.
|
||||
|
||||
Changes from 3.0 beta3 to 3.0
|
||||
1) Ensure that ldd is only run on extension modules.
|
||||
2) Allow for using a compiler other than gcc for building the frozen base
|
||||
executables by setting the environment variable CC.
|
||||
3) Ensure that the import lock is not held while executing the main script;
|
||||
otherwise, attempts to import a module within a thread will hang that
|
||||
thread as noted by Roger Binns.
|
||||
4) Added support for replacing the paths in all frozen modules with something
|
||||
else (so that for example the path of the machine on which the freezing
|
||||
was done is not displayed in tracebacks)
|
||||
|
||||
Changes from 3.0 beta2 to 3.0 beta3
|
||||
1) Explicitly include the warnings module so that at runtime warnings are
|
||||
suppressed as when running Python normally.
|
||||
2) Improve the extension loader so that an ImportError is raised when the
|
||||
dynamic module is not located; otherwise an error about missing attributes
|
||||
is raised instead.
|
||||
3) Extension loaders are only created when copying dependencies since the
|
||||
normal module should be loadable in the situation where a Python
|
||||
installation is available.
|
||||
4) Added support for Python 2.4.
|
||||
5) Fixed the dependency checking for wxPython to be a little more
|
||||
intelligent.
|
||||
|
||||
Changes from 3.0 beta1 to 3.0 beta2
|
||||
1) Fix issues with locating the initscripts and bases relative to the
|
||||
directory in which the executable was started.
|
||||
2) Added new base executable ConsoleKeepPath which is used when an existing
|
||||
Python installation is required (such as for FreezePython itself).
|
||||
3) Forced the existence of a Python installation to be ignored when using the
|
||||
standard Console base executable.
|
||||
4) Remove the existing file when copying dependent files; otherwise, an error
|
||||
is raised when attempting to overwrite read-only files.
|
||||
5) Added option -O (or -OO) to FreezePython to set the optimization used when
|
||||
generating bytecode.
|
||||
|
||||
Changes from 2.2 to 3.0 beta1
|
||||
1) cx_Freeze now requires Python 2.3 or higher since it takes advantage of
|
||||
the ability of Python 2.3 and higher to import modules from zip files.
|
||||
This makes the freezing process considerably simpler and also allows for
|
||||
the execution of multiple frozen packages (such as found in COM servers or
|
||||
shared libraries) without requiring modification to the Python modules.
|
||||
2) All external dependencies have been removed. cx_Freeze now only requires
|
||||
a standard Python distribution to do its work.
|
||||
3) Added the ability to define the initialization scripts that cx_Freeze uses
|
||||
on startup of the frozen program. Previously, these scripts were written
|
||||
in C and could not easily be changed; now they are written in Python and
|
||||
can be found in the initscripts directory (and chosen with the
|
||||
new --init-script option to FreezePython).
|
||||
4) The base executable ConsoleSetLibPath has been removed and replaced with
|
||||
the initscript ConsoleSetLibPath.
|
||||
5) Removed base executables for Win32 services and Win32 COM servers. This
|
||||
functionality will be restored in the future but it is not currently in a
|
||||
state that is ready for release. If this functionality is required, please
|
||||
use py2exe or contact me for my work in progress.
|
||||
6) The attribute sys.frozen is now set so that more recent pywin32 modules
|
||||
work as expected when frozen.
|
||||
7) Added option --include-path to FreezePython to allow overriding of
|
||||
sys.path without modifying the environment variable PYTHONPATH.
|
||||
8) Added option --target-dir/--install-dir to specify the directory in which
|
||||
the frozen executable and its dependencies will be placed.
|
||||
9) Removed the option --shared-lib since it was used for building shared
|
||||
libraries and can be managed with the initscript SharedLib.py.
|
||||
10) MakeFrozenBases.py now checks the platform specific include directory as
|
||||
requested by Michael Partridge.
|
||||
|
||||
|
||||
Changes from 2.1 to 2.2
|
||||
1) Add option (--ext-list-file) to FreezePython to write the list of
|
||||
extensions copied to the installation directory to a file. This option is
|
||||
useful in cases where multiple builds are performed into the same
|
||||
installation directory.
|
||||
2) Pass the arguments on the command line through to Win32 GUI applications.
|
||||
Thanks to Michael Porter for pointing this out.
|
||||
3) Link directly against the python DLL when building the frozen bases on
|
||||
Windows, thus eliminating the need for building an import library.
|
||||
4) Force sys.path to include the directory in which the script to be frozen
|
||||
is found.
|
||||
5) Make sure that the installation directory exists before attempting to
|
||||
copy the target binary into it.
|
||||
6) The Win32GUI base has been modified to display fatal errors in message
|
||||
boxes, rather than printing errors to stderr, since on Windows the
|
||||
standard file IO handles are all closed.
|
||||
|
||||
Changes from 2.0 to 2.1
|
||||
1) Remove dependency on Python 2.2. Thanks to Paul Moore for not only
|
||||
pointing it out but providing patches.
|
||||
2) Set up the list of frozen modules in advance, rather than doing it after
|
||||
Python is initialized so that implicit imports done by Python can be
|
||||
satisfied. The bug in Python 2.3 that demonstrated this issue has been
|
||||
fixed in the first release candidate. Thanks to Thomas Heller for pointing
|
||||
out the obvious in this instance!
|
||||
3) Added additional base executable (ConsoleSetLibPath) to support setting
|
||||
the LD_LIBRARY_PATH variable on Unix platforms and restarting the
|
||||
executable to put the new setting into effect. This is primarily of use
|
||||
in distributing wxPython applications on Unix where the shared library
|
||||
has an embedded RPATH value which can cause problems.
|
||||
4) Small improvements of documentation based on feedback from several people.
|
||||
5) Print information about the files written or copied during the freezing
|
||||
process.
|
||||
6) Do not copy extensions when freezing if the path is being overridden since
|
||||
it is expected that a full Python installation is available to the target
|
||||
users of the frozen binary.
|
||||
7) Provide meaningful error message when the wxPython library cannot be
|
||||
found during the freezing process.
|
||||
|
||||
Changes from 1.1 to 2.0
|
||||
1) Added support for in process (DLL) COM servers using PythonCOM.
|
||||
2) Ensured that the frozen flag is set prior to determining the full path for
|
||||
the program in order to avoid warnings about Python not being found on
|
||||
some platforms.
|
||||
3) Added include file and resource file to the source tree to avoid the
|
||||
dependency on the Wine message compiler for Win32 builds.
|
||||
4) Dropped the option --copy-extensions; this now happens automatically since
|
||||
the resulting binary is useless without them.
|
||||
5) Added a sample for building a Win32 service.
|
||||
6) Make use of improved modules from Python 2.3 (which function under 2.2)
|
||||
|
||||
Changes from 1.0 to 1.1
|
||||
1) Fixed import error with C extensions in packages; thanks to Thomas Heller
|
||||
for pointing out the solution to this problem.
|
||||
2) Added options to FreezePython to allow for the inclusion of modules which
|
||||
will not be found by the module finder (--include-modules) and the
|
||||
exclusion of modules which will be found by the module finder but should
|
||||
not be included (--exclude-modules).
|
||||
3) Fixed typo in README.txt.
|
||||
|
||||
53
installer/cx_Freeze/LICENSE.txt
Normal file
53
installer/cx_Freeze/LICENSE.txt
Normal file
@ -0,0 +1,53 @@
|
||||
Copyright © 2007-2008, Colt Engineering, Edmonton, Alberta, Canada.
|
||||
Copyright © 2001-2006, Computronix (Canada) Ltd., Edmonton, Alberta, Canada.
|
||||
All rights reserved.
|
||||
|
||||
NOTE: this license is derived from the Python Software Foundation License
|
||||
which can be found at http://www.python.org/psf/license
|
||||
|
||||
License for cx_Freeze 4.0.1
|
||||
---------------------------
|
||||
|
||||
1. This LICENSE AGREEMENT is between the copyright holders and the Individual
|
||||
or Organization ("Licensee") accessing and otherwise using cx_Freeze
|
||||
software in source or binary form and its associated documentation.
|
||||
|
||||
2. Subject to the terms and conditions of this License Agreement, the
|
||||
copyright holders hereby grant Licensee a nonexclusive, royalty-free,
|
||||
world-wide license to reproduce, analyze, test, perform and/or display
|
||||
publicly, prepare derivative works, distribute, and otherwise use cx_Freeze
|
||||
alone or in any derivative version, provided, however, that this License
|
||||
Agreement and this notice of copyright are retained in cx_Freeze alone or in
|
||||
any derivative version prepared by Licensee.
|
||||
|
||||
3. In the event Licensee prepares a derivative work that is based on or
|
||||
incorporates cx_Freeze or any part thereof, and wants to make the derivative
|
||||
work available to others as provided herein, then Licensee hereby agrees to
|
||||
include in any such work a brief summary of the changes made to cx_Freeze.
|
||||
|
||||
4. The copyright holders are making cx_Freeze available to Licensee on an
|
||||
"AS IS" basis. THE COPYRIGHT HOLDERS MAKE NO REPRESENTATIONS OR WARRANTIES,
|
||||
EXPRESS OR IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, THE COPYRIGHT
|
||||
HOLDERS MAKE NO AND DISCLAIM ANY REPRESENTATION OR WARRANTY OF
|
||||
MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF
|
||||
CX_FREEZE WILL NOT INFRINGE ANY THIRD PARTY RIGHTS.
|
||||
|
||||
5. THE COPYRIGHT HOLDERS SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF
|
||||
CX_FREEZE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
|
||||
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING CX_FREEZE, OR ANY
|
||||
DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
|
||||
|
||||
6. This License Agreement will automatically terminate upon a material breach
|
||||
of its terms and conditions.
|
||||
|
||||
7. Nothing in this License Agreement shall be deemed to create any relationship
|
||||
of agency, partnership, or joint venture between the copyright holders and
|
||||
Licensee. This License Agreement does not grant permission to use
|
||||
copyright holder's trademarks or trade name in a trademark sense to endorse
|
||||
or promote products or services of Licensee, or any third party.
|
||||
|
||||
8. By copying, installing or otherwise using cx_Freeze, Licensee agrees to be
|
||||
bound by the terms and conditions of this License Agreement.
|
||||
|
||||
Computronix® is a registered trademark of Computronix (Canada) Ltd.
|
||||
|
||||
6
installer/cx_Freeze/MANIFEST.in
Normal file
6
installer/cx_Freeze/MANIFEST.in
Normal file
@ -0,0 +1,6 @@
|
||||
include MANIFEST.in
|
||||
include *.txt
|
||||
recursive-include doc *.html
|
||||
recursive-include initscripts *.py
|
||||
recursive-include samples *.py
|
||||
recursive-include source *.c *.rc
|
||||
22
installer/cx_Freeze/PKG-INFO
Normal file
22
installer/cx_Freeze/PKG-INFO
Normal file
@ -0,0 +1,22 @@
|
||||
Metadata-Version: 1.0
|
||||
Name: cx_Freeze
|
||||
Version: 4.0.1
|
||||
Summary: create standalone executables from Python scripts
|
||||
Home-page: http://cx-freeze.sourceforge.net
|
||||
Author: Anthony Tuininga
|
||||
Author-email: anthony.tuininga@gmail.com
|
||||
License: Python Software Foundation License
|
||||
Description: create standalone executables from Python scripts
|
||||
Keywords: freeze
|
||||
Platform: UNKNOWN
|
||||
Classifier: Development Status :: 5 - Production/Stable
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: License :: OSI Approved :: Python Software Foundation License
|
||||
Classifier: Natural Language :: English
|
||||
Classifier: Operating System :: OS Independent
|
||||
Classifier: Programming Language :: C
|
||||
Classifier: Programming Language :: Python
|
||||
Classifier: Topic :: Software Development :: Build Tools
|
||||
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
||||
Classifier: Topic :: System :: Software Distribution
|
||||
Classifier: Topic :: Utilities
|
||||
12
installer/cx_Freeze/README.txt
Normal file
12
installer/cx_Freeze/README.txt
Normal file
@ -0,0 +1,12 @@
|
||||
Please see cx_Freeze.html for documentation on how to use cx_Freeze.
|
||||
|
||||
To build:
|
||||
|
||||
python setup.py build
|
||||
python setup.py install
|
||||
|
||||
On Windows I have used the MinGW compiler (http://www.mingw.org)
|
||||
|
||||
python setup.py build --compiler=mingw32
|
||||
python setup.py build --compiler=mingw32 install
|
||||
|
||||
14
installer/cx_Freeze/cx_Freeze/__init__.py
Normal file
14
installer/cx_Freeze/cx_Freeze/__init__.py
Normal file
@ -0,0 +1,14 @@
|
||||
version = "4.0.1"
|
||||
|
||||
import sys
|
||||
from dist import *
|
||||
if sys.platform == "win32" and sys.version_info[:2] >= (2, 5):
|
||||
from windist import *
|
||||
from finder import *
|
||||
from freezer import *
|
||||
from main import *
|
||||
|
||||
del dist
|
||||
del finder
|
||||
del freezer
|
||||
|
||||
279
installer/cx_Freeze/cx_Freeze/dist.py
Normal file
279
installer/cx_Freeze/cx_Freeze/dist.py
Normal file
@ -0,0 +1,279 @@
|
||||
import distutils.command.bdist_rpm
|
||||
import distutils.command.build
|
||||
import distutils.command.install
|
||||
import distutils.core
|
||||
import distutils.dir_util
|
||||
import distutils.dist
|
||||
import distutils.util
|
||||
import distutils.version
|
||||
import os
|
||||
import sys
|
||||
|
||||
import cx_Freeze
|
||||
|
||||
__all__ = [ "bdist_rpm", "build", "build_exe", "install", "install_exe",
|
||||
"setup" ]
|
||||
|
||||
class Distribution(distutils.dist.Distribution):
    """Distribution subclass that carries the list of frozen executables.

    The ``executables`` attribute is seeded before the base constructor
    runs, because distutils copies recognized keys from *attrs* onto the
    instance during initialization and would otherwise reject
    ``executables`` as an unknown option.
    """

    def __init__(self, attrs):
        # Must exist before the base __init__ consumes *attrs*.
        self.executables = []
        distutils.dist.Distribution.__init__(self, attrs)
|
||||
|
||||
|
||||
class bdist_rpm(distutils.command.bdist_rpm.bdist_rpm):
    """bdist_rpm variant tuned for architecture-specific packages."""

    def finalize_options(self):
        distutils.command.bdist_rpm.bdist_rpm.finalize_options(self)
        # Frozen executables contain native code, so honour the RPM
        # optimization flags.
        self.use_rpm_opt_flags = 1

    def _make_spec_file(self):
        # Strip "BuildArch: noarch" from the generated spec: the
        # package produced here is architecture specific.
        spec = distutils.command.bdist_rpm.bdist_rpm._make_spec_file(self)
        return [line for line in spec if line != 'BuildArch: noarch']
|
||||
|
||||
|
||||
class build(distutils.command.build.build):
    """build command extended with a separate directory for executables."""

    user_options = distutils.command.build.build.user_options + [
        ('build-exe=', None, 'build directory for executables')
    ]

    def get_sub_commands(self):
        # Only schedule build_exe when the setup script actually
        # declared executables to freeze.
        commands = distutils.command.build.build.get_sub_commands(self)
        if self.distribution.executables:
            commands.append("build_exe")
        return commands

    def initialize_options(self):
        distutils.command.build.build.initialize_options(self)
        self.build_exe = None

    def finalize_options(self):
        distutils.command.build.build.finalize_options(self)
        if self.build_exe is None:
            # Mirror distutils' platform naming, e.g.
            # build/exe.linux-i686-2.5
            platformName = "exe.%s-%s" % \
                    (distutils.util.get_platform(), sys.version[0:3])
            self.build_exe = os.path.join(self.build_base, platformName)
|
||||
|
||||
|
||||
class build_exe(distutils.core.Command):
    """distutils command that freezes the declared executables.

    Collects freezing options from the command line / setup script,
    builds a constants module from the ``constants`` option and hands
    everything to cx_Freeze.Freezer.
    """
    description = "build executables from Python scripts"
    user_options = [
        ('build-exe=', 'b',
         'directory for built executables'),
        ('optimize=', 'O',
         'optimization level: -O1 for "python -O", '
         '-O2 for "python -OO" and -O0 to disable [default: -O0]'),
        ('excludes=', 'e',
         'comma-separated list of modules to exclude'),
        ('includes=', 'i',
         'comma-separated list of modules to include'),
        ('packages=', 'p',
         'comma-separated list of packages to include'),
        ('replace-paths=', None,
         'comma-separated list of paths to replace in included modules'),
        ('path=', None,
         'comma-separated list of paths to search'),
        # NOTE(review): short option 'i' duplicates the one used by
        # --includes above -- confirm which option should own it.
        ('init-script=', 'i',
         'name of script to use during initialization'),
        ('base=', None,
         'name of base executable to use'),
        ('compressed', 'c',
         'create a compressed zipfile'),
        ('copy-dependent-files', None,
         'copy all dependent files'),
        ('create-shared-zip', None,
         'create a shared zip file containing shared modules'),
        ('append-script-to-exe', None,
         'append the script module to the exe'),
        ('include-in-shared-zip', None,
         'include the script module in the shared zip file'),
        # icon, bin-includes and bin-excludes carry values, so they need
        # the trailing "="; without it distutils' option parser treats
        # them as no-argument flags and their values (used in run()
        # below) could never be supplied from the command line.
        ('icon=', None,
         'include the icon along with the frozen executable(s)'),
        ('constants=', None,
         'comma-separated list of constants to include'),
        ('include-files=', 'f',
         'list of tuples of additional files to include in distribution'),
        ('bin-includes=', None,
         'list of names of files to include when determining dependencies'),
        ('bin-excludes=', None,
         'list of names of files to exclude when determining dependencies')
    ]
    boolean_options = ["compressed", "copy_dependent_files",
            "create_shared_zip", "append_script_to_exe",
            "include_in_shared_zip"]

    def _normalize(self, attrName):
        """Coerce the named option to a list: None becomes [], a string
        becomes its whitespace-separated words, any other sequence is
        copied into a fresh list."""
        value = getattr(self, attrName)
        if value is None:
            normalizedValue = []
        elif isinstance(value, basestring):
            normalizedValue = value.split()
        else:
            normalizedValue = list(value)
        setattr(self, attrName, normalizedValue)

    def initialize_options(self):
        self.optimize = 0
        self.build_exe = None
        self.excludes = []
        self.includes = []
        self.packages = []
        self.replace_paths = []
        self.compressed = None
        self.copy_dependent_files = None
        self.init_script = None
        self.base = None
        self.path = None
        self.create_shared_zip = None
        self.append_script_to_exe = None
        self.include_in_shared_zip = None
        self.icon = None
        self.constants = []
        self.include_files = []
        self.bin_excludes = []
        self.bin_includes = []

    def finalize_options(self):
        self.set_undefined_options('build', ('build_exe', 'build_exe'))
        self.optimize = int(self.optimize)
        # Options that may arrive as whitespace-separated command-line
        # strings are normalized to lists.
        self._normalize("excludes")
        self._normalize("includes")
        self._normalize("packages")
        self._normalize("constants")

    def run(self):
        """Build the constants module and freeze all declared executables."""
        metadata = self.distribution.metadata
        constantsModule = cx_Freeze.ConstantsModule(metadata.version)
        for constant in self.constants:
            # Split on the first "=" only so constant values may
            # themselves contain "=".
            parts = constant.split("=", 1)
            if len(parts) == 1:
                name = constant
                value = None
            else:
                name, stringValue = parts
                # WARNING: eval() of a setup-script-supplied expression;
                # acceptable only because setup.py is itself trusted
                # code executed by the same user.
                value = eval(stringValue)
            constantsModule.values[name] = value
        freezer = cx_Freeze.Freezer(self.distribution.executables,
                [constantsModule], self.includes, self.excludes, self.packages,
                self.replace_paths, self.compressed, self.optimize,
                self.copy_dependent_files, self.init_script, self.base,
                self.path, self.create_shared_zip, self.append_script_to_exe,
                self.include_in_shared_zip, self.build_exe, icon = self.icon,
                includeFiles = self.include_files,
                binIncludes = self.bin_includes,
                binExcludes = self.bin_excludes)
        freezer.Freeze()
|
||||
|
||||
|
||||
class install(distutils.command.install.install):
    """install command extended with a destination for executables."""

    user_options = distutils.command.install.install.user_options + [
        ('install-exe=', None,
         'installation directory for executables')
    ]

    def expand_dirs(self):
        distutils.command.install.install.expand_dirs(self)
        # Let $base-style variables be expanded in install_exe too.
        self._expand_attrs(['install_exe'])

    def get_sub_commands(self):
        # Add install_exe when executables were declared, and drop
        # install_egg_info, which has no meaning for a frozen build.
        commands = distutils.command.install.install.get_sub_commands(self)
        if self.distribution.executables:
            commands.append("install_exe")
        return [cmd for cmd in commands if cmd != "install_egg_info"]

    def initialize_options(self):
        distutils.command.install.install.initialize_options(self)
        self.install_exe = None

    def finalize_options(self):
        # On Windows, default the prefix to a versioned folder under
        # the registry-configured Program Files directory.
        if self.prefix is None and sys.platform == "win32":
            import _winreg
            key = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE,
                    r"Software\Microsoft\Windows\CurrentVersion")
            prefix = str(_winreg.QueryValueEx(key, "ProgramFilesDir")[0])
            metadata = self.distribution.metadata
            versionedName = "%s-%s" % (metadata.name, metadata.version)
            self.prefix = "%s/%s" % (prefix, versionedName)
        distutils.command.install.install.finalize_options(self)
        self.convert_paths('exe')
        if self.root is not None:
            self.change_roots('exe')

    def select_scheme(self, name):
        distutils.command.install.install.select_scheme(self, name)
        if self.install_exe is None:
            if sys.platform == "win32":
                self.install_exe = '$base'
            else:
                metadata = self.distribution.metadata
                versionedName = "%s-%s" % (metadata.name, metadata.version)
                self.install_exe = '$base/lib/%s' % versionedName
|
||||
|
||||
|
||||
class install_exe(distutils.core.Command):
    """Copy the frozen executables (and their dependencies) into place."""

    description = "install executables built from Python scripts"
    user_options = [
        ('install-dir=', 'd', 'directory to install executables to'),
        ('build-dir=', 'b', 'build directory (where to install from)'),
        ('force', 'f', 'force installation (overwrite existing files)'),
        ('skip-build', None, 'skip the build steps')
    ]

    def initialize_options(self):
        self.install_dir = None
        self.force = 0
        self.build_dir = None
        self.skip_build = None

    def finalize_options(self):
        # Inherit unset options from the build and install commands.
        self.set_undefined_options('build', ('build_exe', 'build_dir'))
        self.set_undefined_options('install',
                ('install_exe', 'install_dir'),
                ('force', 'force'),
                ('skip_build', 'skip_build'))

    def run(self):
        if not self.skip_build:
            self.run_command('build_exe')
        self.outfiles = self.copy_tree(self.build_dir, self.install_dir)
        if sys.platform != "win32":
            # On Unix, expose each executable through a relative symlink
            # in <prefix>/bin next to the lib directory it lives in.
            baseDir = os.path.dirname(os.path.dirname(self.install_dir))
            binDir = os.path.join(baseDir, "bin")
            if not os.path.exists(binDir):
                os.makedirs(binDir)
            sourceDir = os.path.join("..", self.install_dir[len(baseDir) + 1:])
            for executable in self.distribution.executables:
                name = os.path.basename(executable.targetName)
                linkSource = os.path.join(sourceDir, name)
                linkTarget = os.path.join(binDir, name)
                # Replace any stale link/file before creating the link.
                if os.path.exists(linkTarget):
                    os.unlink(linkTarget)
                os.symlink(linkSource, linkTarget)
                self.outfiles.append(linkTarget)

    def get_inputs(self):
        return self.distribution.executables or []

    def get_outputs(self):
        return self.outfiles or []
|
||||
|
||||
|
||||
def _AddCommandClass(commandClasses, name, cls):
|
||||
if name not in commandClasses:
|
||||
commandClasses[name] = cls
|
||||
|
||||
|
||||
def setup(**attrs):
    """Drop-in replacement for distutils.core.setup with freezing support.

    Forces the executable-aware Distribution class and registers the
    cx_Freeze command classes, deferring to any command classes the
    caller supplied explicitly.
    """
    attrs["distclass"] = Distribution
    commandClasses = attrs.setdefault("cmdclass", {})
    if sys.platform == "win32":
        # MSI support needs the msilib module, available from 2.5.
        if sys.version_info[:2] >= (2, 5):
            _AddCommandClass(commandClasses, "bdist_msi", cx_Freeze.bdist_msi)
    else:
        _AddCommandClass(commandClasses, "bdist_rpm", cx_Freeze.bdist_rpm)
    for commandName, commandClass in (("build", build),
            ("build_exe", build_exe), ("install", install),
            ("install_exe", install_exe)):
        _AddCommandClass(commandClasses, commandName, commandClass)
    distutils.core.setup(**attrs)
|
||||
|
||||
455
installer/cx_Freeze/cx_Freeze/finder.py
Normal file
455
installer/cx_Freeze/cx_Freeze/finder.py
Normal file
@ -0,0 +1,455 @@
|
||||
"""
|
||||
Base class for finding modules.
|
||||
"""
|
||||
|
||||
import dis
|
||||
import imp
|
||||
import marshal
|
||||
import new
|
||||
import opcode
|
||||
import os
|
||||
import sys
|
||||
import zipfile
|
||||
|
||||
import cx_Freeze.hooks
|
||||
|
||||
BUILD_LIST = opcode.opmap["BUILD_LIST"]
|
||||
INPLACE_ADD = opcode.opmap["INPLACE_ADD"]
|
||||
LOAD_CONST = opcode.opmap["LOAD_CONST"]
|
||||
IMPORT_NAME = opcode.opmap["IMPORT_NAME"]
|
||||
IMPORT_FROM = opcode.opmap["IMPORT_FROM"]
|
||||
STORE_NAME = opcode.opmap["STORE_NAME"]
|
||||
STORE_GLOBAL = opcode.opmap["STORE_GLOBAL"]
|
||||
STORE_OPS = (STORE_NAME, STORE_GLOBAL)
|
||||
|
||||
__all__ = [ "Module", "ModuleFinder" ]
|
||||
|
||||
class ModuleFinder(object):
    """Build a graph of Module objects by scanning compiled bytecode for
    import statements, mirroring Python's import machinery (including
    compiled modules inside zip/egg files on the path) without actually
    executing any of the modules found."""

    def __init__(self, includeFiles, excludes, path, replacePaths):
        self.includeFiles = includeFiles
        # dicts are used as sets throughout (keys only, values are None)
        self.excludes = dict.fromkeys(excludes)
        self.replacePaths = replacePaths
        self.path = path or sys.path
        self.modules = []
        self.aliases = {}
        # maps module name -> Module; excluded names map to None so lookups
        # short-circuit without searching
        self._modules = dict.fromkeys(excludes)
        self._builtinModules = dict.fromkeys(sys.builtin_module_names)
        # maps missing module name -> dict of caller names
        self._badModules = {}
        self._zipFileEntries = {}
        self._zipFiles = {}
        cx_Freeze.hooks.initialize(self)

    def _AddModule(self, name):
        """Add a module to the list of modules but if one is already found,
           then return it instead; this is done so that packages can be
           handled properly."""
        module = self._modules.get(name)
        if module is None:
            module = self._modules[name] = Module(name)
            self.modules.append(module)
            # the module was located after all; forget any earlier complaint
            if name in self._badModules:
                del self._badModules[name]
        return module

    def _DetermineParent(self, caller):
        """Determine the parent to use when searching packages."""
        if caller is not None:
            if caller.path is not None:
                # the caller is itself a package
                return caller
            return self._GetParentByName(caller.name)

    def _EnsureFromList(self, caller, packageModule, fromList,
            deferredImports):
        """Ensure that the from list is satisfied. This is only necessary for
           package modules. If the caller is the package itself, actually
           attempt to import right then since it must be a submodule; otherwise
           defer until after all global names are defined in order to avoid
           spurious complaints about missing modules."""
        if caller is not packageModule:
            deferredImports.append((packageModule, fromList))
        else:
            if fromList == ("*",):
                fromList = packageModule.allNames
            for name in fromList:
                # names already known as globals are not submodules
                if name in packageModule.globalNames:
                    continue
                subModuleName = "%s.%s" % (packageModule.name, name)
                self._ImportModule(subModuleName, deferredImports, caller)

    def _FindModule(self, name, path):
        # Try the standard import machinery first; on failure, index any zip
        # files (eggs) on the path and look the module up there.
        try:
            return imp.find_module(name, path)
        except ImportError:
            if not path:
                path = []
            for location in path:
                if name in self._zipFileEntries:
                    break
                # each zip file is scanned only once
                if location in self._zipFiles:
                    continue
                if os.path.isdir(location) or not zipfile.is_zipfile(location):
                    self._zipFiles[location] = None
                    continue
                zip = zipfile.ZipFile(location)
                for archiveName in zip.namelist():
                    baseName, ext = os.path.splitext(archiveName)
                    if ext not in ('.pyc', '.pyo'):
                        continue
                    moduleName = ".".join(baseName.split("/"))
                    if moduleName in self._zipFileEntries:
                        continue
                    self._zipFileEntries[moduleName] = (zip, archiveName)
                self._zipFiles[location] = None
            info = self._zipFileEntries.get(name)
            if info is not None:
                zip, archiveName = info
                # NOTE: fp here is the raw bytes of the .pyc, not a file
                # object; _LoadModule handles both cases
                fp = zip.read(archiveName)
                info = (".pyc", "rb", imp.PY_COMPILED)
                return fp, os.path.join(zip.filename, archiveName), info
            raise

    def _GetParentByName(self, name):
        """Return the parent module given the name of a module."""
        pos = name.rfind(".")
        if pos > 0:
            parentName = name[:pos]
            return self._modules[parentName]

    def _ImportAllSubModules(self, module, deferredImports, recursive = True):
        """Import all sub modules to the given package."""
        suffixes = dict.fromkeys([s[0] for s in imp.get_suffixes()])
        for dir in module.path:
            try:
                fileNames = os.listdir(dir)
            except os.error:
                continue
            for fileName in fileNames:
                name, ext = os.path.splitext(fileName)
                if ext not in suffixes:
                    continue
                if name == "__init__":
                    continue
                subModuleName = "%s.%s" % (module.name, name)
                subModule, returnError = \
                        self._InternalImportModule(subModuleName,
                                deferredImports)
                if returnError and subModule is None:
                    raise ImportError, "No module named %s" % subModuleName
                module.globalNames[name] = None
                if subModule.path and recursive:
                    self._ImportAllSubModules(subModule, deferredImports,
                            recursive)

    def _ImportDeferredImports(self, deferredImports):
        """Import any sub modules that were deferred, if applicable."""
        # satisfying one from-list can defer further imports, so iterate
        # until the work list is empty
        while deferredImports:
            newDeferredImports = []
            for packageModule, subModuleNames in deferredImports:
                self._EnsureFromList(packageModule, packageModule,
                        subModuleNames, newDeferredImports)
            deferredImports = newDeferredImports

    def _ImportModule(self, name, deferredImports, caller = None,
            relativeImportIndex = 0):
        """Attempt to find the named module and return it or None if no module
           by that name could be found."""

        # absolute import (available in Python 2.5 and up)
        # the name given is the only name that will be searched
        if relativeImportIndex == 0:
            module, returnError = self._InternalImportModule(name,
                    deferredImports)

        # old style relative import (only possibility in Python 2.4 and prior)
        # the name given is tried in all parents until a match is found and if
        # no match is found, the global namespace is searched
        elif relativeImportIndex < 0:
            parent = self._DetermineParent(caller)
            while parent is not None:
                fullName = "%s.%s" % (parent.name, name)
                module, returnError = self._InternalImportModule(fullName,
                        deferredImports)
                if module is not None:
                    parent.globalNames[name] = None
                    return module
                parent = self._GetParentByName(parent.name)
            module, returnError = self._InternalImportModule(name,
                    deferredImports)

        # new style relative import (available in Python 2.5 and up)
        # the index indicates how many levels to traverse and only that level
        # is searched for the named module
        elif relativeImportIndex > 0:
            parent = caller
            # a package counts as its own first level
            if parent.path is not None:
                relativeImportIndex -= 1
            while parent is not None and relativeImportIndex > 0:
                parent = self._GetParentByName(parent.name)
                relativeImportIndex -= 1
            if parent is None:
                module = None
                returnError = True
            elif not name:
                # "from . import x" style: the parent package itself
                module = parent
            else:
                name = "%s.%s" % (parent.name, name)
                module, returnError = self._InternalImportModule(name,
                        deferredImports)

        # if module not found, track that fact
        if module is None:
            if caller is None:
                raise ImportError, "No module named %s" % name
            self._RunHook("missing", name, caller)
            if returnError and name not in caller.ignoreNames:
                callers = self._badModules.setdefault(name, {})
                callers[caller.name] = None

        return module

    def _InternalImportModule(self, name, deferredImports):
        """Internal method used for importing a module which assumes that the
           name given is an absolute name. None is returned if the module
           cannot be found."""
        try:
            # already processed (excluded names are stored as None)
            return self._modules[name], False
        except KeyError:
            pass
        if name in self._builtinModules:
            module = self._AddModule(name)
            self._RunHook("load", module.name, module)
            return module, False
        pos = name.rfind(".")
        if pos < 0:
            path = self.path
            searchName = name
            parentModule = None
        else:
            # import the parent package first; its path limits the search
            parentName = name[:pos]
            parentModule, returnError = \
                    self._InternalImportModule(parentName, deferredImports)
            if parentModule is None:
                return None, returnError
            path = parentModule.path
            searchName = name[pos + 1:]
        if name in self.aliases:
            actualName = self.aliases[name]
            module, returnError = \
                    self._InternalImportModule(actualName, deferredImports)
            self._modules[name] = module
            return module, returnError
        try:
            fp, path, info = self._FindModule(searchName, path)
        except ImportError:
            # remember the failure so it is not retried
            self._modules[name] = None
            return None, True
        module = self._LoadModule(name, fp, path, info, deferredImports,
                parentModule)
        return module, False

    def _LoadModule(self, name, fp, path, info, deferredImports,
            parent = None):
        """Load the module, given the information acquired by the finder."""
        suffix, mode, type = info
        if type == imp.PKG_DIRECTORY:
            return self._LoadPackage(name, path, parent, deferredImports)
        module = self._AddModule(name)
        module.file = path
        module.parent = parent
        if type == imp.PY_SOURCE:
            module.code = compile(fp.read() + "\n", path, "exec")
        elif type == imp.PY_COMPILED:
            # fp is a string of bytes when the module came from a zip file
            # (see _FindModule) and a file object otherwise
            if isinstance(fp, str):
                magic = fp[:4]
            else:
                magic = fp.read(4)
            if magic != imp.get_magic():
                raise ImportError, "Bad magic number in %s" % path
            if isinstance(fp, str):
                module.code = marshal.loads(fp[8:])
                module.inZipFile = True
            else:
                fp.read(4)
                module.code = marshal.load(fp)
        self._RunHook("load", module.name, module)
        if module.code is not None:
            if self.replacePaths:
                topLevelModule = module
                while topLevelModule.parent is not None:
                    topLevelModule = topLevelModule.parent
                module.code = self._ReplacePathsInCode(topLevelModule,
                        module.code)
            # scan the bytecode for further imports
            self._ScanCode(module.code, module, deferredImports)
        return module

    def _LoadPackage(self, name, path, parent, deferredImports):
        """Load the package, given its name and path."""
        module = self._AddModule(name)
        module.path = [path]
        fp, path, info = imp.find_module("__init__", module.path)
        self._LoadModule(name, fp, path, info, deferredImports, parent)
        return module

    def _ReplacePathsInCode(self, topLevelModule, co):
        """Replace paths in the code as directed, returning a new code object
           with the modified paths in place."""
        origFileName = newFileName = os.path.normpath(co.co_filename)
        for searchValue, replaceValue in self.replacePaths:
            if searchValue == "*":
                # "*" means the directory containing the top level package
                searchValue = os.path.dirname(topLevelModule.file)
                if topLevelModule.path:
                    searchValue = os.path.dirname(searchValue)
                if searchValue:
                    searchValue = searchValue + os.pathsep
            elif not origFileName.startswith(searchValue):
                continue
            newFileName = replaceValue + origFileName[len(searchValue):]
            break
        # nested code objects (functions, classes) must be rewritten too
        constants = list(co.co_consts)
        for i, value in enumerate(constants):
            if isinstance(value, type(co)):
                constants[i] = self._ReplacePathsInCode(topLevelModule, value)
        return new.code(co.co_argcount, co.co_nlocals, co.co_stacksize,
                co.co_flags, co.co_code, tuple(constants), co.co_names,
                co.co_varnames, newFileName, co.co_name, co.co_firstlineno,
                co.co_lnotab, co.co_freevars, co.co_cellvars)

    def _RunHook(self, hookName, moduleName, *args):
        """Run hook for the given module if one is present."""
        # e.g. hook "load" for module "a.b" -> cx_Freeze.hooks.load_a_b
        name = "%s_%s" % (hookName, moduleName.replace(".", "_"))
        method = getattr(cx_Freeze.hooks, name, None)
        if method is not None:
            method(self, *args)

    def _ScanCode(self, co, module, deferredImports):
        """Scan code, looking for imported modules and keeping track of the
           constants that have been created in order to better tell which
           modules are truly missing."""
        opIndex = 0
        arguments = []
        code = co.co_code
        numOps = len(code)
        while opIndex < numOps:
            op = ord(code[opIndex])
            opIndex += 1
            if op >= dis.HAVE_ARGUMENT:
                opArg = ord(code[opIndex]) + ord(code[opIndex + 1]) * 256
                opIndex += 2
            if op == LOAD_CONST:
                # accumulate constants; they form the arguments to a
                # subsequent IMPORT_NAME or the value stored in __all__
                arguments.append(co.co_consts[opArg])
            elif op == IMPORT_NAME:
                name = co.co_names[opArg]
                if len(arguments) == 2:
                    # Python 2.5+: (relative import level, from list)
                    relativeImportIndex, fromList = arguments
                else:
                    relativeImportIndex = -1
                    fromList, = arguments
                if name not in module.excludeNames:
                    subModule = self._ImportModule(name, deferredImports,
                            module, relativeImportIndex)
                    if subModule is not None:
                        module.globalNames.update(subModule.globalNames)
                        if fromList and subModule.path is not None:
                            self._EnsureFromList(module, subModule, fromList,
                                    deferredImports)
            elif op == IMPORT_FROM:
                # skip the following STORE op; the name is already handled
                opIndex += 3
            elif op not in (BUILD_LIST, INPLACE_ADD):
                if op in STORE_OPS:
                    name = co.co_names[opArg]
                    if name == "__all__":
                        module.allNames.extend(arguments)
                    module.globalNames[name] = None
                arguments = []
        # recurse into nested code objects (functions, classes, lambdas)
        for constant in co.co_consts:
            if isinstance(constant, type(co)):
                self._ScanCode(constant, module, deferredImports)

    def AddAlias(self, name, aliasFor):
        """Add an alias for a particular module; when an attempt is made to
           import a module using the alias name, import the actual name
           instead."""
        self.aliases[name] = aliasFor

    def ExcludeModule(self, name):
        """Exclude the named module from the resulting frozen executable."""
        self.excludes[name] = None
        self._modules[name] = None

    def IncludeFile(self, path, moduleName = None):
        """Include the named file as a module in the frozen executable."""
        name, ext = os.path.splitext(os.path.basename(path))
        if moduleName is None:
            moduleName = name
        info = (ext, "r", imp.PY_SOURCE)
        deferredImports = []
        module = self._LoadModule(moduleName, file(path, "U"), path, info,
                deferredImports)
        self._ImportDeferredImports(deferredImports)
        return module

    def IncludeFiles(self, sourcePath, targetPath):
        """Include the files in the given directory in the target build."""
        self.includeFiles.append((sourcePath, targetPath))

    def IncludeModule(self, name):
        """Include the named module in the frozen executable."""
        deferredImports = []
        module = self._ImportModule(name, deferredImports)
        self._ImportDeferredImports(deferredImports)
        return module

    def IncludePackage(self, name):
        """Include the named package and any submodules in the frozen
           executable."""
        deferredImports = []
        module = self._ImportModule(name, deferredImports)
        if module.path:
            self._ImportAllSubModules(module, deferredImports)
        self._ImportDeferredImports(deferredImports)
        return module

    def ReportMissingModules(self):
        # Print a sorted summary of modules that were imported somewhere but
        # could not be located, along with the modules that imported them.
        if self._badModules:
            print "Missing modules:"
            names = self._badModules.keys()
            names.sort()
            for name in names:
                callers = self._badModules[name].keys()
                callers.sort()
                print "?", name, "imported from", ", ".join(callers)
            print
||||
class Module(object):
|
||||
|
||||
def __init__(self, name):
|
||||
self.name = name
|
||||
self.file = None
|
||||
self.path = None
|
||||
self.code = None
|
||||
self.parent = None
|
||||
self.globalNames = {}
|
||||
self.excludeNames = {}
|
||||
self.ignoreNames = {}
|
||||
self.allNames = []
|
||||
self.inZipFile = False
|
||||
|
||||
def __repr__(self):
|
||||
parts = ["name=%s" % repr(self.name)]
|
||||
if self.file is not None:
|
||||
parts.append("file=%s" % repr(self.file))
|
||||
if self.path is not None:
|
||||
parts.append("path=%s" % repr(self.path))
|
||||
return "<Module %s>" % ", ".join(parts)
|
||||
|
||||
def AddGlobalName(self, name):
|
||||
self.globalNames[name] = None
|
||||
|
||||
def ExcludeName(self, name):
|
||||
self.excludeNames[name] = None
|
||||
|
||||
def IgnoreName(self, name):
|
||||
self.ignoreNames[name] = None
|
||||
|
||||
550
installer/cx_Freeze/cx_Freeze/freezer.py
Normal file
550
installer/cx_Freeze/cx_Freeze/freezer.py
Normal file
@ -0,0 +1,550 @@
|
||||
"""
|
||||
Base class for freezing scripts into executables.
|
||||
"""
|
||||
|
||||
import datetime
|
||||
import distutils.sysconfig
|
||||
import imp
|
||||
import marshal
|
||||
import os
|
||||
import shutil
|
||||
import socket
|
||||
import stat
|
||||
import struct
|
||||
import sys
|
||||
import time
|
||||
import zipfile
|
||||
|
||||
import cx_Freeze
|
||||
import cx_Freeze.util
|
||||
|
||||
__all__ = [ "ConfigError", "ConstantsModule", "Executable", "Freezer" ]
|
||||
|
||||
if sys.platform == "win32":
|
||||
pythonDll = "python%s%s.dll" % sys.version_info[:2]
|
||||
GLOBAL_BIN_PATH_EXCLUDES = [cx_Freeze.util.GetSystemDir()]
|
||||
GLOBAL_BIN_INCLUDES = [
|
||||
pythonDll,
|
||||
"gdiplus.dll",
|
||||
"mfc71.dll",
|
||||
"msvcp71.dll",
|
||||
"msvcr71.dll"
|
||||
]
|
||||
GLOBAL_BIN_EXCLUDES = [
|
||||
"comctl32.dll",
|
||||
"oci.dll",
|
||||
"cx_Logging.pyd"
|
||||
]
|
||||
else:
|
||||
extension = distutils.sysconfig.get_config_var("SO")
|
||||
pythonSharedLib = "libpython%s.%s%s" % \
|
||||
(sys.version_info[:2] + (extension,))
|
||||
GLOBAL_BIN_INCLUDES = [pythonSharedLib]
|
||||
GLOBAL_BIN_EXCLUDES = [
|
||||
"libclntsh.so",
|
||||
"libwtc9.so"
|
||||
]
|
||||
GLOBAL_BIN_PATH_EXCLUDES = ["/lib", "/lib32", "/lib64", "/usr/lib",
|
||||
"/usr/lib32", "/usr/lib64"]
|
||||
|
||||
|
||||
# NOTE: the try: except: block in this code is not necessary under Python 2.4
|
||||
# and higher and can be removed once support for Python 2.3 is no longer needed
|
||||
EXTENSION_LOADER_SOURCE = \
|
||||
"""
|
||||
import imp, os, sys
|
||||
|
||||
found = False
|
||||
for p in sys.path:
|
||||
if not os.path.isdir(p):
|
||||
continue
|
||||
f = os.path.join(p, "%s")
|
||||
if not os.path.exists(f):
|
||||
continue
|
||||
try:
|
||||
m = imp.load_dynamic(__name__, f)
|
||||
except ImportError:
|
||||
del sys.modules[__name__]
|
||||
raise
|
||||
sys.modules[__name__] = m
|
||||
found = True
|
||||
break
|
||||
if not found:
|
||||
del sys.modules[__name__]
|
||||
raise ImportError, "No module named %%s" %% __name__
|
||||
"""
|
||||
|
||||
|
||||
class Freezer(object):
    """Drive the freezing process: find the modules required by each
    executable, copy the base executables and their binary dependencies into
    the target directory and write the modules into zip archives."""

    def __init__(self, executables, constantsModules = [], includes = [],
            excludes = [], packages = [], replacePaths = [], compress = None,
            optimizeFlag = 0, copyDependentFiles = None, initScript = None,
            base = None, path = None, createLibraryZip = None,
            appendScriptToExe = None, appendScriptToLibrary = None,
            targetDir = None, binIncludes = [], binExcludes = [],
            binPathIncludes = [], binPathExcludes = [], icon = None,
            includeFiles = []):
        self.executables = executables
        self.constantsModules = constantsModules
        self.includes = includes
        self.excludes = excludes
        self.packages = packages
        self.replacePaths = replacePaths
        self.compress = compress
        self.optimizeFlag = optimizeFlag
        self.copyDependentFiles = copyDependentFiles
        self.initScript = initScript
        self.base = base
        self.path = path
        self.createLibraryZip = createLibraryZip
        self.appendScriptToExe = appendScriptToExe
        self.appendScriptToLibrary = appendScriptToLibrary
        self.targetDir = targetDir
        # merge with the platform defaults and normalize case so comparisons
        # work on case-insensitive filesystems
        self.binIncludes = [os.path.normcase(n) \
                for n in GLOBAL_BIN_INCLUDES + binIncludes]
        self.binExcludes = [os.path.normcase(n) \
                for n in GLOBAL_BIN_EXCLUDES + binExcludes]
        self.binPathIncludes = [os.path.normcase(n) for n in binPathIncludes]
        self.binPathExcludes = [os.path.normcase(n) \
                for n in GLOBAL_BIN_PATH_EXCLUDES + binPathExcludes]
        self.icon = icon
        self.includeFiles = includeFiles
        self._VerifyConfiguration()

    def _CopyFile(self, source, target, copyDependentFiles,
            includeMode = False):
        # Copy a single file (at most once per target), optionally preserving
        # its permission bits and recursively copying the shared libraries it
        # depends upon.
        normalizedSource = os.path.normcase(os.path.normpath(source))
        normalizedTarget = os.path.normcase(os.path.normpath(target))
        if normalizedTarget in self.filesCopied:
            return
        if normalizedSource == normalizedTarget:
            return
        self._RemoveFile(target)
        targetDir = os.path.dirname(target)
        self._CreateDirectory(targetDir)
        print "copying", source, "->", target
        shutil.copyfile(source, target)
        if includeMode:
            shutil.copymode(source, target)
        self.filesCopied[normalizedTarget] = None
        if copyDependentFiles:
            for source in self._GetDependentFiles(source):
                target = os.path.join(targetDir, os.path.basename(source))
                self._CopyFile(source, target, copyDependentFiles)

    def _CreateDirectory(self, path):
        # Create the directory (and parents) if it does not yet exist.
        if not os.path.isdir(path):
            print "creating directory", path
            os.makedirs(path)

    def _FreezeExecutable(self, exe):
        # Produce one frozen executable: copy the base binary, attach the
        # icon and, when no shared library zip is used, write the modules to
        # the executable itself or to a zip file beside it.
        if self.createLibraryZip:
            finder = self.finder
        else:
            finder = self._GetModuleFinder(exe)
        if exe.script is None:
            scriptModule = None
        else:
            scriptModule = finder.IncludeFile(exe.script, exe.moduleName)
        self._CopyFile(exe.base, exe.targetName, exe.copyDependentFiles,
                includeMode = True)
        if exe.icon is not None:
            if sys.platform == "win32":
                cx_Freeze.util.AddIcon(exe.targetName, exe.icon)
            else:
                targetName = os.path.join(os.path.dirname(exe.targetName),
                        os.path.basename(exe.icon))
                self._CopyFile(exe.icon, targetName,
                        copyDependentFiles = False)
        # the copied base may be read-only; make it writable before any
        # appending takes place
        if not os.access(exe.targetName, os.W_OK):
            mode = os.stat(exe.targetName).st_mode
            os.chmod(exe.targetName, mode | stat.S_IWUSR)
        if not exe.appendScriptToLibrary:
            if exe.appendScriptToExe:
                fileName = exe.targetName
            else:
                baseFileName, ext = os.path.splitext(exe.targetName)
                fileName = baseFileName + ".zip"
                self._RemoveFile(fileName)
            if not self.createLibraryZip and exe.copyDependentFiles:
                scriptModule = None
            self._WriteModules(fileName, exe.initScript, finder, exe.compress,
                    exe.copyDependentFiles, scriptModule)

    def _GetBaseFileName(self, argsSource = None):
        # Resolve the base executable name into a full path inside the
        # "bases" directory, defaulting by whether dependent files are copied.
        if argsSource is None:
            argsSource = self
        name = argsSource.base
        if name is None:
            if argsSource.copyDependentFiles:
                name = "Console"
            else:
                name = "ConsoleKeepPath"
        argsSource.base = self._GetFileName("bases", name)
        if argsSource.base is None:
            raise ConfigError("no base named %s", name)

    def _GetDependentFiles(self, path):
        # Determine (and cache) the binary dependencies of the given file,
        # filtered through _ShouldCopyFile.
        dependentFiles = self.dependentFiles.get(path)
        if dependentFiles is None:
            if sys.platform == "win32":
                # extend PATH temporarily so the dependency walker can find
                # DLLs that live alongside the Python modules
                origPath = os.environ["PATH"]
                os.environ["PATH"] = origPath + os.pathsep + \
                        os.pathsep.join(sys.path)
                dependentFiles = cx_Freeze.util.GetDependentFiles(path)
                os.environ["PATH"] = origPath
            else:
                # parse ldd output; lines look like "libfoo => /path (addr)"
                dependentFiles = []
                for line in os.popen('ldd "%s"' % path):
                    parts = line.strip().split(" => ")
                    if len(parts) != 2:
                        continue
                    dependentFile = parts[1]
                    if dependentFile == "not found":
                        print "WARNING: cannot find", parts[0]
                        continue
                    pos = dependentFile.find(" (")
                    if pos >= 0:
                        dependentFile = dependentFile[:pos].strip()
                    if dependentFile:
                        dependentFiles.append(dependentFile)
            dependentFiles = self.dependentFiles[path] = \
                    [f for f in dependentFiles if self._ShouldCopyFile(f)]
        return dependentFiles

    def _GetFileName(self, dir, name):
        # Locate a file by case-insensitive stem inside a directory of the
        # cx_Freeze package (used for bases and initscripts); absolute names
        # are returned unchanged.
        if os.path.isabs(name):
            return name
        name = os.path.normcase(name)
        fullDir = os.path.join(os.path.dirname(cx_Freeze.__file__), dir)
        if os.path.isdir(fullDir):
            for fileName in os.listdir(fullDir):
                if name == os.path.splitext(os.path.normcase(fileName))[0]:
                    return os.path.join(fullDir, fileName)

    def _GetInitScriptFileName(self, argsSource = None):
        # Resolve the initialization script name into a full path inside the
        # "initscripts" directory, with the same defaulting as the base.
        if argsSource is None:
            argsSource = self
        name = argsSource.initScript
        if name is None:
            if argsSource.copyDependentFiles:
                name = "Console"
            else:
                name = "ConsoleKeepPath"
        argsSource.initScript = self._GetFileName("initscripts", name)
        if argsSource.initScript is None:
            raise ConfigError("no initscript named %s", name)

    def _GetModuleFinder(self, argsSource = None):
        # Construct a ModuleFinder preloaded with the modules the frozen
        # runtime always needs plus the configured includes and packages.
        if argsSource is None:
            argsSource = self
        finder = cx_Freeze.ModuleFinder(self.includeFiles, argsSource.excludes,
                argsSource.path, argsSource.replacePaths)
        if argsSource.copyDependentFiles:
            finder.IncludeModule("imp")
            finder.IncludeModule("os")
            finder.IncludeModule("sys")
        if argsSource.compress:
            finder.IncludeModule("zlib")
        for name in argsSource.includes:
            finder.IncludeModule(name)
        for name in argsSource.packages:
            finder.IncludePackage(name)
        return finder

    def _PrintReport(self, fileName, modules):
        # "P" marks packages, "m" plain modules.
        print "writing zip file", fileName
        print
        print "  %-25s %s" % ("Name", "File")
        print "  %-25s %s" % ("----", "----")
        for module in modules:
            if module.path:
                print "P",
            else:
                print "m",
            print "%-25s" % module.name, module.file or ""
        print

    def _RemoveFile(self, path):
        # Force-remove a file, clearing read-only permissions first.
        if os.path.exists(path):
            os.chmod(path, 0777)
            os.remove(path)

    def _ShouldCopyFile(self, path):
        # Decide whether a binary dependency should be copied next to the
        # frozen executable, based on the include/exclude name and path lists.
        dir, name = os.path.split(os.path.normcase(path))
        # strip trailing numeric version components (e.g. libfoo.so.1.2)
        # before matching names
        parts = name.split(".")
        tweaked = False
        while True:
            if not parts[-1].isdigit():
                break
            parts.pop(-1)
            tweaked = True
        if tweaked:
            name = ".".join(parts)
        if name in self.binIncludes:
            return True
        if name in self.binExcludes:
            return False
        for path in self.binPathIncludes:
            if dir.startswith(path):
                return True
        for path in self.binPathExcludes:
            if dir.startswith(path):
                return False
        return True

    def _VerifyCanAppendToLibrary(self):
        # Appending the script to the library only makes sense when a library
        # zip is actually produced.
        if not self.createLibraryZip:
            raise ConfigError("script cannot be appended to library zip if "
                    "one is not being created")

    def _VerifyConfiguration(self):
        # Fill in defaults for all unset options, resolve the init script and
        # base file names and validate the include-files configuration.
        if self.compress is None:
            self.compress = True
        if self.copyDependentFiles is None:
            self.copyDependentFiles = True
        if self.createLibraryZip is None:
            self.createLibraryZip = True
        if self.appendScriptToExe is None:
            self.appendScriptToExe = False
        if self.appendScriptToLibrary is None:
            self.appendScriptToLibrary = \
                    self.createLibraryZip and not self.appendScriptToExe
        if self.targetDir is None:
            self.targetDir = os.path.abspath("dist")
        self._GetInitScriptFileName()
        self._GetBaseFileName()
        if self.path is None:
            self.path = sys.path
        if self.appendScriptToLibrary:
            self._VerifyCanAppendToLibrary()
        for sourceFileName, targetFileName in self.includeFiles:
            if not os.path.exists(sourceFileName):
                raise ConfigError("cannot find file/directory named %s",
                        sourceFileName)
            if os.path.isabs(targetFileName):
                raise ConfigError("target file/directory cannot be absolute")
        # let each executable inherit unset options from the freezer
        for executable in self.executables:
            executable._VerifyConfiguration(self)

    def _WriteModules(self, fileName, initScript, finder, compress,
            copyDependentFiles, scriptModule = None):
        # Write the found modules (as .pyc entries) into a zip file; C
        # extensions get an ExtensionLoader stub and are copied beside the
        # executable afterwards.
        initModule = finder.IncludeFile(initScript, "cx_Freeze__init__")
        if scriptModule is None:
            for module in self.constantsModules:
                module.Create(finder)
            modules = [m for m in finder.modules \
                    if m.name not in self.excludeModules]
        else:
            modules = [initModule, scriptModule]
            # avoid writing these again into a later zip file
            self.excludeModules[initModule.name] = None
            self.excludeModules[scriptModule.name] = None
        itemsToSort = [(m.name, m) for m in modules]
        itemsToSort.sort()
        modules = [m for n, m in itemsToSort]
        self._PrintReport(fileName, modules)
        if scriptModule is None:
            finder.ReportMissingModules()
        targetDir = os.path.dirname(fileName)
        self._CreateDirectory(targetDir)
        filesToCopy = []
        if os.path.exists(fileName):
            mode = "a"
        else:
            mode = "w"
        outFile = zipfile.PyZipFile(fileName, mode, zipfile.ZIP_DEFLATED)
        for module in modules:
            # modules without code are extension modules: replace them with
            # a generated loader stub and schedule the binary for copying
            if module.code is None and module.file is not None:
                fileName = os.path.basename(module.file)
                baseFileName, ext = os.path.splitext(fileName)
                if baseFileName != module.name and module.name != "zlib":
                    if "." in module.name:
                        fileName = module.name + ext
                    generatedFileName = "ExtensionLoader_%s.py" % \
                            module.name.replace(".", "_")
                    module.code = compile(EXTENSION_LOADER_SOURCE % fileName,
                            generatedFileName, "exec")
                target = os.path.join(targetDir, fileName)
                filesToCopy.append((module, target))
            if module.code is None:
                continue
            fileName = "/".join(module.name.split("."))
            if module.path:
                fileName += "/__init__"
            if module.file is not None and os.path.exists(module.file):
                mtime = os.stat(module.file).st_mtime
            else:
                mtime = time.time()
            zipTime = time.localtime(mtime)[:6]
            # build the .pyc by hand: magic number, timestamp, marshalled code
            data = imp.get_magic() + struct.pack("<i", mtime) + \
                    marshal.dumps(module.code)
            zinfo = zipfile.ZipInfo(fileName + ".pyc", zipTime)
            if compress:
                zinfo.compress_type = zipfile.ZIP_DEFLATED
            outFile.writestr(zinfo, data)
        origPath = os.environ["PATH"]
        for module, target in filesToCopy:
            try:
                if module.parent is not None:
                    # extend PATH so dependencies of the extension that live
                    # in the package directory can be resolved
                    path = os.pathsep.join([origPath] + module.parent.path)
                    os.environ["PATH"] = path
                self._CopyFile(module.file, target, copyDependentFiles)
            finally:
                os.environ["PATH"] = origPath

    def Freeze(self):
        """Perform the freeze: produce each executable, write the shared
        library zip (if configured) and copy the requested extra files."""
        self.finder = None
        self.excludeModules = {}
        self.dependentFiles = {}
        self.filesCopied = {}
        cx_Freeze.util.SetOptimizeFlag(self.optimizeFlag)
        if self.createLibraryZip:
            self.finder = self._GetModuleFinder()
        for executable in self.executables:
            self._FreezeExecutable(executable)
        if self.createLibraryZip:
            fileName = os.path.join(self.targetDir, "library.zip")
            self._RemoveFile(fileName)
            self._WriteModules(fileName, self.initScript, self.finder,
                    self.compress, self.copyDependentFiles)
        for sourceFileName, targetFileName in self.includeFiles:
            fullName = os.path.join(self.targetDir, targetFileName)
            if os.path.isdir(sourceFileName):
                # copy the tree, skipping version control directories
                for path, dirNames, fileNames in os.walk(sourceFileName):
                    shortPath = path[len(sourceFileName) + 1:]
                    if ".svn" in dirNames:
                        dirNames.remove(".svn")
                    if "CVS" in dirNames:
                        dirNames.remove("CVS")
                    for fileName in fileNames:
                        fullSourceName = os.path.join(path, fileName)
                        fullTargetName = os.path.join(self.targetDir,
                                targetFileName, shortPath, fileName)
                        self._CopyFile(fullSourceName, fullTargetName,
                                copyDependentFiles = False)
            else:
                self._CopyFile(sourceFileName, fullName,
                        copyDependentFiles = False)
||||
class ConfigError(Exception):
    """Raised when the freezer is configured inconsistently.

    The message is built printf-style from *format* and *args*.
    """

    def __init__(self, format, *args):
        message = format % args
        self.what = message

    def __str__(self):
        return self.what
|
||||
|
||||
|
||||
class Executable(object):
    """Describes one script to be frozen into a standalone executable.

    Every option left as None is filled in later from the owning Freezer
    by _VerifyConfiguration().
    """

    # options which, when unset, simply inherit the freezer's value
    _DEFAULTED_OPTIONS = ("path", "targetDir", "includes", "excludes",
            "packages", "replacePaths", "compress", "copyDependentFiles",
            "appendScriptToExe", "appendScriptToLibrary")

    def __init__(self, script, initScript = None, base = None, path = None,
            targetDir = None, targetName = None, includes = None,
            excludes = None, packages = None, replacePaths = None,
            compress = None, copyDependentFiles = None,
            appendScriptToExe = None, appendScriptToLibrary = None,
            icon = None):
        # store every constructor argument verbatim as an attribute of
        # the same name
        arguments = dict(locals())
        del arguments["self"]
        for attrName, value in arguments.items():
            setattr(self, attrName, value)

    def __repr__(self):
        return "<Executable script=%s>" % (self.script,)

    def _VerifyConfiguration(self, freezer):
        """Resolve unset options against the freezer's defaults and compute
        the module name and target file name for the script."""
        for optionName in self._DEFAULTED_OPTIONS:
            if getattr(self, optionName) is None:
                setattr(self, optionName, getattr(freezer, optionName))
        if self.initScript is None:
            self.initScript = freezer.initScript
        else:
            freezer._GetInitScriptFileName(self)
        if self.base is None:
            self.base = freezer.base
        else:
            freezer._GetBaseFileName(self)
        if self.appendScriptToLibrary:
            freezer._VerifyCanAppendToLibrary()
        if self.icon is None:
            self.icon = freezer.icon
        if self.script is not None:
            name, ext = os.path.splitext(os.path.basename(self.script))
            if self.appendScriptToLibrary:
                self.moduleName = "%s__main__" % os.path.normcase(name)
            else:
                self.moduleName = "__main__"
            if self.targetName is None:
                # the target keeps the script's name but the base binary's
                # extension (e.g. app.py + Console.exe -> app.exe)
                baseName, ext = os.path.splitext(self.base)
                self.targetName = name + ext
            self.targetName = os.path.join(self.targetDir, self.targetName)
|
||||
|
||||
|
||||
class ConstantsModule(object):
    """Builds a synthetic BUILD_CONSTANTS module holding build metadata
    (release string, copyright, build host and timestamps) that the frozen
    application can import at runtime."""

    def __init__(self, releaseString = None, copyright = None,
            moduleName = "BUILD_CONSTANTS", timeFormat = "%B %d, %Y %H:%M:%S"):
        self.moduleName = moduleName
        self.timeFormat = timeFormat
        # name -> value pairs that become module-level constants
        self.values = {}
        self.values["BUILD_RELEASE_STRING"] = releaseString
        self.values["BUILD_COPYRIGHT"] = copyright

    def Create(self, finder):
        """Create the module which consists of declaration statements for each
        of the values.

        SOURCE_TIMESTAMP is the newest mtime of any non-zipped module file.
        Raises ConfigError if a found module's file has disappeared."""
        today = datetime.datetime.today()
        sourceTimestamp = 0
        for module in finder.modules:
            if module.file is None:
                continue
            if module.inZipFile:
                continue
            if not os.path.exists(module.file):
                raise ConfigError("no file named %s", module.file)
            timestamp = os.stat(module.file).st_mtime
            sourceTimestamp = max(sourceTimestamp, timestamp)
        sourceTimestamp = datetime.datetime.fromtimestamp(sourceTimestamp)
        self.values["BUILD_TIMESTAMP"] = today.strftime(self.timeFormat)
        self.values["BUILD_HOST"] = socket.gethostname().split(".")[0]
        self.values["SOURCE_TIMESTAMP"] = \
                sourceTimestamp.strftime(self.timeFormat)
        module = finder._AddModule(self.moduleName)
        sourceParts = []
        # sorted() works on both Python 2 and 3; the previous
        # dict.keys().sort() breaks on Python 3 where keys() is a view
        for name in sorted(self.values):
            value = self.values[name]
            sourceParts.append("%s = %r" % (name, value))
        source = "\n".join(sourceParts)
        module.code = compile(source, "%s.py" % self.moduleName, "exec")
|
||||
|
||||
281
installer/cx_Freeze/cx_Freeze/hooks.py
Normal file
281
installer/cx_Freeze/cx_Freeze/hooks.py
Normal file
@ -0,0 +1,281 @@
|
||||
import os
|
||||
import sys
|
||||
|
||||
def initialize(finder):
    """Upon initialization of the finder, register the automatic exclusions
    for modules that can never be present on the current platform."""
    def exclude(*names):
        for name in names:
            finder.ExcludeModule(name)
    exclude("FCNTL", "os.path")
    if os.name == "nt":
        # unix-only modules
        exclude("fcntl", "grp", "pwd", "termios")
    else:
        # windows-only modules
        exclude("_winreg", "msilib", "msvcrt", "nt")
        if os.name not in ("os2", "ce"):
            exclude("ntpath")
        exclude("nturl2path", "pythoncom", "pywintypes", "winerror",
                "winsound", "win32api", "win32con", "win32event",
                "win32file", "win32pdh", "win32pipe", "win32process",
                "win32security", "win32service", "wx.activex")
    if os.name != "posix":
        exclude("posix")
    if os.name != "mac":
        exclude("Carbon", "gestalt", "ic", "mac", "MacOS", "macpath",
                "macurl2path")
        if os.name != "nt":
            exclude("EasyDialogs")
    if os.name != "os2":
        exclude("os2", "os2emxpath", "_emx_link")
    if os.name != "ce":
        exclude("ce")
    if os.name != "riscos":
        exclude("riscos", "riscosenviron", "riscospath", "rourl2path")
    if sys.platform[:4] != "java":
        exclude("java.lang", "org.python.core")
|
||||
|
||||
|
||||
def load_cElementTree(finder, module):
    """cElementTree imports elementtree.ElementTree behind the scenes;
    make that dependency explicit."""
    implicitName = "elementtree.ElementTree"
    finder.IncludeModule(implicitName)
|
||||
|
||||
|
||||
def load_ceODBC(finder, module):
    """ceODBC pulls in datetime and decimal behind the scenes; include
    both explicitly."""
    for implicitName in ("datetime", "decimal"):
        finder.IncludeModule(implicitName)
|
||||
|
||||
|
||||
def load_cx_Oracle(finder, module):
    """cx_Oracle imports datetime implicitly; include it explicitly."""
    implicitName = "datetime"
    finder.IncludeModule(implicitName)
|
||||
|
||||
|
||||
def load_docutils_frontend(finder, module):
    """docutils references optik, optparse's old name; treat a missing
    optik module as expected."""
    module.IgnoreName("optik")
|
||||
|
||||
|
||||
def load_dummy_threading(finder, module):
    """dummy_threading temporarily renames the real threading module for
    its own purposes; keep the helper name out of the module graph."""
    helperName = "_dummy_threading"
    finder.ExcludeModule(helperName)
|
||||
|
||||
|
||||
def load_email(finder, module):
    """Python 2.5 renamed the email submodules to lowercase; register the
    old mixed-case names as aliases so either spelling resolves."""
    if sys.version_info[:2] < (2, 5):
        return
    mixedCaseNames = ("Charset", "Encoders", "Errors", "FeedParser",
            "Generator", "Header", "Iterators", "Message", "Parser",
            "Utils", "base64MIME", "quopriMIME")
    for mixedCaseName in mixedCaseNames:
        finder.AddAlias("email.%s" % mixedCaseName,
                "email.%s" % mixedCaseName.lower())
|
||||
|
||||
|
||||
def load_ftplib(finder, module):
    """ftplib tries to import the optional SOCKS module; a failed import
    is harmless."""
    module.IgnoreName("SOCKS")
|
||||
|
||||
|
||||
def load_matplotlib(finder, module):
    """matplotlib requires its mpl-data directory to be found next to the
    frozen executable, so arrange for it to be copied there.

    The data directory lives inside the package, relative to the package's
    own search path."""
    # named dataDir rather than dir: the original shadowed the builtin dir()
    dataDir = os.path.join(module.path[0], "mpl-data")
    finder.IncludeFiles(dataDir, "mpl-data")
|
||||
|
||||
|
||||
def load_matplotlib_numerix(finder, module):
    """The numerix package loads several of its submodules dynamically;
    include each of them explicitly."""
    for subName in ("ma", "fft", "linear_algebra", "random_array", "mlab"):
        finder.IncludeModule(".".join([module.name, subName]))
|
||||
|
||||
|
||||
def load_numpy_linalg(finder, module):
    """numpy.linalg loads its lapack_lite helper dynamically; include it
    explicitly."""
    implicitName = "numpy.linalg.lapack_lite"
    finder.IncludeModule(implicitName)
|
||||
|
||||
|
||||
def load_pty(finder, module):
    """pty references the sgi module, which exists only on SGI systems;
    its absence is not an error."""
    module.IgnoreName("sgi")
|
||||
|
||||
|
||||
def load_pythoncom(finder, module):
    """pythoncom lives in a DLL which cannot be imported directly on
    Python 2.5 and later; import it through its loader module to discover
    the DLL's path, record the module as a plain extension (no bytecode),
    and pull in pywintypes which it always loads."""
    import pythoncom
    dllPath = pythoncom.__file__
    module.file = dllPath
    module.code = None
    finder.IncludeModule("pywintypes")
|
||||
|
||||
|
||||
def load_pywintypes(finder, module):
    """pywintypes lives in a DLL which cannot be imported directly on
    Python 2.5 and later; import it through its loader module to discover
    the DLL's path and record the module as a plain extension."""
    import pywintypes
    dllPath = pywintypes.__file__
    module.file = dllPath
    module.code = None
|
||||
|
||||
|
||||
def load_PyQt4_Qt(finder, module):
    """PyQt4.Qt is an extension module that imports many sibling modules
    and injects their namespaces into its own. The core modules must be
    present; the optional ones vary by installation, so a failed include
    is simply skipped."""
    for requiredName in ("PyQt4.QtCore", "PyQt4.QtGui", "sip"):
        finder.IncludeModule(requiredName)
    optionalNames = ("PyQt4.QtSvg", "PyQt4.Qsci", "PyQt4.QtAssistant",
            "PyQt4.QtNetwork", "PyQt4.QtOpenGL", "PyQt4.QtScript",
            "PyQt4._qt", "PyQt4.QtSql", "PyQt4.QtSvg", "PyQt4.QtTest",
            "PyQt4.QtXml")
    for optionalName in optionalNames:
        try:
            finder.IncludeModule(optionalName)
        except ImportError:
            pass
|
||||
|
||||
|
||||
def load_Tkinter(finder, module):
    """Tk requires its tcl/tk script libraries at runtime; locate them by
    asking a live interpreter and copy them to the directories expected by
    the frozen executable."""
    import Tkinter
    import _tkinter
    interpreter = _tkinter.create()
    libraryDir = os.path.dirname(interpreter.call("info", "library"))
    for prefix, version, targetName in (
            ("tcl", _tkinter.TCL_VERSION, "tcl"),
            ("tk", _tkinter.TK_VERSION, "tk")):
        sourceDir = os.path.join(libraryDir, "%s%s" % (prefix, version))
        finder.IncludeFiles(sourceDir, targetName)
|
||||
|
||||
|
||||
def load_tempfile(finder, module):
    """tempfile degrades gracefully when fcntl or thread is unavailable;
    treat both as optional."""
    for optionalName in ("fcntl", "thread"):
        module.IgnoreName(optionalName)
|
||||
|
||||
|
||||
def load_time(finder, module):
    """time loads _strptime implicitly; include it explicitly."""
    implicitName = "_strptime"
    finder.IncludeModule(implicitName)
|
||||
|
||||
|
||||
def load_win32api(finder, module):
    """win32api loads pywintypes implicitly; include it explicitly."""
    implicitName = "pywintypes"
    finder.IncludeModule(implicitName)
|
||||
|
||||
|
||||
def load_win32com(finder, module):
    """win32com extends its package search path at runtime with the sibling
    directory win32comext; mirror that here so its submodules are found."""
    siteDir = os.path.dirname(os.path.dirname(module.file))
    extensionsDir = os.path.join(siteDir, "win32comext")
    module.path.append(extensionsDir)
|
||||
|
||||
|
||||
def load_win32file(finder, module):
    """win32file loads pywintypes implicitly; include it explicitly."""
    implicitName = "pywintypes"
    finder.IncludeModule(implicitName)
|
||||
|
||||
|
||||
def load_xml(finder, module):
    """The builtin xml package probes for the optional _xmlplus
    replacement; its absence is expected."""
    module.IgnoreName("_xmlplus")
|
||||
|
||||
|
||||
def load_xml_etree_cElementTree(finder, module):
    """xml.etree.cElementTree loads xml.etree.ElementTree implicitly;
    include it explicitly."""
    implicitName = "xml.etree.ElementTree"
    finder.IncludeModule(implicitName)
|
||||
|
||||
def load_IPython(finder, module):
    """IPython loads its Extensions submodules dynamically; include every
    module found in the package's Extensions directory."""
    extensionsDir = os.path.join(os.path.dirname(module.file), 'Extensions')
    names = set(os.path.splitext(entry)[0]
            for entry in os.listdir(extensionsDir))
    names.remove('__init__')
    for name in names:
        finder.IncludeModule('IPython.Extensions.' + name)
|
||||
|
||||
def load_lxml(finder, module):
    """lxml loads its _elementpath helper dynamically; include it
    explicitly."""
    implicitName = 'lxml._elementpath'
    finder.IncludeModule(implicitName)
|
||||
|
||||
def load_cherrypy(finder, module):
    """cherrypy loads cherrypy.lib.encoding dynamically; include it
    explicitly."""
    implicitName = 'cherrypy.lib.encoding'
    finder.IncludeModule(implicitName)
|
||||
|
||||
def missing_cElementTree(finder, caller):
    """cElementTree was folded into the standard library in Python 2.5;
    on such versions a missing top-level cElementTree is not an error."""
    if sys.version_info[:2] < (2, 5):
        return
    caller.IgnoreName("cElementTree")
|
||||
|
||||
|
||||
def missing_EasyDialogs(finder, caller):
    """EasyDialogs is usually absent on Windows but may be installed, so
    rather than excluding it outright, just ignore a failed import."""
    if sys.platform != "win32":
        return
    caller.IgnoreName("EasyDialogs")
|
||||
|
||||
|
||||
def missing_readline(finder, caller):
    """readline is usually absent on Windows but may be installed, so
    rather than excluding it outright, just ignore a failed import."""
    if sys.platform != "win32":
        return
    caller.IgnoreName("readline")
|
||||
|
||||
|
||||
def missing_xml_etree(finder, caller):
    """Code commonly wraps ``import xml.etree`` in try/except to stay
    compatible with versions before Python 2.5; treat its absence on those
    versions as expected."""
    if sys.version_info[:2] < (2, 5):
        caller.IgnoreName("xml.etree")
|
||||
|
||||
171
installer/cx_Freeze/cx_Freeze/main.py
Normal file
171
installer/cx_Freeze/cx_Freeze/main.py
Normal file
@ -0,0 +1,171 @@
|
||||
import optparse
|
||||
import os
|
||||
import shutil
|
||||
import stat
|
||||
import sys
|
||||
|
||||
import cx_Freeze
|
||||
|
||||
__all__ = ["main"]

# usage text handed to optparse; %prog is replaced with the program name
USAGE = \
"""
%prog [options] [SCRIPT]

Freeze a Python script and all of its referenced modules to a base
executable which can then be distributed without requiring a Python
installation."""

# version banner; %%prog survives the version interpolation below and is
# later expanded by optparse
VERSION = \
"""
%%prog %s
Copyright (c) 2007-2008 Colt Engineering. All rights reserved.
Copyright (c) 2001-2006 Computronix Corporation. All rights reserved.""" % \
        cx_Freeze.version
|
||||
|
||||
|
||||
def ParseCommandLine():
    """Parse the cxfreeze command line.

    Returns the optparse options object with these post-processing steps
    applied: the single positional argument (if any) is stored as
    options.script; includeModules/excludeModules are split into lists;
    replacePaths is parsed into (path, replacement) pairs; and sys.path is
    adjusted per --default-path/--include-path and the script's directory.
    Exits via parser.error() on invalid combinations.
    """
    parser = optparse.OptionParser(version = VERSION.strip(),
            usage = USAGE.strip())
    parser.add_option("-O",
            action = "count",
            default = 0,
            dest = "optimized",
            help = "optimize generated bytecode as per PYTHONOPTIMIZE; "
                   "use -OO in order to remove doc strings")
    parser.add_option("-c", "--compress",
            action = "store_true",
            dest = "compress",
            help = "compress byte code in zip files")
    parser.add_option("--base-name",
            dest = "baseName",
            metavar = "NAME",
            help = "file on which to base the target file; if the name of the "
                   "file is not an absolute file name, the subdirectory bases "
                   "(rooted in the directory in which the freezer is found) "
                   "will be searched for a file matching the name")
    parser.add_option("--init-script",
            dest = "initScript",
            metavar = "NAME",
            help = "script which will be executed upon startup; if the name "
                   "of the file is not an absolute file name, the "
                   "subdirectory initscripts (rooted in the directory in "
                   "which the cx_Freeze package is found) will be searched "
                   "for a file matching the name")
    parser.add_option("--target-dir", "--install-dir",
            dest = "targetDir",
            metavar = "DIR",
            help = "the directory in which to place the target file and "
                   "any dependent files")
    parser.add_option("--target-name",
            dest = "targetName",
            metavar = "NAME",
            help = "the name of the file to create instead of the base name "
                   "of the script and the extension of the base binary")
    parser.add_option("--no-copy-deps",
            dest = "copyDeps",
            default = True,
            action = "store_false",
            help = "do not copy the dependent files (extensions, shared "
                   "libraries, etc.) to the target directory; this also "
                   "modifies the default init script to ConsoleKeepPath.py "
                   "and means that the target executable requires a Python "
                   "installation to execute properly")
    parser.add_option("--default-path",
            action = "append",
            dest = "defaultPath",
            metavar = "DIRS",
            help = "list of paths separated by the standard path separator "
                   "for the platform which will be used to initialize "
                   "sys.path prior to running the module finder")
    parser.add_option("--include-path",
            action = "append",
            dest = "includePath",
            metavar = "DIRS",
            help = "list of paths separated by the standard path separator "
                   "for the platform which will be used to modify sys.path "
                   "prior to running the module finder")
    parser.add_option("--replace-paths",
            dest = "replacePaths",
            metavar = "DIRECTIVES",
            help = "replace all the paths in modules found in the given paths "
                   "with the given replacement string; multiple values are "
                   "separated by the standard path separator and each value "
                   "is of the form path=replacement_string; path can be * "
                   "which means all paths not already specified")
    parser.add_option("--include-modules",
            dest = "includeModules",
            metavar = "NAMES",
            help = "comma separated list of modules to include")
    parser.add_option("--exclude-modules",
            dest = "excludeModules",
            metavar = "NAMES",
            help = "comma separated list of modules to exclude")
    parser.add_option("--ext-list-file",
            dest = "extListFile",
            metavar = "NAME",
            help = "name of file in which to place the list of dependent files "
                   "which were copied into the target directory")
    parser.add_option("-z", "--zip-include",
            dest = "zipIncludes",
            action = "append",
            default = [],
            metavar = "SPEC",
            help = "name of file to add to the zip file or a specification of "
                   "the form name=arcname which will specify the archive name "
                   "to use; multiple --zip-include arguments can be used")
    options, args = parser.parse_args()
    # at most one positional argument (the script) is permitted
    if len(args) == 0:
        options.script = None
    elif len(args) == 1:
        options.script, = args
    else:
        parser.error("only one script can be specified")
    # without a script, there must be something to freeze and a name for it
    if not args and options.includeModules is None and options.copyDeps:
        parser.error("script or a list of modules must be specified")
    if not args and options.targetName is None:
        parser.error("script or a target name must be specified")
    # normalize the comma-separated module lists into real lists
    if options.excludeModules:
        options.excludeModules = options.excludeModules.split(",")
    else:
        options.excludeModules = []
    if options.includeModules:
        options.includeModules = options.includeModules.split(",")
    else:
        options.includeModules = []
    # each replace-paths directive has the form path=replacement_string
    replacePaths = []
    if options.replacePaths:
        for directive in options.replacePaths.split(os.pathsep):
            fromPath, replacement = directive.split("=")
            replacePaths.append((fromPath, replacement))
    options.replacePaths = replacePaths
    # --default-path replaces sys.path entirely; --include-path prepends
    if options.defaultPath is not None:
        sys.path = [p for mp in options.defaultPath \
                for p in mp.split(os.pathsep)]
    if options.includePath is not None:
        paths = [p for mp in options.includePath for p in mp.split(os.pathsep)]
        sys.path = paths + sys.path
    # make imports relative to the script's own directory resolvable
    if options.script is not None:
        sys.path.insert(0, os.path.dirname(options.script))
    return options
|
||||
|
||||
|
||||
def main():
    """Entry point for the cxfreeze script: parse the command line and
    freeze the requested script."""
    options = ParseCommandLine()
    executable = cx_Freeze.Executable(options.script,
            targetName = options.targetName)
    freezerArgs = dict(
            includes = options.includeModules,
            excludes = options.excludeModules,
            replacePaths = options.replacePaths,
            compress = options.compress,
            optimizeFlag = options.optimized,
            copyDependentFiles = options.copyDeps,
            initScript = options.initScript,
            base = options.baseName,
            path = None,
            createLibraryZip = False,
            appendScriptToExe = True,
            targetDir = options.targetDir)
    freezer = cx_Freeze.Freezer([executable], **freezerArgs)
    freezer.Freeze()
|
||||
|
||||
337
installer/cx_Freeze/cx_Freeze/windist.py
Normal file
337
installer/cx_Freeze/cx_Freeze/windist.py
Normal file
@ -0,0 +1,337 @@
|
||||
import distutils.command.bdist_msi
|
||||
import msilib
|
||||
import os
|
||||
|
||||
__all__ = [ "bdist_msi" ]

# force the remove existing products action to happen first since Windows
# installer appears to be braindead and doesn't handle files shared between
# different "products" very well
# NOTE(review): 1450 appears intended to slot RemoveExistingProducts between
# InstallValidate and InstallInitialize in the standard execute sequence --
# confirm against the MSI sequence table documentation
sequence = msilib.sequence.InstallExecuteSequence
for index, info in enumerate(sequence):
    if info[0] == u'RemoveExistingProducts':
        sequence[index] = (info[0], info[1], 1450)
|
||||
|
||||
|
||||
class bdist_msi(distutils.command.bdist_msi.bdist_msi):
    """bdist_msi variant that builds an MSI installer for a frozen
    application, with custom dialogs and PATH handling."""

    # extra command-line options on top of the stock distutils bdist_msi set
    user_options = distutils.command.bdist_msi.bdist_msi.user_options + [
        ('add-to-path=', None, 'add target dir to PATH environment variable'),
        ('upgrade-code=', None, 'upgrade code to use')
    ]
    # shared geometry (in installer dialog units) for the dialogs built below
    x = y = 50
    width = 370
    height = 300
    title = "[ProductName] Setup"
    # dialog attribute values passed to msilib.Dialog; presumably the MSI
    # visible (1) and visible+modal (3) attribute masks -- confirm against
    # the MSI Dialog table documentation
    modeless = 1
    modal = 3
|
||||
|
||||
    def add_config(self, fullname):
        """Add the installer's custom actions, sequences and (optionally)
        the PATH environment entry to the MSI database."""
        initialTargetDir = self.get_initial_target_dir(fullname)
        # when not set explicitly, add the target dir to PATH only if at
        # least one executable uses a Console* base
        if self.add_to_path is None:
            self.add_to_path = False
            for executable in self.distribution.executables:
                if os.path.basename(executable.base).startswith("Console"):
                    self.add_to_path = True
                    break
        if self.add_to_path:
            # [~];[TARGETDIR] appends the install dir to the existing PATH
            msilib.add_data(self.db, 'Environment',
                    [("E_PATH", "Path", r"[~];[TARGETDIR]", "TARGETDIR")])
        # custom action supplying the default TARGETDIR when none was given
        msilib.add_data(self.db, 'CustomAction',
                [("InitialTargetDir", 256 + 51, "TARGETDIR", initialTargetDir)
                ])
        msilib.add_data(self.db, 'InstallExecuteSequence',
                [("InitialTargetDir", 'TARGETDIR=""', 401)])
        # UI sequence: prepare, default the target dir, then either the
        # directory chooser (fresh install) or maintenance chooser
        msilib.add_data(self.db, 'InstallUISequence',
                [("PrepareDlg", None, 140),
                 ("InitialTargetDir", 'TARGETDIR=""', 401),
                 ("SelectDirectoryDlg", "not Installed", 1230),
                 ("MaintenanceTypeDlg",
                        "Installed and not Resume and not Preselected", 1250),
                 ("ProgressDlg", None, 1280)
                ])
|
||||
|
||||
    def add_cancel_dialog(self):
        """Small modal yes/no dialog confirming cancellation."""
        dialog = msilib.Dialog(self.db, "CancelDlg", 50, 10, 260, 85, 3,
                self.title, "No", "No", "No")
        dialog.text("Text", 48, 15, 194, 30, 3,
                "Are you sure you want to cancel [ProductName] installation?")
        # Yes ends the installer; No returns to the spawning dialog
        button = dialog.pushbutton("Yes", 72, 57, 56, 17, 3, "Yes", "No")
        button.event("EndDialog", "Exit")
        button = dialog.pushbutton("No", 132, 57, 56, 17, 3, "No", "Yes")
        button.event("EndDialog", "Return")
|
||||
|
||||
    def add_error_dialog(self):
        """Generic error dialog; the ErrorText control is filled in by the
        installer at display time."""
        dialog = msilib.Dialog(self.db, "ErrorDlg", 50, 10, 330, 101, 65543,
                self.title, "ErrorText", None, None)
        dialog.text("ErrorText", 50, 9, 280, 48, 3, "")
        # one button per standard MSI error response; the control name is
        # the response's first letter (N/Y/A/C/I/O/R), which are unique
        for text, x in [("No", 120), ("Yes", 240), ("Abort", 0),
                ("Cancel", 42), ("Ignore", 81), ("Ok", 159), ("Retry", 198)]:
            button = dialog.pushbutton(text[0], x, 72, 81, 21, 3, text, None)
            button.event("EndDialog", "Error%s" % text)
|
||||
|
||||
    def add_exit_dialog(self):
        """Dialog shown when installation completes successfully."""
        dialog = distutils.command.bdist_msi.PyDialog(self.db, "ExitDialog",
                self.x, self.y, self.width, self.height, self.modal,
                self.title, "Finish", "Finish", "Finish")
        dialog.title("Completing the [ProductName] installer")
        dialog.back("< Back", "Finish", active = False)
        dialog.cancel("Cancel", "Back", active = False)
        dialog.text("Description", 15, 235, 320, 20, 0x30003,
                "Click the Finish button to exit the installer.")
        button = dialog.next("Finish", "Cancel", name = "Finish")
        button.event("EndDialog", "Return")
|
||||
|
||||
    def add_fatal_error_dialog(self):
        """Dialog shown when the installation aborts because of an error."""
        dialog = distutils.command.bdist_msi.PyDialog(self.db, "FatalError",
                self.x, self.y, self.width, self.height, self.modal,
                self.title, "Finish", "Finish", "Finish")
        dialog.title("[ProductName] installer ended prematurely")
        dialog.back("< Back", "Finish", active = False)
        dialog.cancel("Cancel", "Back", active = False)
        dialog.text("Description1", 15, 70, 320, 80, 0x30003,
                "[ProductName] setup ended prematurely because of an error. "
                "Your system has not been modified. To install this program "
                "at a later time, please run the installation again.")
        dialog.text("Description2", 15, 155, 320, 20, 0x30003,
                "Click the Finish button to exit the installer.")
        button = dialog.next("Finish", "Cancel", name = "Finish")
        button.event("EndDialog", "Exit")
|
||||
|
||||
    def add_files_in_use_dialog(self):
        """Dialog listing the applications holding files that this setup
        needs to update, with Retry/Ignore/Exit choices."""
        dialog = distutils.command.bdist_msi.PyDialog(self.db, "FilesInUse",
                self.x, self.y, self.width, self.height, 19, self.title,
                "Retry", "Retry", "Retry", bitmap = False)
        dialog.text("Title", 15, 6, 200, 15, 0x30003,
                r"{\DlgFontBold8}Files in Use")
        dialog.text("Description", 20, 23, 280, 20, 0x30003,
                "Some files that need to be updated are currently in use.")
        dialog.text("Text", 20, 55, 330, 50, 3,
                "The following applications are using files that need to be "
                "updated by this setup. Close these applications and then "
                "click Retry to continue the installation or Cancel to exit "
                "it.")
        # the FileInUseProcess property is populated by the installer
        dialog.control("List", "ListBox", 20, 107, 330, 130, 7,
                "FileInUseProcess", None, None, None)
        button = dialog.back("Exit", "Ignore", name = "Exit")
        button.event("EndDialog", "Exit")
        button = dialog.next("Ignore", "Retry", name = "Ignore")
        button.event("EndDialog", "Ignore")
        button = dialog.cancel("Retry", "Exit", name = "Retry")
        button.event("EndDialog", "Retry")
|
||||
|
||||
    def add_maintenance_type_dialog(self):
        """Repair/remove chooser shown when the product is already
        installed."""
        dialog = distutils.command.bdist_msi.PyDialog(self.db,
                "MaintenanceTypeDlg", self.x, self.y, self.width, self.height,
                self.modal, self.title, "Next", "Next", "Cancel")
        dialog.title("Welcome to the [ProductName] Setup Wizard")
        dialog.text("BodyText", 15, 63, 330, 42, 3,
                "Select whether you want to repair or remove [ProductName].")
        group = dialog.radiogroup("RepairRadioGroup", 15, 108, 330, 60, 3,
                "MaintenanceForm_Action", "", "Next")
        group.add("Repair", 0, 18, 300, 17, "&Repair [ProductName]")
        group.add("Remove", 0, 36, 300, 17, "Re&move [ProductName]")
        dialog.back("< Back", None, active = False)
        button = dialog.next("Finish", "Cancel")
        # only the event chain whose condition matches the selected radio
        # value fires; the trailing integers order the events
        button.event("[REINSTALL]", "ALL",
                'MaintenanceForm_Action="Repair"', 5)
        button.event("[Progress1]", "Repairing",
                'MaintenanceForm_Action="Repair"', 6)
        button.event("[Progress2]", "repairs",
                'MaintenanceForm_Action="Repair"', 7)
        button.event("Reinstall", "ALL",
                'MaintenanceForm_Action="Repair"', 8)
        button.event("[REMOVE]", "ALL",
                'MaintenanceForm_Action="Remove"', 11)
        button.event("[Progress1]", "Removing",
                'MaintenanceForm_Action="Remove"', 12)
        button.event("[Progress2]", "removes",
                'MaintenanceForm_Action="Remove"', 13)
        button.event("Remove", "ALL",
                'MaintenanceForm_Action="Remove"', 14)
        button.event("EndDialog", "Return",
                'MaintenanceForm_Action<>"Change"', 20)
        button = dialog.cancel("Cancel", "RepairRadioGroup")
        button.event("SpawnDialog", "CancelDlg")
|
||||
|
||||
def add_prepare_dialog(self):
|
||||
dialog = distutils.command.bdist_msi.PyDialog(self.db, "PrepareDlg",
|
||||
self.x, self.y, self.width, self.height, self.modeless,
|
||||
self.title, "Cancel", "Cancel", "Cancel")
|
||||
dialog.text("Description", 15, 70, 320, 40, 0x30003,
|
||||
"Please wait while the installer prepares to guide you through"
|
||||
"the installation.")
|
||||
dialog.title("Welcome to the [ProductName] installer")
|
||||
text = dialog.text("ActionText", 15, 110, 320, 20, 0x30003,
|
||||
"Pondering...")
|
||||
text.mapping("ActionText", "Text")
|
||||
text = dialog.text("ActionData", 15, 135, 320, 30, 0x30003, None)
|
||||
text.mapping("ActionData", "Text")
|
||||
dialog.back("Back", None, active = False)
|
||||
dialog.next("Next", None, active = False)
|
||||
button = dialog.cancel("Cancel", None)
|
||||
button.event("SpawnDialog", "CancelDlg")
|
||||
|
||||
    def add_progress_dialog(self):
        """Modeless progress dialog whose progress bar is driven by the
        installer's SetProgress events."""
        dialog = distutils.command.bdist_msi.PyDialog(self.db, "ProgressDlg",
                self.x, self.y, self.width, self.height, self.modeless,
                self.title, "Cancel", "Cancel", "Cancel", bitmap = False)
        # [Progress1]/[Progress2] are verb properties (e.g. Install/installs)
        dialog.text("Title", 20, 15, 200, 15, 0x30003,
                r"{\DlgFontBold8}[Progress1] [ProductName]")
        dialog.text("Text", 35, 65, 300, 30, 3,
                "Please wait while the installer [Progress2] [ProductName].")
        dialog.text("StatusLabel", 35, 100 ,35, 20, 3, "Status:")
        text = dialog.text("ActionText", 70, 100, self.width - 70, 20, 3,
                "Pondering...")
        text.mapping("ActionText", "Text")
        control = dialog.control("ProgressBar", "ProgressBar", 35, 120, 300,
                10, 65537, None, "Progress done", None, None)
        control.mapping("SetProgress", "Progress")
        dialog.back("< Back", "Next", active = False)
        dialog.next("Next >", "Cancel", active = False)
        button = dialog.cancel("Cancel", "Back")
        button.event("SpawnDialog", "CancelDlg")
|
||||
|
||||
def add_properties(self):
    """Populate the MSI Property table with the standard installer
    properties, plus optional contact, URL and upgrade-code entries when
    the distribution metadata provides them."""
    meta = self.distribution.metadata
    entries = [
        ('DistVersion', meta.get_version()),
        ('DefaultUIFont', 'DlgFont8'),
        ('ErrorDialog', 'ErrorDlg'),
        ('Progress1', 'Install'),
        ('Progress2', 'installs'),
        ('MaintenanceForm_Action', 'Repair'),
    ]
    contact = meta.author_email or meta.maintainer_email
    if contact:
        entries.append(("ARPCONTACT", contact))
    if meta.url:
        entries.append(("ARPURLINFOABOUT", meta.url))
    if self.upgrade_code is not None:
        entries.append(("UpgradeCode", self.upgrade_code))
    msilib.add_data(self.db, 'Property', entries)
|
||||
|
||||
def add_select_directory_dialog(self):
    """Add the modal "SelectDirectoryDlg" that lets the user choose the
    target installation directory via a combo box, a directory list, an
    editable path field and Up/New buttons."""
    dialog = distutils.command.bdist_msi.PyDialog(self.db,
            "SelectDirectoryDlg", self.x, self.y, self.width, self.height,
            self.modal, self.title, "Next", "Next", "Cancel")
    dialog.title("Select destination directory")
    dialog.back("< Back", None, active = False)
    button = dialog.next("Next >", "Cancel")
    # ordering matters: commit TARGETDIR first, wait for costing to
    # finish, then close the dialog
    button.event("SetTargetPath", "TARGETDIR", ordering = 1)
    button.event("SpawnWaitDialog", "WaitForCostingDlg", ordering = 2)
    button.event("EndDialog", "Return", ordering = 3)
    button = dialog.cancel("Cancel", "DirectoryCombo")
    button.event("SpawnDialog", "CancelDlg")
    # the three directory controls all edit TARGETDIR and are chained via
    # their "next control" arguments
    dialog.control("DirectoryCombo", "DirectoryCombo", 15, 70, 272, 80,
            393219, "TARGETDIR", None, "DirectoryList", None)
    dialog.control("DirectoryList", "DirectoryList", 15, 90, 308, 136, 3,
            "TARGETDIR", None, "PathEdit", None)
    dialog.control("PathEdit", "PathEdit", 15, 230, 306, 16, 3,
            "TARGETDIR", None, "Next", None)
    button = dialog.pushbutton("Up", 306, 70, 18, 18, 3, "Up", None)
    button.event("DirectoryListUp", "0")
    button = dialog.pushbutton("NewDir", 324, 70, 30, 18, 3, "New", None)
    button.event("DirectoryListNew", "0")
|
||||
|
||||
def add_text_styles(self):
    """Register the text styles referenced by the dialogs in the MSI
    TextStyle table; each row is (name, face, size, color, style-flag)."""
    # NOTE(review): "DlgFont8" is registered with size 9 while
    # "DlgFontBold8" uses size 8 -- looks inconsistent with the names but
    # matches the values as written; confirm before changing.
    msilib.add_data(self.db, 'TextStyle',
            [("DlgFont8", "Tahoma", 9, None, 0),
             ("DlgFontBold8", "Tahoma", 8, None, 1),
             ("VerdanaBold10", "Verdana", 10, None, 1),
             ("VerdanaRed9", "Verdana", 9, 255, 0)
            ])
|
||||
|
||||
def add_ui(self):
    """Create the installer's complete UI: register the text styles and
    then add every dialog, in the same order as before."""
    builders = (
        self.add_text_styles,
        self.add_error_dialog,
        self.add_fatal_error_dialog,
        self.add_cancel_dialog,
        self.add_exit_dialog,
        self.add_user_exit_dialog,
        self.add_files_in_use_dialog,
        self.add_wait_for_costing_dialog,
        self.add_prepare_dialog,
        self.add_select_directory_dialog,
        self.add_progress_dialog,
        self.add_maintenance_type_dialog,
    )
    for build in builders:
        build()
|
||||
def add_upgrade_config(self, sversion):
    """Populate the MSI Upgrade table (only when an upgrade code is
    configured) so that installing over other versions is handled.

    sversion -- the strict "major.minor.patch" version string.

    NOTE(review): 513 / 257 are Windows Installer Upgrade-table attribute
    masks (257 appears to include the detect-only bit for newer
    versions); verify against the Upgrade table documentation.
    """
    if self.upgrade_code is not None:
        msilib.add_data(self.db, 'Upgrade',
                [(self.upgrade_code, None, sversion, None, 513, None,
                        "REMOVEOLDVERSION"),
                 (self.upgrade_code, sversion, None, None, 257, None,
                        "REMOVENEWVERSION")
                ])
|
||||
|
||||
def add_user_exit_dialog(self):
    """Add the modal "UserExit" dialog shown when the user interrupts the
    installation; only the Finish button (which exits) is active."""
    dialog = distutils.command.bdist_msi.PyDialog(self.db, "UserExit",
            self.x, self.y, self.width, self.height, self.modal,
            self.title, "Finish", "Finish", "Finish")
    dialog.title("[ProductName] installer was interrupted")
    dialog.back("< Back", "Finish", active = False)
    dialog.cancel("Cancel", "Back", active = False)
    dialog.text("Description1", 15, 70, 320, 80, 0x30003,
            "[ProductName] setup was interrupted. Your system has not "
            "been modified. To install this program at a later time, "
            "please run the installation again.")
    dialog.text("Description2", 15, 155, 320, 20, 0x30003,
            "Click the Finish button to exit the installer.")
    button = dialog.next("Finish", "Cancel", name = "Finish")
    button.event("EndDialog", "Exit")
|
||||
|
||||
def add_wait_for_costing_dialog(self):
    """Add the small modal "WaitForCostingDlg" spawned while the
    installer finishes computing disk-space requirements (see
    add_select_directory_dialog's SpawnWaitDialog event)."""
    dialog = msilib.Dialog(self.db, "WaitForCostingDlg", 50, 10, 260, 85,
            self.modal, self.title, "Return", "Return", "Return")
    dialog.text("Text", 48, 15, 194, 30, 3,
            "Please wait while the installer finishes determining your "
            "disk space requirements.")
    button = dialog.pushbutton("Return", 102, 57, 56, 17, 3, "Return",
            None)
    button.event("EndDialog", "Exit")
|
||||
|
||||
def get_initial_target_dir(self, fullname):
    """Return the default installation directory proposed to the user,
    beneath the Windows Program Files folder."""
    prefix = "[ProgramFilesFolder]"
    return "%s\\%s" % (prefix, fullname)
|
||||
|
||||
def get_installer_filename(self, fullname):
    """Return the path of the .msi file to create inside dist_dir."""
    msi_name = "%s.msi" % fullname
    return os.path.join(self.dist_dir, msi_name)
|
||||
|
||||
def initialize_options(self):
    """Initialize default option values, extending the base bdist_msi
    command with upgrade-code and add-to-path options."""
    distutils.command.bdist_msi.bdist_msi.initialize_options(self)
    self.upgrade_code = None
    # NOTE(review): add_to_path is initialized here but not referenced in
    # the visible portion of this file -- presumably consumed elsewhere.
    self.add_to_path = None
|
||||
|
||||
def run(self):
    """Build the distribution (unless skipped), install it privately into
    the bdist directory, then package the result into an MSI database
    with properties, configuration, upgrade info, UI and files."""
    if not self.skip_build:
        self.run_command('build')
    # install into the temporary bdist tree rather than the real prefix
    install = self.reinitialize_command('install', reinit_subcommands = 1)
    install.prefix = self.bdist_dir
    install.skip_build = self.skip_build
    install.warn_dir = 0
    distutils.log.info("installing to %s", self.bdist_dir)
    install.ensure_finalized()
    install.run()
    self.mkpath(self.dist_dir)
    fullname = self.distribution.get_fullname()
    filename = os.path.abspath(self.get_installer_filename(fullname))
    if os.path.exists(filename):
        # remove any previous installer so a fresh database is created
        os.unlink(filename)
    metadata = self.distribution.metadata
    author = metadata.author or metadata.maintainer or "UNKNOWN"
    version = metadata.get_version()
    # MSI needs a strict numeric major.minor.patch version string
    sversion = "%d.%d.%d" % \
            distutils.version.StrictVersion(version).version
    self.db = msilib.init_database(filename, msilib.schema,
            self.distribution.metadata.name, msilib.gen_uuid(), sversion,
            author)
    msilib.add_tables(self.db, msilib.sequence)
    self.add_properties()
    self.add_config(fullname)
    self.add_upgrade_config(sversion)
    self.add_ui()
    self.add_files()
    self.db.Commit()
    if not self.keep_temp:
        distutils.dir_util.remove_tree(self.bdist_dir,
                dry_run = self.dry_run)
|
||||
6
installer/cx_Freeze/cxfreeze
Executable file
6
installer/cx_Freeze/cxfreeze
Executable file
@ -0,0 +1,6 @@
|
||||
#!/usr/bin/python
"""Console entry point for cx_Freeze: delegates to cx_Freeze.main()."""

from cx_Freeze import main

# Guard the invocation so the script can also be imported (e.g. by tools
# that introspect entry points) without side effects; behavior when run
# directly is unchanged.
if __name__ == "__main__":
    main()
|
||||
|
||||
35
installer/cx_Freeze/initscripts/Console.py
Executable file
35
installer/cx_Freeze/initscripts/Console.py
Executable file
@ -0,0 +1,35 @@
|
||||
#------------------------------------------------------------------------------
# Console.py
#   Initialization script for cx_Freeze which manipulates the path so that the
# directory in which the executable is found is searched for extensions but
# no other directory is searched. It also sets the attribute sys.frozen so that
# the Win32 extensions behave as expected.
#------------------------------------------------------------------------------

# imported for their side effects / availability in the frozen app, even
# though most are not referenced by name below
import encodings
import os
import sys
import warnings
import zipimport

# NOTE(review): DIR_NAME, FILE_NAME, INITSCRIPT_ZIP_FILE_NAME and
# SHARED_ZIP_FILE_NAME are not defined in this file -- they appear to be
# injected into this script's namespace by the frozen-executable bootstrap.
sys.frozen = True
sys.path = sys.path[:4]  # keep only the first four (frozen) search entries

os.environ["TCL_LIBRARY"] = os.path.join(DIR_NAME, "tcl")
os.environ["TK_LIBRARY"] = os.path.join(DIR_NAME, "tk")

m = __import__("__main__")
importer = zipimport.zipimporter(INITSCRIPT_ZIP_FILE_NAME)
# With an exclusive zip file the main module is stored under its own name;
# in a shared zip file it is stored as "<executable-name>__main__" so
# multiple executables can share one archive without clashing.
if INITSCRIPT_ZIP_FILE_NAME != SHARED_ZIP_FILE_NAME:
    moduleName = m.__name__
else:
    name, ext = os.path.splitext(os.path.basename(os.path.normcase(FILE_NAME)))
    moduleName = "%s__main__" % name
code = importer.get_code(moduleName)
exec code in m.__dict__  # Python 2 exec-statement form

# give daemonless threads a chance to shut down, as the interpreter does
if sys.version_info[:2] >= (2, 5):
    module = sys.modules.get("threading")
    if module is not None:
        module._shutdown()
|
||||
19
installer/cx_Freeze/initscripts/ConsoleKeepPath.py
Executable file
19
installer/cx_Freeze/initscripts/ConsoleKeepPath.py
Executable file
@ -0,0 +1,19 @@
|
||||
#------------------------------------------------------------------------------
# ConsoleKeepPath.py
#   Initialization script for cx_Freeze which leaves the path alone and does
# not set the sys.frozen attribute.
#------------------------------------------------------------------------------

import sys
import zipimport

# NOTE(review): INITSCRIPT_ZIP_FILE_NAME is not defined in this file -- it
# appears to be injected by the frozen-executable bootstrap.
m = __import__("__main__")
importer = zipimport.zipimporter(INITSCRIPT_ZIP_FILE_NAME)
code = importer.get_code(m.__name__)
exec code in m.__dict__  # Python 2 exec-statement form

# give threads a chance to shut down, as the interpreter does
if sys.version_info[:2] >= (2, 5):
    module = sys.modules.get("threading")
    if module is not None:
        module._shutdown()
|
||||
|
||||
38
installer/cx_Freeze/initscripts/ConsoleSetLibPath.py
Executable file
38
installer/cx_Freeze/initscripts/ConsoleSetLibPath.py
Executable file
@ -0,0 +1,38 @@
|
||||
#------------------------------------------------------------------------------
# ConsoleSetLibPath.py
#   Initialization script for cx_Freeze which manipulates the path so that the
# directory in which the executable is found is searched for extensions but
# no other directory is searched. The environment variable LD_LIBRARY_PATH is
# manipulated first, however, to ensure that shared libraries found in the
# target directory are found. This requires a restart of the executable because
# the environment variable LD_LIBRARY_PATH is only checked at startup.
#------------------------------------------------------------------------------

# imported for their side effects / availability in the frozen app
import encodings
import os
import sys
import warnings
import zipimport

# NOTE(review): DIR_NAME and INITSCRIPT_ZIP_FILE_NAME are injected into
# this namespace by the frozen-executable bootstrap.
paths = os.environ.get("LD_LIBRARY_PATH", "").split(os.pathsep)
if DIR_NAME not in paths:
    # prepend the executable's directory and re-exec so the dynamic
    # linker picks up the new LD_LIBRARY_PATH (it is read at startup only)
    paths.insert(0, DIR_NAME)
    os.environ["LD_LIBRARY_PATH"] = os.pathsep.join(paths)
    os.execv(sys.executable, sys.argv)

sys.frozen = True
sys.path = sys.path[:4]  # keep only the first four (frozen) search entries

os.environ["TCL_LIBRARY"] = os.path.join(DIR_NAME, "tcl")
os.environ["TK_LIBRARY"] = os.path.join(DIR_NAME, "tk")

m = __import__("__main__")
importer = zipimport.zipimporter(INITSCRIPT_ZIP_FILE_NAME)
code = importer.get_code(m.__name__)
exec code in m.__dict__  # Python 2 exec-statement form

# give threads a chance to shut down, as the interpreter does
if sys.version_info[:2] >= (2, 5):
    module = sys.modules.get("threading")
    if module is not None:
        module._shutdown()
|
||||
|
||||
20
installer/cx_Freeze/initscripts/SharedLib.py
Executable file
20
installer/cx_Freeze/initscripts/SharedLib.py
Executable file
@ -0,0 +1,20 @@
|
||||
#------------------------------------------------------------------------------
# SharedLib.py
#   Initialization script for cx_Freeze which behaves similarly to the one for
# console based applications but must handle the case where Python has already
# been initialized and another DLL of this kind has been loaded. As such it
# does not block the path unless sys.frozen is not already set.
#------------------------------------------------------------------------------

# imported for their side effects / availability in the frozen library
import encodings
import os
import sys
import warnings

# only the first DLL to load restricts the path and sets the frozen flag
if not hasattr(sys, "frozen"):
    sys.frozen = True
    sys.path = sys.path[:4]

# NOTE(review): DIR_NAME is injected by the frozen-executable bootstrap.
os.environ["TCL_LIBRARY"] = os.path.join(DIR_NAME, "tcl")
os.environ["TK_LIBRARY"] = os.path.join(DIR_NAME, "tk")
|
||||
|
||||
23
installer/cx_Freeze/initscripts/SharedLibSource.py
Executable file
23
installer/cx_Freeze/initscripts/SharedLibSource.py
Executable file
@ -0,0 +1,23 @@
|
||||
#------------------------------------------------------------------------------
# SharedLibSource.py
#   Initialization script for cx_Freeze which imports the site module (as per
# normal processing of a Python script) and then searches for a file with the
# same name as the shared library but with the extension .pth. The entries in
# this file are used to modify the path to use for subsequent imports.
#------------------------------------------------------------------------------

import os
import sys
import warnings

# the site module must be imported for normal behavior to take place; it is
# done dynamically so that cx_Freeze will not add all modules referenced by
# the site module to the frozen executable
__import__("site")

# now locate the pth file to modify the path appropriately
# NOTE(review): FILE_NAME is injected by the frozen-executable bootstrap.
baseName, ext = os.path.splitext(FILE_NAME)
pathFileName = baseName + ".pth"
# each non-blank line of the .pth file is prepended to the module path
# (file() is the Python 2 built-in alias for open())
sys.path = [s.strip() for s in file(pathFileName).read().splitlines()] + \
        sys.path
|
||||
7
installer/cx_Freeze/samples/advanced/advanced_1.py
Normal file
7
installer/cx_Freeze/samples/advanced/advanced_1.py
Normal file
@ -0,0 +1,7 @@
|
||||
# Sample frozen script #1: prints a greeting, then dynamically imports the
# bundled test module to prove it was frozen along with the script.
# NOTE(review): sys is imported but unused in this file.
import sys

print "Hello from cx_Freeze Advanced #1"
print

# dynamic import so cx_Freeze must be told explicitly to include it
# (see the "includes" option in this sample's setup.py)
module = __import__("testfreeze_1")
|
||||
7
installer/cx_Freeze/samples/advanced/advanced_2.py
Normal file
7
installer/cx_Freeze/samples/advanced/advanced_2.py
Normal file
@ -0,0 +1,7 @@
|
||||
# Sample frozen script #2: prints a greeting, then dynamically imports the
# bundled test module to prove it was frozen along with the script.
# NOTE(review): sys is imported but unused in this file.
import sys

print "Hello from cx_Freeze Advanced #2"
print

# dynamic import so cx_Freeze must be told explicitly to include it
module = __import__("testfreeze_2")
|
||||
@ -0,0 +1 @@
|
||||
print "Test freeze module #1"
|
||||
@ -0,0 +1 @@
|
||||
print "Test freeze module #2"
|
||||
31
installer/cx_Freeze/samples/advanced/setup.py
Normal file
31
installer/cx_Freeze/samples/advanced/setup.py
Normal file
@ -0,0 +1,31 @@
|
||||
# An advanced setup script to create multiple executables and demonstrate a few
# of the features available to setup scripts
#
# hello.py is a very simple "Hello, world" type script which also displays the
# environment in which the script runs
#
# Run the build process by running the command 'python setup.py build'
#
# If everything works well you should find a subdirectory in the build
# subdirectory that contains the files needed to run the script without Python

import sys
from cx_Freeze import setup, Executable

# two independent executables built from one setup script
executables = [
        Executable("advanced_1.py"),
        Executable("advanced_2.py")
]

# testfreeze_1/2 are only imported dynamically by the scripts, so they are
# listed explicitly; the extra "modules" path entry presumably points at
# the directory containing them
buildOptions = dict(
        compressed = True,
        includes = ["testfreeze_1", "testfreeze_2"],
        path = sys.path + ["modules"])

setup(
        name = "advanced_cx_Freeze_sample",
        version = "0.1",
        description = "Advanced sample cx_Freeze script",
        options = dict(build_exe = buildOptions),
        executables = executables)
|
||||
|
||||
27
installer/cx_Freeze/samples/matplotlib/setup.py
Normal file
27
installer/cx_Freeze/samples/matplotlib/setup.py
Normal file
@ -0,0 +1,27 @@
|
||||
# A simple setup script to create an executable using matplotlib.
#
# test_matplotlib.py is a very simple matplotlib application that demonstrates
# its use.
#
# Run the build process by running the command 'python setup.py build'
#
# If everything works well you should find a subdirectory in the build
# subdirectory that contains the files needed to run the application

import cx_Freeze
import sys

# on Windows, use the GUI base so no console window is attached
base = None
if sys.platform == "win32":
    base = "Win32GUI"

executables = [
        cx_Freeze.Executable("test_matplotlib.py", base = base)
]

cx_Freeze.setup(
        name = "test_matplotlib",
        version = "0.1",
        description = "Sample matplotlib script",
        executables = executables)
|
||||
|
||||
48
installer/cx_Freeze/samples/matplotlib/test_matplotlib.py
Normal file
48
installer/cx_Freeze/samples/matplotlib/test_matplotlib.py
Normal file
@ -0,0 +1,48 @@
|
||||
# Sample wxPython + matplotlib application: draws one period of a sine wave
# on a WXAgg canvas with a navigation toolbar.
from numpy import arange, sin, pi
import matplotlib
# the backend must be selected before the backend modules are imported
matplotlib.use('WXAgg')
from matplotlib.backends.backend_wxagg import FigureCanvasWxAgg as FigureCanvas
from matplotlib.backends.backend_wx import NavigationToolbar2Wx
from matplotlib.figure import Figure
# NOTE(review): wildcard import brings Frame, App, BoxSizer, Platform etc.
# into this namespace; the classes below depend on it.
from wx import *

class CanvasFrame(Frame):
    # Main window: a single matplotlib figure plus its toolbar.

    def __init__(self):
        Frame.__init__(self,None,-1, 'CanvasFrame',size=(550,350))
        self.SetBackgroundColour(NamedColor("WHITE"))
        self.figure = Figure()
        self.axes = self.figure.add_subplot(111)
        # plot s = sin(2*pi*t) for t in [0, 3)
        t = arange(0.0,3.0,0.01)
        s = sin(2*pi*t)
        self.axes.plot(t,s)
        self.canvas = FigureCanvas(self, -1, self.figure)
        self.sizer = BoxSizer(VERTICAL)
        self.sizer.Add(self.canvas, 1, LEFT | TOP | GROW)
        self.SetSizerAndFit(self.sizer)
        self.add_toolbar()

    def add_toolbar(self):
        self.toolbar = NavigationToolbar2Wx(self.canvas)
        self.toolbar.Realize()
        if Platform == '__WXMAC__':
            # on the Mac the toolbar is managed by the frame itself
            self.SetToolBar(self.toolbar)
        else:
            # elsewhere, size the toolbar to the canvas width and add it
            # to the sizer below the canvas
            tw, th = self.toolbar.GetSizeTuple()
            fw, fh = self.canvas.GetSizeTuple()
            self.toolbar.SetSize(Size(fw, th))
            self.sizer.Add(self.toolbar, 0, LEFT | EXPAND)
        self.toolbar.update()

    def OnPaint(self, event):
        self.canvas.draw()

# NOTE(review): this deliberately shadows the wx App name brought in by the
# wildcard import; the base class is still wx's App.
class App(App):
    def OnInit(self):
        'Create the main window and insert the custom frame'
        frame = CanvasFrame()
        frame.Show(True)
        return True

app = App(0)
app.MainLoop()
|
||||
|
||||
3
installer/cx_Freeze/samples/relimport/pkg1/__init__.py
Normal file
3
installer/cx_Freeze/samples/relimport/pkg1/__init__.py
Normal file
@ -0,0 +1,3 @@
|
||||
print "importing pkg1"
|
||||
from . import sub1
|
||||
from . import pkg2
|
||||
@ -0,0 +1,3 @@
|
||||
print "importing pkg1.pkg2"
|
||||
from . import sub3
|
||||
from .. import sub4
|
||||
3
installer/cx_Freeze/samples/relimport/pkg1/pkg2/sub3.py
Normal file
3
installer/cx_Freeze/samples/relimport/pkg1/pkg2/sub3.py
Normal file
@ -0,0 +1,3 @@
|
||||
print "importing pkg1.pkg2.sub3"
|
||||
from . import sub5
|
||||
from .. import sub6
|
||||
1
installer/cx_Freeze/samples/relimport/pkg1/pkg2/sub5.py
Normal file
1
installer/cx_Freeze/samples/relimport/pkg1/pkg2/sub5.py
Normal file
@ -0,0 +1 @@
|
||||
print "importing pkg1.pkg2.sub5"
|
||||
2
installer/cx_Freeze/samples/relimport/pkg1/sub1.py
Normal file
2
installer/cx_Freeze/samples/relimport/pkg1/sub1.py
Normal file
@ -0,0 +1,2 @@
|
||||
print "importing pkg1.sub1"
|
||||
from . import sub2
|
||||
1
installer/cx_Freeze/samples/relimport/pkg1/sub2.py
Normal file
1
installer/cx_Freeze/samples/relimport/pkg1/sub2.py
Normal file
@ -0,0 +1 @@
|
||||
print "importing pkg1.sub2"
|
||||
1
installer/cx_Freeze/samples/relimport/pkg1/sub4.py
Normal file
1
installer/cx_Freeze/samples/relimport/pkg1/sub4.py
Normal file
@ -0,0 +1 @@
|
||||
print 'importing pkg1.sub4'
|
||||
1
installer/cx_Freeze/samples/relimport/pkg1/sub6.py
Normal file
1
installer/cx_Freeze/samples/relimport/pkg1/sub6.py
Normal file
@ -0,0 +1 @@
|
||||
print "importing pkg1.sub6"
|
||||
1
installer/cx_Freeze/samples/relimport/relimport.py
Normal file
1
installer/cx_Freeze/samples/relimport/relimport.py
Normal file
@ -0,0 +1 @@
|
||||
import pkg1
|
||||
16
installer/cx_Freeze/samples/relimport/setup.py
Normal file
16
installer/cx_Freeze/samples/relimport/setup.py
Normal file
@ -0,0 +1,16 @@
|
||||
# relimport.py is a very simple script that tests importing using relative
# imports (available in Python 2.5 and up)
#
# Run the build process by running the command 'python setup.py build'
#
# If everything works well you should find a subdirectory in the build
# subdirectory that contains the files needed to run the script without Python

from cx_Freeze import setup, Executable

setup(
        name = "relimport",
        version = "0.1",
        description = "Sample cx_Freeze script for relative imports",
        executables = [Executable("relimport.py")])
|
||||
|
||||
19
installer/cx_Freeze/samples/simple/hello.py
Normal file
19
installer/cx_Freeze/samples/simple/hello.py
Normal file
@ -0,0 +1,19 @@
|
||||
# Sample frozen script: prints its execution environment (executable,
# prefix, command-line arguments and module search path) so the frozen
# result can be inspected.  Uses Python 2 print statements.
import sys

print "Hello from cx_Freeze"
print

print "sys.executable", sys.executable
print "sys.prefix", sys.prefix
print

print "ARGUMENTS:"
for a in sys.argv:
    print a
print

print "PATH:"
for p in sys.path:
    print p
print
|
||||
18
installer/cx_Freeze/samples/simple/setup.py
Normal file
18
installer/cx_Freeze/samples/simple/setup.py
Normal file
@ -0,0 +1,18 @@
|
||||
# A very simple setup script to create a single executable
#
# hello.py is a very simple "Hello, world" type script which also displays the
# environment in which the script runs
#
# Run the build process by running the command 'python setup.py build'
#
# If everything works well you should find a subdirectory in the build
# subdirectory that contains the files needed to run the script without Python

from cx_Freeze import setup, Executable

setup(
        name = "hello",
        version = "0.1",
        description = "Sample cx_Freeze script",
        executables = [Executable("hello.py")])
|
||||
|
||||
25
installer/cx_Freeze/samples/wx/setup.py
Normal file
25
installer/cx_Freeze/samples/wx/setup.py
Normal file
@ -0,0 +1,25 @@
|
||||
# A simple setup script to create an executable running wxPython. This also
# demonstrates the method for creating a Windows executable that does not have
# an associated console.
#
# wxapp.py is a very simple "Hello, world" type wxPython application
#
# Run the build process by running the command 'python setup.py build'
#
# If everything works well you should find a subdirectory in the build
# subdirectory that contains the files needed to run the application

import sys

from cx_Freeze import setup, Executable

# on Windows, use the GUI base so no console window is attached
base = None
if sys.platform == "win32":
    base = "Win32GUI"

setup(
        name = "hello",
        version = "0.1",
        description = "Sample cx_Freeze wxPython script",
        executables = [Executable("wxapp.py", base = base)])
|
||||
|
||||
42
installer/cx_Freeze/samples/wx/wxapp.py
Normal file
42
installer/cx_Freeze/samples/wx/wxapp.py
Normal file
@ -0,0 +1,42 @@
|
||||
# Minimal wxPython demo application frozen by the wx sample setup script.
import wx

class Frame(wx.Frame):
    # Main window with two buttons: "Close Me" exits, "Push Me" raises
    # ZeroDivisionError on purpose -- apparently to demonstrate how
    # unhandled errors surface in a frozen GUI app (confirm intent).

    def __init__(self):
        wx.Frame.__init__(self, parent = None, title = "Hello from cx_Freeze")
        panel = wx.Panel(self)
        closeMeButton = wx.Button(panel, -1, "Close Me")
        wx.EVT_BUTTON(self, closeMeButton.GetId(), self.OnCloseMe)
        wx.EVT_CLOSE(self, self.OnCloseWindow)
        pushMeButton = wx.Button(panel, -1, "Push Me")
        wx.EVT_BUTTON(self, pushMeButton.GetId(), self.OnPushMe)
        sizer = wx.BoxSizer(wx.HORIZONTAL)
        sizer.Add(closeMeButton, flag = wx.ALL, border = 20)
        sizer.Add(pushMeButton, flag = wx.ALL, border = 20)
        panel.SetSizer(sizer)
        topSizer = wx.BoxSizer(wx.VERTICAL)
        topSizer.Add(panel, flag = wx.ALL | wx.EXPAND)
        topSizer.Fit(self)

    def OnCloseMe(self, event):
        self.Close(True)

    def OnPushMe(self, event):
        1 / 0  # deliberate ZeroDivisionError

    def OnCloseWindow(self, event):
        self.Destroy()


class App(wx.App):

    def OnInit(self):
        frame = Frame()
        frame.Show(True)
        self.SetTopWindow(frame)
        return True


app = App(1)
app.MainLoop()
|
||||
|
||||
197
installer/cx_Freeze/setup.py
Executable file
197
installer/cx_Freeze/setup.py
Executable file
@ -0,0 +1,197 @@
|
||||
"""
|
||||
Distutils script for cx_Freeze.
|
||||
"""
|
||||
|
||||
import distutils.command.bdist_rpm
|
||||
import distutils.command.build_ext
|
||||
import distutils.command.build_scripts
|
||||
import distutils.command.install
|
||||
import distutils.command.install_data
|
||||
import distutils.sysconfig
|
||||
import os
|
||||
import sys
|
||||
|
||||
from distutils.core import setup
|
||||
from distutils.extension import Extension
|
||||
|
||||
class bdist_rpm(distutils.command.bdist_rpm.bdist_rpm):
    """bdist_rpm variant that tolerates unpackaged byte-compiled files and
    tags the resulting RPM file name with the Python version."""

    # rpm automatically byte compiles all Python files in a package but we
    # don't want that to happen for initscripts and samples so we tell it to
    # ignore those files
    def _make_spec_file(self):
        specFile = distutils.command.bdist_rpm.bdist_rpm._make_spec_file(self)
        specFile.insert(0, "%define _unpackaged_files_terminate_build 0%{nil}")
        return specFile

    def run(self):
        """Run the standard bdist_rpm, then rename the generated RPM to
        include a "pyXY" component (e.g. name-version-py25-release...)."""
        distutils.command.bdist_rpm.bdist_rpm.run(self)
        specFile = os.path.join(self.rpm_base, "SPECS",
                "%s.spec" % self.distribution.get_name())
        # ask rpm for the exact file name it produced for this spec
        queryFormat = "%{name}-%{version}-%{release}.%{arch}.rpm"
        command = "rpm -q --qf '%s' --specfile %s" % (queryFormat, specFile)
        origFileName = os.popen(command).read()
        parts = origFileName.split("-")
        parts.insert(2, "py%s%s" % sys.version_info[:2])
        newFileName = "-".join(parts)
        self.move_file(os.path.join("dist", origFileName),
                os.path.join("dist", newFileName))
|
||||
|
||||
|
||||
class build_ext(distutils.command.build_ext.build_ext):
    """build_ext variant that links the "bases" sources as stand-alone
    executables (the frozen-app launchers) instead of shared modules."""

    def build_extension(self, ext):
        """Build one extension; anything not named "*bases*" is handled by
        the standard build_ext machinery."""
        if ext.name.find("bases") < 0:
            distutils.command.build_ext.build_ext.build_extension(self, ext)
            return
        # let the launcher find shared libraries next to itself at runtime
        os.environ["LD_RUN_PATH"] = "${ORIGIN}:${ORIGIN}/../lib"
        objects = self.compiler.compile(ext.sources,
                output_dir = self.build_temp,
                include_dirs = ext.include_dirs,
                debug = self.debug,
                depends = ext.depends)
        fileName = os.path.splitext(self.get_ext_filename(ext.name))[0]
        fullName = os.path.join(self.build_lib, fileName)
        libraryDirs = ext.library_dirs or []
        libraries = self.get_libraries(ext)
        extraArgs = ext.extra_link_args or []
        if sys.platform != "win32":
            # on Unix, link against the Python library directly, pulling in
            # whatever link flags the interpreter itself was built with
            vars = distutils.sysconfig.get_config_vars()
            libraryDirs.append(vars["LIBPL"])
            libraries.append("python%s.%s" % sys.version_info[:2])
            if vars["LINKFORSHARED"]:
                extraArgs.extend(vars["LINKFORSHARED"].split())
            if vars["LIBS"]:
                extraArgs.extend(vars["LIBS"].split())
            if vars["LIBM"]:
                extraArgs.append(vars["LIBM"])
            if vars["BASEMODLIBS"]:
                extraArgs.extend(vars["BASEMODLIBS"].split())
            if vars["LOCALMODLIBS"]:
                extraArgs.extend(vars["LOCALMODLIBS"].split())
            extraArgs.append("-s")  # strip the resulting executable
        self.compiler.link_executable(objects, fullName,
                libraries = libraries,
                library_dirs = libraryDirs,
                runtime_library_dirs = ext.runtime_library_dirs,
                extra_postargs = extraArgs,
                debug = self.debug)

    def get_ext_filename(self, name):
        """Return the output file name; "bases" targets get the platform's
        executable extension instead of the shared-module suffix."""
        fileName = distutils.command.build_ext.build_ext.get_ext_filename(self,
                name)
        if name.find("bases") < 0:
            return fileName
        ext = self.compiler.exe_extension or ""
        return os.path.splitext(fileName)[0] + ext
|
||||
|
||||
|
||||
class build_scripts(distutils.command.build_scripts.build_scripts):
    """build_scripts variant that additionally generates a .bat wrapper for
    each script on Windows, forwarding up to nine arguments to the script
    installed under the interpreter's Scripts directory."""

    def copy_scripts(self):
        distutils.command.build_scripts.build_scripts.copy_scripts(self)
        if sys.platform == "win32":
            for script in self.scripts:
                batFileName = os.path.join(self.build_dir, script + ".bat")
                fullScriptName = r"%s\Scripts\%s" % \
                        (os.path.dirname(sys.executable), script)
                # %%N escapes survive the %-formatting as %1..%9 in the bat
                command = "%s %s %%1 %%2 %%3 %%4 %%5 %%6 %%7 %%8 %%9" % \
                        (sys.executable, fullScriptName)
                # use a context manager so the handle is closed and flushed
                # deterministically (the original used file(...).write(...),
                # relying on refcounting to close the file)
                with open(batFileName, "w") as batFile:
                    batFile.write("@echo off\n\n%s" % command)
|
||||
|
||||
|
||||
class install(distutils.command.install.install):
    """install variant that appends the custom install_packagedata
    sub-command so samples and initscripts get installed too."""

    def get_sub_commands(self):
        subCommands = distutils.command.install.install.get_sub_commands(self)
        subCommands.append("install_packagedata")
        return subCommands
|
||||
|
||||
|
||||
class install_packagedata(distutils.command.install_data.install_data):
    """Install the non-module package data (samples and initscripts) into
    the cx_Freeze package directory under install_lib."""

    def run(self):
        installCommand = self.get_finalized_command("install")
        installDir = getattr(installCommand, "install_lib")
        # breadth-first walk over the data directories; subdirectories are
        # appended to the work list as they are discovered
        sourceDirs = ["samples", "initscripts"]
        while sourceDirs:
            sourceDir = sourceDirs.pop(0)
            targetDir = os.path.join(installDir, "cx_Freeze", sourceDir)
            self.mkpath(targetDir)
            for name in os.listdir(sourceDir):
                # skip build output and hidden files
                if name == "build" or name.startswith("."):
                    continue
                fullSourceName = os.path.join(sourceDir, name)
                if os.path.isdir(fullSourceName):
                    sourceDirs.append(fullSourceName)
                else:
                    fullTargetName = os.path.join(targetDir, name)
                    self.copy_file(fullSourceName, fullTargetName)
                    # record for install_data's --record support
                    self.outfiles.append(fullTargetName)
|
||||
|
||||
|
||||
# wire the customized distutils commands into the setup() call below
commandClasses = dict(
        build_ext = build_ext,
        build_scripts = build_scripts,
        bdist_rpm = bdist_rpm,
        install = install,
        install_packagedata = install_packagedata)

# the util extension needs imagehlp (PE image helpers) on Windows only
if sys.platform == "win32":
    libraries = ["imagehlp"]
else:
    libraries = []
utilModule = Extension("cx_Freeze.util", ["source/util.c"],
        libraries = libraries)

# the launcher ("bases") extensions all share Common.c
depends = ["source/bases/Common.c"]
if sys.platform == "win32":
    if sys.version_info[:2] >= (2, 6):
        # Python 2.6+ links against the new MS C runtime, which requires
        # an embedded manifest
        extraSources = ["source/bases/manifest.rc"]
    else:
        extraSources = ["source/bases/dummy.rc"]
else:
    extraSources = []
console = Extension("cx_Freeze.bases.Console",
        ["source/bases/Console.c"] + extraSources, depends = depends)
consoleKeepPath = Extension("cx_Freeze.bases.ConsoleKeepPath",
        ["source/bases/ConsoleKeepPath.c"] + extraSources, depends = depends)
extensions = [utilModule, console, consoleKeepPath]
if sys.platform == "win32":
    # -mwindows builds a GUI-subsystem executable (no console window)
    gui = Extension("cx_Freeze.bases.Win32GUI",
            ["source/bases/Win32GUI.c"] + extraSources,
            depends = depends, extra_link_args = ["-mwindows"])
    extensions.append(gui)

docFiles = "LICENSE.txt README.txt HISTORY.txt doc/cx_Freeze.html"

classifiers = [
        "Development Status :: 5 - Production/Stable",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: Python Software Foundation License",
        "Natural Language :: English",
        "Operating System :: OS Independent",
        "Programming Language :: C",
        "Programming Language :: Python",
        "Topic :: Software Development :: Build Tools",
        "Topic :: Software Development :: Libraries :: Python Modules",
        "Topic :: System :: Software Distribution",
        "Topic :: Utilities"
]

setup(name = "cx_Freeze",
        description = "create standalone executables from Python scripts",
        long_description = "create standalone executables from Python scripts",
        version = "4.0.1",
        cmdclass = commandClasses,
        options = dict(bdist_rpm = dict(doc_files = docFiles),
                install = dict(optimize = 1)),
        ext_modules = extensions,
        packages = ['cx_Freeze'],
        maintainer="Anthony Tuininga",
        maintainer_email="anthony.tuininga@gmail.com",
        url = "http://cx-freeze.sourceforge.net",
        scripts = ["cxfreeze"],
        classifiers = classifiers,
        keywords = "freeze",
        license = "Python Software Foundation License")
||||
|
||||
262
installer/cx_Freeze/source/bases/Common.c
Normal file
262
installer/cx_Freeze/source/bases/Common.c
Normal file
@ -0,0 +1,262 @@
|
||||
//-----------------------------------------------------------------------------
|
||||
// Common.c
|
||||
// Routines which are common to running frozen executables.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
#include <compile.h>
|
||||
#include <eval.h>
|
||||
#include <osdefs.h>
|
||||
|
||||
// global variables (used for simplicity)
|
||||
static PyObject *g_FileName = NULL;
|
||||
static PyObject *g_DirName = NULL;
|
||||
static PyObject *g_ExclusiveZipFileName = NULL;
|
||||
static PyObject *g_SharedZipFileName = NULL;
|
||||
static PyObject *g_InitScriptZipFileName = NULL;
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// GetDirName()
|
||||
// Return the directory name of the given path.
|
||||
//-----------------------------------------------------------------------------
|
||||
static int GetDirName(
|
||||
const char *path, // path to calculate dir name for
|
||||
PyObject **dirName) // directory name (OUT)
|
||||
{
|
||||
int i;
|
||||
|
||||
for (i = strlen(path); i > 0 && path[i] != SEP; --i);
|
||||
*dirName = PyString_FromStringAndSize(path, i);
|
||||
if (!*dirName)
|
||||
return FatalError("cannot create string for directory name");
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// SetExecutableName()
|
||||
// Set the script to execute and calculate the directory in which the
|
||||
// executable is found as well as the exclusive (only for this executable) and
|
||||
// shared zip file names.
|
||||
//-----------------------------------------------------------------------------
|
||||
static int SetExecutableName(
|
||||
const char *fileName) // script to execute
|
||||
{
|
||||
char temp[MAXPATHLEN + 12], *ptr;
|
||||
#ifndef WIN32
|
||||
char linkData[MAXPATHLEN + 1];
|
||||
struct stat statData;
|
||||
size_t linkSize, i;
|
||||
PyObject *dirName;
|
||||
#endif
|
||||
|
||||
// store file name
|
||||
g_FileName = PyString_FromString(fileName);
|
||||
if (!g_FileName)
|
||||
return FatalError("cannot create string for file name");
|
||||
|
||||
#ifndef WIN32
|
||||
for (i = 0; i < 25; i++) {
|
||||
if (lstat(fileName, &statData) < 0) {
|
||||
PyErr_SetFromErrnoWithFilename(PyExc_OSError, (char*) fileName);
|
||||
return FatalError("unable to stat file");
|
||||
}
|
||||
if (!S_ISLNK(statData.st_mode))
|
||||
break;
|
||||
linkSize = readlink(fileName, linkData, sizeof(linkData));
|
||||
if (linkSize < 0) {
|
||||
PyErr_SetFromErrnoWithFilename(PyExc_OSError, (char*) fileName);
|
||||
return FatalError("unable to stat file");
|
||||
}
|
||||
if (linkData[0] == '/') {
|
||||
Py_DECREF(g_FileName);
|
||||
g_FileName = PyString_FromStringAndSize(linkData, linkSize);
|
||||
} else {
|
||||
if (GetDirName(PyString_AS_STRING(g_FileName), &dirName) < 0)
|
||||
return -1;
|
||||
if (PyString_GET_SIZE(dirName) + linkSize + 1 > MAXPATHLEN) {
|
||||
Py_DECREF(dirName);
|
||||
return FatalError("cannot dereference link, path too large");
|
||||
}
|
||||
strcpy(temp, PyString_AS_STRING(dirName));
|
||||
strcat(temp, "/");
|
||||
strcat(temp, linkData);
|
||||
Py_DECREF(g_FileName);
|
||||
g_FileName = PyString_FromString(temp);
|
||||
}
|
||||
if (!g_FileName)
|
||||
return FatalError("cannot create string for linked file name");
|
||||
fileName = PyString_AS_STRING(g_FileName);
|
||||
}
|
||||
#endif
|
||||
|
||||
// calculate and store directory name
|
||||
if (GetDirName(fileName, &g_DirName) < 0)
|
||||
return -1;
|
||||
|
||||
// calculate and store exclusive zip file name
|
||||
strcpy(temp, fileName);
|
||||
ptr = temp + strlen(temp) - 1;
|
||||
while (ptr > temp && *ptr != SEP && *ptr != '.')
|
||||
ptr--;
|
||||
if (*ptr == '.')
|
||||
*ptr = '\0';
|
||||
strcat(temp, ".zip");
|
||||
g_ExclusiveZipFileName = PyString_FromString(temp);
|
||||
if (!g_ExclusiveZipFileName)
|
||||
return FatalError("cannot create string for exclusive zip file name");
|
||||
|
||||
// calculate and store shared zip file name
|
||||
strcpy(temp, PyString_AS_STRING(g_DirName));
|
||||
ptr = temp + strlen(temp);
|
||||
*ptr++ = SEP;
|
||||
strcpy(ptr, "library.zip");
|
||||
g_SharedZipFileName = PyString_FromString(temp);
|
||||
if (!g_SharedZipFileName)
|
||||
return FatalError("cannot create string for shared zip file name");
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// SetPathToSearch()
|
||||
// Set the path to search. This includes the file (for those situations where
|
||||
// a zip file is attached to the executable itself), the directory where the
|
||||
// executable is found (to search for extensions), the exclusive zip file
|
||||
// name and the shared zip file name.
|
||||
//-----------------------------------------------------------------------------
|
||||
static int SetPathToSearch(void)
|
||||
{
|
||||
PyObject *pathList;
|
||||
|
||||
pathList = PySys_GetObject("path");
|
||||
if (!pathList)
|
||||
return FatalError("cannot acquire sys.path");
|
||||
if (PyList_Insert(pathList, 0, g_FileName) < 0)
|
||||
return FatalError("cannot insert file name into sys.path");
|
||||
if (PyList_Insert(pathList, 1, g_DirName) < 0)
|
||||
return FatalError("cannot insert directory name into sys.path");
|
||||
if (PyList_Insert(pathList, 2, g_ExclusiveZipFileName) < 0)
|
||||
return FatalError("cannot insert exclusive zip name into sys.path");
|
||||
if (PyList_Insert(pathList, 3, g_SharedZipFileName) < 0)
|
||||
return FatalError("cannot insert shared zip name into sys.path");
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// GetImporterHelper()
|
||||
// Helper which is used to locate the importer for the initscript.
|
||||
//-----------------------------------------------------------------------------
|
||||
static PyObject *GetImporterHelper(
|
||||
PyObject *module, // zipimport module
|
||||
PyObject *fileName) // name of file to search
|
||||
{
|
||||
PyObject *importer;
|
||||
|
||||
importer = PyObject_CallMethod(module, "zipimporter", "O", fileName);
|
||||
if (importer)
|
||||
g_InitScriptZipFileName = fileName;
|
||||
else
|
||||
PyErr_Clear();
|
||||
return importer;
|
||||
}
|
||||
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// GetImporter()
|
||||
// Return the importer which will be used for importing the initialization
|
||||
// script. The executable itself is searched first, followed by the exclusive
|
||||
// zip file and finally by the shared zip file.
|
||||
//-----------------------------------------------------------------------------
|
||||
static int GetImporter(
|
||||
PyObject **importer) // importer (OUT)
|
||||
{
|
||||
PyObject *module;
|
||||
|
||||
module = PyImport_ImportModule("zipimport");
|
||||
if (!module)
|
||||
return FatalError("cannot import zipimport module");
|
||||
*importer = GetImporterHelper(module, g_FileName);
|
||||
if (!*importer) {
|
||||
*importer = GetImporterHelper(module, g_ExclusiveZipFileName);
|
||||
if (!*importer)
|
||||
*importer = GetImporterHelper(module, g_SharedZipFileName);
|
||||
}
|
||||
Py_DECREF(module);
|
||||
if (!*importer)
|
||||
return FatalError("cannot get zipimporter instance");
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// PopulateInitScriptDict()
|
||||
// Return the dictionary used by the initialization script.
|
||||
//-----------------------------------------------------------------------------
|
||||
static int PopulateInitScriptDict(
|
||||
PyObject *dict) // dictionary to populate
|
||||
{
|
||||
if (!dict)
|
||||
return FatalError("unable to create temporary dictionary");
|
||||
if (PyDict_SetItemString(dict, "__builtins__", PyEval_GetBuiltins()) < 0)
|
||||
return FatalError("unable to set __builtins__");
|
||||
if (PyDict_SetItemString(dict, "FILE_NAME", g_FileName) < 0)
|
||||
return FatalError("unable to set FILE_NAME");
|
||||
if (PyDict_SetItemString(dict, "DIR_NAME", g_DirName) < 0)
|
||||
return FatalError("unable to set DIR_NAME");
|
||||
if (PyDict_SetItemString(dict, "EXCLUSIVE_ZIP_FILE_NAME",
|
||||
g_ExclusiveZipFileName) < 0)
|
||||
return FatalError("unable to set EXCLUSIVE_ZIP_FILE_NAME");
|
||||
if (PyDict_SetItemString(dict, "SHARED_ZIP_FILE_NAME",
|
||||
g_SharedZipFileName) < 0)
|
||||
return FatalError("unable to set SHARED_ZIP_FILE_NAME");
|
||||
if (PyDict_SetItemString(dict, "INITSCRIPT_ZIP_FILE_NAME",
|
||||
g_InitScriptZipFileName) < 0)
|
||||
return FatalError("unable to set INITSCRIPT_ZIP_FILE_NAME");
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
//-----------------------------------------------------------------------------
// ExecuteScript()
//   Execute the frozen initialization script. The executable name is used to
//   derive the zip archive names, sys.path is primed with them, a zipimporter
//   is located, and the module "cx_Freeze__init__" is compiled from the
//   archive and executed in a fresh namespace. Returns 0 on success and -1 on
//   failure (after reporting via FatalError/FatalScriptError).
//-----------------------------------------------------------------------------
static int ExecuteScript(
    const char *fileName)               // name of file containing Python code
{
    PyObject *importer, *dict, *code, *temp;

    if (SetExecutableName(fileName) < 0)
        return -1;
    if (SetPathToSearch() < 0)
        return -1;
    importer = NULL;
    if (GetImporter(&importer) < 0)
        return -1;

    // create and populate dictionary for initscript module; NULL from
    // PyDict_New() is handled inside PopulateInitScriptDict()
    dict = PyDict_New();
    if (PopulateInitScriptDict(dict) < 0) {
        Py_XDECREF(dict);
        Py_DECREF(importer);
        return -1;
    }

    // locate and execute script; the importer is released as soon as the
    // code object has been obtained
    code = PyObject_CallMethod(importer, "get_code", "s", "cx_Freeze__init__");
    Py_DECREF(importer);
    if (!code)
        return FatalError("unable to locate initialization module");
    // the dictionary serves as both globals and locals for the script
    temp = PyEval_EvalCode( (PyCodeObject*) code, dict, dict);
    Py_DECREF(code);
    Py_DECREF(dict);
    if (!temp)
        return FatalScriptError();
    Py_DECREF(temp);

    return 0;
}
|
||||
|
||||
72
installer/cx_Freeze/source/bases/Console.c
Normal file
72
installer/cx_Freeze/source/bases/Console.c
Normal file
@ -0,0 +1,72 @@
|
||||
//-----------------------------------------------------------------------------
|
||||
// Console.c
|
||||
// Main routine for frozen programs which run in a console.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
#include <Python.h>
|
||||
#ifdef __WIN32__
|
||||
#include <windows.h>
|
||||
#endif
|
||||
|
||||
//-----------------------------------------------------------------------------
// FatalError()
//   Print any pending Python error followed by the given message, then abort
//   the process via Py_FatalError(). The -1 return value is never actually
//   reached but lets callers write "return FatalError(...)".
//-----------------------------------------------------------------------------
static int FatalError(
    const char *message)                // message to print
{
    PyErr_Print();
    Py_FatalError(message);
    return -1;
}
|
||||
|
||||
|
||||
//-----------------------------------------------------------------------------
// FatalScriptError()
//   Print the traceback of a fatal error raised by the initialization script
//   and report failure to the caller (the process is not aborted here).
//-----------------------------------------------------------------------------
static int FatalScriptError(void)
{
    PyErr_Print();
    return -1;
}
|
||||
|
||||
|
||||
#include "Common.c"
|
||||
|
||||
|
||||
//-----------------------------------------------------------------------------
// main()
//   Main routine for frozen console programs. Configures an isolated
//   interpreter (no site module, no environment variables, empty PYTHONHOME)
//   and then runs the frozen initialization script.
//-----------------------------------------------------------------------------
int main(int argc, char **argv)
{
    const char *fileName;
    char *encoding;

    // initialize Python in isolated mode so a system Python installation
    // cannot interfere with the frozen application
    Py_NoSiteFlag = 1;
    Py_FrozenFlag = 1;
    Py_IgnoreEnvironmentFlag = 1;

    // PYTHONIOENCODING is honored even though the environment is otherwise
    // ignored; the copy is intentionally never freed since the interpreter
    // keeps the pointer for its lifetime
    // NOTE(review): strndup() is POSIX, not ANSI C — confirm it is available
    // on all toolchains this file is built with (it is absent from MSVC)
    encoding = getenv("PYTHONIOENCODING");
    if (encoding != NULL) {
        Py_FileSystemDefaultEncoding = strndup(encoding, 100);
    }

    Py_SetPythonHome("");
    Py_SetProgramName(argv[0]);
    // NOTE(review): Py_GetProgramFullPath() is called before Py_Initialize();
    // presumably valid once the program name is set — confirm against the
    // targeted Python version
    fileName = Py_GetProgramFullPath();

    Py_Initialize();
    PySys_SetArgv(argc, argv);

    // do the work; a nonzero exit status signals failure to the shell
    if (ExecuteScript(fileName) < 0)
        return 1;

    Py_Finalize();
    return 0;
}
|
||||
|
||||
60
installer/cx_Freeze/source/bases/ConsoleKeepPath.c
Normal file
60
installer/cx_Freeze/source/bases/ConsoleKeepPath.c
Normal file
@ -0,0 +1,60 @@
|
||||
//-----------------------------------------------------------------------------
|
||||
// ConsoleKeepPath.c
|
||||
// Main routine for frozen programs which need a Python installation to do
|
||||
// their work.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
#include <Python.h>
|
||||
#ifdef __WIN32__
|
||||
#include <windows.h>
|
||||
#endif
|
||||
|
||||
//-----------------------------------------------------------------------------
// FatalError()
//   Report a fatal error: dump the pending Python error, then terminate the
//   process through Py_FatalError(). Returning -1 is unreachable but allows
//   the "return FatalError(...)" idiom in callers.
//-----------------------------------------------------------------------------
static int FatalError(
    const char *message)                // message to print
{
    PyErr_Print();
    Py_FatalError(message);
    return -1;
}
|
||||
|
||||
|
||||
//-----------------------------------------------------------------------------
// FatalScriptError()
//   Print the traceback of an error raised while running the initialization
//   script and indicate failure without aborting the process.
//-----------------------------------------------------------------------------
static int FatalScriptError(void)
{
    PyErr_Print();
    return -1;
}
|
||||
|
||||
|
||||
#include "Common.c"
|
||||
|
||||
|
||||
//-----------------------------------------------------------------------------
// main()
//   Main routine for frozen programs that rely on an existing Python
//   installation: unlike the plain console base, neither PYTHONHOME nor the
//   environment is overridden, so the system's path settings remain in
//   effect.
//-----------------------------------------------------------------------------
int main(int argc, char **argv)
{
    const char *scriptPath;

    // record the executable name and resolve its full path, then bring up
    // the interpreter and hand it the command line
    Py_SetProgramName(argv[0]);
    scriptPath = Py_GetProgramFullPath();
    Py_Initialize();
    PySys_SetArgv(argc, argv);

    // run the frozen initialization script; failure maps to exit status 1
    if (ExecuteScript(scriptPath) < 0)
        return 1;

    Py_Finalize();
    return 0;
}
|
||||
|
||||
242
installer/cx_Freeze/source/bases/Win32GUI.c
Normal file
242
installer/cx_Freeze/source/bases/Win32GUI.c
Normal file
@ -0,0 +1,242 @@
|
||||
//-----------------------------------------------------------------------------
|
||||
// Win32GUI.c
|
||||
// Main routine for frozen programs written for the Win32 GUI subsystem.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
#include <Python.h>
|
||||
#include <windows.h>
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// FatalError()
|
||||
// Handle a fatal error.
|
||||
//-----------------------------------------------------------------------------
|
||||
static int FatalError(
|
||||
char *a_Message) // message to display
|
||||
{
|
||||
MessageBox(NULL, a_Message, "cx_Freeze Fatal Error", MB_ICONERROR);
|
||||
Py_Finalize();
|
||||
return -1;
|
||||
}
|
||||
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// StringifyObject()
|
||||
// Stringify a Python object.
|
||||
//-----------------------------------------------------------------------------
|
||||
static char *StringifyObject(
|
||||
PyObject *object, // object to stringify
|
||||
PyObject **stringRep) // string representation
|
||||
{
|
||||
if (object) {
|
||||
*stringRep = PyObject_Str(object);
|
||||
if (*stringRep)
|
||||
return PyString_AS_STRING(*stringRep);
|
||||
return "Unable to stringify";
|
||||
}
|
||||
|
||||
// object is NULL
|
||||
*stringRep = NULL;
|
||||
return "None";
|
||||
}
|
||||
|
||||
|
||||
//-----------------------------------------------------------------------------
// FatalPythonErrorNoTraceback()
//   Handle a fatal Python error for which a formatted traceback could not be
//   produced. Displays both the error that prevented traceback formatting
//   (the currently pending one) and the original script error, then returns
//   -1.
//-----------------------------------------------------------------------------
static int FatalPythonErrorNoTraceback(
    PyObject *origType,                 // exception type
    PyObject *origValue,                // exception value
    char *message)                      // message to display
{
    PyObject *typeStrRep, *valueStrRep, *origTypeStrRep, *origValueStrRep;
    char *totalMessage, *typeStr, *valueStr, *origTypeStr, *origValueStr;
    PyObject *type, *value, *traceback;
    int totalMessageLength;
    char *messageFormat;

    // fetch error and string representations of the error
    PyErr_Fetch(&type, &value, &traceback);
    origTypeStr = StringifyObject(origType, &origTypeStrRep);
    origValueStr = StringifyObject(origValue, &origValueStrRep);
    typeStr = StringifyObject(type, &typeStrRep);
    valueStr = StringifyObject(value, &valueStrRep);

    // fill out the message to be displayed; "Type"/"Value" are the error
    // raised while formatting, "Other Type"/"OtherValue" the original one.
    // The format string's own length over-allocates slightly, guaranteeing
    // the sprintf() result fits.
    messageFormat = "Type: %s\nValue: %s\nOther Type: %s\nOtherValue: %s\n%s";
    totalMessageLength = strlen(origTypeStr) + strlen(origValueStr) +
            strlen(typeStr) + strlen(valueStr) + strlen(message) +
            strlen(messageFormat) + 1;
    totalMessage = malloc(totalMessageLength);
    if (!totalMessage)
        return FatalError("Out of memory!");
    sprintf(totalMessage, messageFormat, typeStr, valueStr, origTypeStr,
            origValueStr, message);

    // display the message
    // NOTE(review): the fetched type/value/traceback references and the four
    // string representations are not released here; presumably acceptable
    // since the process is about to terminate — confirm
    MessageBox(NULL, totalMessage,
            "cx_Freeze: Python error in main script (traceback unavailable)",
            MB_ICONERROR);
    free(totalMessage);
    return -1;
}
|
||||
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// ArgumentValue()
|
||||
// Return a suitable argument value by replacing NULL with Py_None.
|
||||
//-----------------------------------------------------------------------------
|
||||
static PyObject *ArgumentValue(
|
||||
PyObject *object) // argument to massage
|
||||
{
|
||||
if (object) {
|
||||
Py_INCREF(object);
|
||||
return object;
|
||||
}
|
||||
Py_INCREF(Py_None);
|
||||
return Py_None;
|
||||
}
|
||||
|
||||
|
||||
//-----------------------------------------------------------------------------
// HandleSystemExitException()
//   Handles a SystemExit exception differently. If an integer value is passed
//   through then that becomes the exit value; otherwise the string value of
//   the value passed through is displayed in a message box. This function
//   never returns: Py_Exit() terminates the process.
//-----------------------------------------------------------------------------
static void HandleSystemExitException()
{
    PyObject *type, *value, *traceback, *valueStr;
    int exitCode = 0;
    char *message;

    PyErr_Fetch(&type, &value, &traceback);
    // a SystemExit instance carries the actual exit value in its "code"
    // attribute; unwrap it (None means a clean exit with status 0)
    if (PyInstance_Check(value)) {
        PyObject *code = PyObject_GetAttrString(value, "code");
        if (code) {
            Py_DECREF(value);
            value = code;
            if (value == Py_None)
                Py_Exit(0);
        }
    }
    if (PyInt_Check(value))
        exitCode = PyInt_AsLong(value);
    else {
        // non-integer exit value: show it to the user and exit with status 1
        message = StringifyObject(value, &valueStr);
        MessageBox(NULL, message, "cx_Freeze: Application Terminated",
                MB_ICONERROR);
        Py_XDECREF(valueStr);
        exitCode = 1;
    }
    // remaining references are reclaimed by process termination
    Py_Exit(exitCode);
}
|
||||
|
||||
|
||||
//-----------------------------------------------------------------------------
// FatalScriptError()
//   Handle a fatal Python error with traceback: SystemExit is dispatched to
//   HandleSystemExitException(); any other exception is formatted with the
//   traceback module and shown in a message box. Falls back to
//   FatalPythonErrorNoTraceback() whenever the formatting machinery itself
//   fails.
//-----------------------------------------------------------------------------
static int FatalScriptError()
{
    PyObject *type, *value, *traceback, *argsTuple, *module, *method, *result;
    int tracebackLength, i;
    char *tracebackStr;

    // if a system exception, handle it specially (does not return)
    if (PyErr_ExceptionMatches(PyExc_SystemExit))
        HandleSystemExitException();

    // get the exception details
    PyErr_Fetch(&type, &value, &traceback);

    // import the traceback module
    module = PyImport_ImportModule("traceback");
    if (!module)
        return FatalPythonErrorNoTraceback(type, value,
                "Cannot import traceback module.");

    // get the format_exception method
    method = PyObject_GetAttrString(module, "format_exception");
    Py_DECREF(module);
    if (!method)
        return FatalPythonErrorNoTraceback(type, value,
                "Cannot get format_exception method.");

    // create a tuple for the arguments; ArgumentValue() maps NULL members of
    // the fetched triple to None and PyTuple_SET_ITEM steals the references
    argsTuple = PyTuple_New(3);
    if (!argsTuple) {
        Py_DECREF(method);
        return FatalPythonErrorNoTraceback(type, value,
                "Cannot create arguments tuple for traceback.");
    }
    PyTuple_SET_ITEM(argsTuple, 0, ArgumentValue(type))
    PyTuple_SET_ITEM(argsTuple, 1, ArgumentValue(value));
    PyTuple_SET_ITEM(argsTuple, 2, ArgumentValue(traceback));

    // call the format_exception method
    result = PyObject_CallObject(method, argsTuple);
    Py_DECREF(method);
    Py_DECREF(argsTuple);
    if (!result)
        return FatalPythonErrorNoTraceback(type, value,
                "Failed calling format_exception method.");

    // determine length of string representation of formatted traceback
    tracebackLength = 1;
    for (i = 0; i < PyList_GET_SIZE(result); i++)
        tracebackLength += PyString_GET_SIZE(PyList_GET_ITEM(result, i));

    // create a string representation of the formatted traceback
    tracebackStr = malloc(tracebackLength);
    if (!tracebackStr) {
        Py_DECREF(result);
        return FatalError("Out of memory!");
    }
    tracebackStr[0] = '\0';
    for (i = 0; i < PyList_GET_SIZE(result); i++)
        strcat(tracebackStr, PyString_AS_STRING(PyList_GET_ITEM(result, i)));
    Py_DECREF(result);

    // bring up the error and shut the interpreter down
    // NOTE(review): tracebackStr is not freed before returning; presumably
    // acceptable since the process exits immediately afterwards — confirm
    MessageBox(NULL, tracebackStr, "cx_Freeze: Python error in main script",
            MB_ICONERROR);
    Py_Finalize();
    return 1;
}
|
||||
|
||||
|
||||
#include "Common.c"
|
||||
|
||||
|
||||
//-----------------------------------------------------------------------------
// WinMain()
//   Main routine for the executable in Windows (GUI subsystem). Configures an
//   isolated interpreter, using the CRT-provided __argc/__argv since WinMain
//   receives only a raw command line, and then runs the frozen initialization
//   script.
//-----------------------------------------------------------------------------
int WINAPI WinMain(
    HINSTANCE instance,                 // handle to application
    HINSTANCE prevInstance,             // previous handle to application
    LPSTR commandLine,                  // command line
    int showFlag)                       // show flag
{
    const char *fileName;

    // initialize Python in isolated mode: no site module, no environment
    // variables, empty PYTHONHOME
    Py_NoSiteFlag = 1;
    Py_FrozenFlag = 1;
    Py_IgnoreEnvironmentFlag = 1;
    Py_SetPythonHome("");
    Py_SetProgramName(__argv[0]);
    fileName = Py_GetProgramFullPath();
    Py_Initialize();
    PySys_SetArgv(__argc, __argv);

    // do the work; a nonzero exit status signals failure
    if (ExecuteScript(fileName) < 0)
        return 1;

    // terminate Python
    Py_Finalize();
    return 0;
}
|
||||
|
||||
5
installer/cx_Freeze/source/bases/dummy.rc
Normal file
5
installer/cx_Freeze/source/bases/dummy.rc
Normal file
@ -0,0 +1,5 @@
|
||||
STRINGTABLE
|
||||
{
|
||||
1, "Just to ensure that buggy EndUpdateResource doesn't fall over."
|
||||
}
|
||||
|
||||
3
installer/cx_Freeze/source/bases/manifest.rc
Normal file
3
installer/cx_Freeze/source/bases/manifest.rc
Normal file
@ -0,0 +1,3 @@
|
||||
#include "dummy.rc"
|
||||
|
||||
1 24 source/bases/manifest.txt
|
||||
418
installer/cx_Freeze/source/util.c
Normal file
418
installer/cx_Freeze/source/util.c
Normal file
@ -0,0 +1,418 @@
|
||||
//-----------------------------------------------------------------------------
|
||||
// util.c
|
||||
// Shared library for use by cx_Freeze.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
#include <Python.h>
|
||||
|
||||
#ifdef WIN32
|
||||
#include <windows.h>
|
||||
#include <imagehlp.h>
|
||||
|
||||
#pragma pack(2)
|
||||
|
||||
typedef struct {
|
||||
BYTE bWidth; // Width, in pixels, of the image
|
||||
BYTE bHeight; // Height, in pixels, of the image
|
||||
BYTE bColorCount; // Number of colors in image
|
||||
BYTE bReserved; // Reserved ( must be 0)
|
||||
WORD wPlanes; // Color Planes
|
||||
WORD wBitCount; // Bits per pixel
|
||||
DWORD dwBytesInRes; // How many bytes in this resource?
|
||||
DWORD dwImageOffset; // Where in the file is this image?
|
||||
} ICONDIRENTRY;
|
||||
|
||||
typedef struct {
|
||||
WORD idReserved; // Reserved (must be 0)
|
||||
WORD idType; // Resource Type (1 for icons)
|
||||
WORD idCount; // How many images?
|
||||
ICONDIRENTRY idEntries[0]; // An entry for each image
|
||||
} ICONDIR;
|
||||
|
||||
typedef struct {
|
||||
BYTE bWidth; // Width, in pixels, of the image
|
||||
BYTE bHeight; // Height, in pixels, of the image
|
||||
BYTE bColorCount; // Number of colors in image
|
||||
BYTE bReserved; // Reserved ( must be 0)
|
||||
WORD wPlanes; // Color Planes
|
||||
WORD wBitCount; // Bits per pixel
|
||||
DWORD dwBytesInRes; // How many bytes in this resource?
|
||||
WORD nID; // resource ID
|
||||
} GRPICONDIRENTRY;
|
||||
|
||||
typedef struct {
|
||||
WORD idReserved; // Reserved (must be 0)
|
||||
WORD idType; // Resource Type (1 for icons)
|
||||
WORD idCount; // How many images?
|
||||
GRPICONDIRENTRY idEntries[0]; // An entry for each image
|
||||
} GRPICONDIR;
|
||||
#endif
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Globals
|
||||
//-----------------------------------------------------------------------------
|
||||
#ifdef WIN32
|
||||
static PyObject *g_BindErrorException = NULL;
|
||||
static PyObject *g_ImageNames = NULL;
|
||||
#endif
|
||||
|
||||
|
||||
#ifdef WIN32
|
||||
//-----------------------------------------------------------------------------
|
||||
// BindStatusRoutine()
|
||||
// Called by BindImageEx() at various points. This is used to determine the
|
||||
// dependency tree which is later examined by cx_Freeze.
|
||||
//-----------------------------------------------------------------------------
|
||||
static BOOL __stdcall BindStatusRoutine(
|
||||
IMAGEHLP_STATUS_REASON reason, // reason called
|
||||
PSTR imageName, // name of image being examined
|
||||
PSTR dllName, // name of DLL
|
||||
ULONG virtualAddress, // computed virtual address
|
||||
ULONG parameter) // parameter (value depends on reason)
|
||||
{
|
||||
char fileName[MAX_PATH + 1];
|
||||
|
||||
switch (reason) {
|
||||
case BindImportModule:
|
||||
if (!SearchPath(NULL, dllName, NULL, sizeof(fileName), fileName,
|
||||
NULL))
|
||||
return FALSE;
|
||||
Py_INCREF(Py_None);
|
||||
if (PyDict_SetItemString(g_ImageNames, fileName, Py_None) < 0)
|
||||
return FALSE;
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
return TRUE;
|
||||
}
|
||||
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// GetFileData()
|
||||
// Return the data for the given file.
|
||||
//-----------------------------------------------------------------------------
|
||||
static int GetFileData(
|
||||
const char *fileName, // name of file to read
|
||||
char **data) // pointer to data (OUT)
|
||||
{
|
||||
DWORD numberOfBytesRead, dataSize;
|
||||
HANDLE file;
|
||||
|
||||
file = CreateFile(fileName, GENERIC_READ, FILE_SHARE_READ, NULL,
|
||||
OPEN_EXISTING, FILE_ATTRIBUTE_NORMAL, NULL);
|
||||
if (file == INVALID_HANDLE_VALUE)
|
||||
return -1;
|
||||
dataSize = GetFileSize(file, NULL);
|
||||
if (dataSize == INVALID_FILE_SIZE) {
|
||||
CloseHandle(file);
|
||||
return -1;
|
||||
}
|
||||
*data = PyMem_Malloc(dataSize);
|
||||
if (!*data) {
|
||||
CloseHandle(file);
|
||||
return -1;
|
||||
}
|
||||
if (!ReadFile(file, *data, dataSize, &numberOfBytesRead, NULL)) {
|
||||
CloseHandle(file);
|
||||
return -1;
|
||||
}
|
||||
CloseHandle(file);
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// CreateGroupIconResource()
|
||||
// Return the group icon resource given the icon file data.
|
||||
//-----------------------------------------------------------------------------
|
||||
static GRPICONDIR *CreateGroupIconResource(
|
||||
ICONDIR *iconDir, // icon information
|
||||
DWORD *resourceSize) // size of resource (OUT)
|
||||
{
|
||||
GRPICONDIR *groupIconDir;
|
||||
int i;
|
||||
|
||||
*resourceSize = sizeof(GRPICONDIR) +
|
||||
sizeof(GRPICONDIRENTRY) * iconDir->idCount;
|
||||
groupIconDir = PyMem_Malloc(*resourceSize);
|
||||
if (!groupIconDir)
|
||||
return NULL;
|
||||
groupIconDir->idReserved = iconDir->idReserved;
|
||||
groupIconDir->idType = iconDir->idType;
|
||||
groupIconDir->idCount = iconDir->idCount;
|
||||
for (i = 0; i < iconDir->idCount; i++) {
|
||||
groupIconDir->idEntries[i].bWidth = iconDir->idEntries[i].bWidth;
|
||||
groupIconDir->idEntries[i].bHeight = iconDir->idEntries[i].bHeight;
|
||||
groupIconDir->idEntries[i].bColorCount =
|
||||
iconDir->idEntries[i].bColorCount;
|
||||
groupIconDir->idEntries[i].bReserved = iconDir->idEntries[i].bReserved;
|
||||
groupIconDir->idEntries[i].wPlanes = iconDir->idEntries[i].wPlanes;
|
||||
groupIconDir->idEntries[i].wBitCount = iconDir->idEntries[i].wBitCount;
|
||||
groupIconDir->idEntries[i].dwBytesInRes =
|
||||
iconDir->idEntries[i].dwBytesInRes;
|
||||
groupIconDir->idEntries[i].nID = i + 1;
|
||||
}
|
||||
|
||||
return groupIconDir;
|
||||
}
|
||||
|
||||
|
||||
//-----------------------------------------------------------------------------
// ExtAddIcon()
//   Add the icon as a resource to the specified file. The icon file is read
//   whole, an RT_GROUP_ICON resource describing all images is written under
//   id 1, and each image is written as an RT_ICON resource with ids 1..n. On
//   any failure the resource update is discarded.
//-----------------------------------------------------------------------------
static PyObject *ExtAddIcon(
    PyObject *self,                     // passthrough argument
    PyObject *args)                     // arguments
{
    char *executableName, *iconName, *data, *iconData;
    GRPICONDIR *groupIconDir;
    DWORD resourceSize;
    ICONDIR *iconDir;
    BOOL succeeded;
    HANDLE handle;
    int i;

    if (!PyArg_ParseTuple(args, "ss", &executableName, &iconName))
        return NULL;

    // begin updating the executable
    handle = BeginUpdateResource(executableName, FALSE);
    if (!handle) {
        PyErr_SetExcFromWindowsErrWithFilename(PyExc_WindowsError,
                GetLastError(), executableName);
        return NULL;
    }

    // first attempt to get the data from the icon file; failures from here
    // on do not return immediately so EndUpdateResource() can discard the
    // pending update and the buffers can be released in one place
    data = NULL;
    succeeded = TRUE;
    groupIconDir = NULL;
    if (GetFileData(iconName, &data) < 0)
        succeeded = FALSE;
    iconDir = (ICONDIR*) data;

    // next, attempt to add a group icon resource
    if (succeeded) {
        groupIconDir = CreateGroupIconResource(iconDir, &resourceSize);
        if (groupIconDir)
            succeeded = UpdateResource(handle, RT_GROUP_ICON,
                    MAKEINTRESOURCE(1),
                    MAKELANGID(LANG_NEUTRAL, SUBLANG_NEUTRAL),
                    groupIconDir, resourceSize);
        else succeeded = FALSE;
    }

    // next, add each icon as a resource; ids match those assigned in the
    // group icon directory (i + 1)
    if (succeeded) {
        for (i = 0; i < iconDir->idCount; i++) {
            iconData = &data[iconDir->idEntries[i].dwImageOffset];
            resourceSize = iconDir->idEntries[i].dwBytesInRes;
            succeeded = UpdateResource(handle, RT_ICON, MAKEINTRESOURCE(i + 1),
                    MAKELANGID(LANG_NEUTRAL, SUBLANG_NEUTRAL), iconData,
                    resourceSize);
            if (!succeeded)
                break;
        }
    }

    // finish writing the resource (or discarding the changes upon an error)
    if (!EndUpdateResource(handle, !succeeded)) {
        if (succeeded) {
            succeeded = FALSE;
            PyErr_SetExcFromWindowsErrWithFilename(PyExc_WindowsError,
                    GetLastError(), executableName);
        }
    }

    // clean up
    // NOTE(review): when GetFileData()/CreateGroupIconResource()/
    // UpdateResource() fail, NULL is returned without a Python exception
    // having been set (only the EndUpdateResource path sets one) — confirm
    // whether callers tolerate the resulting SystemError
    if (groupIconDir)
        PyMem_Free(groupIconDir);
    if (data)
        PyMem_Free(data);
    if (!succeeded)
        return NULL;

    Py_INCREF(Py_None);
    return Py_None;
}
|
||||
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// ExtBeginUpdateResource()
|
||||
// Wrapper for BeginUpdateResource().
|
||||
//-----------------------------------------------------------------------------
|
||||
static PyObject *ExtBeginUpdateResource(
|
||||
PyObject *self, // passthrough argument
|
||||
PyObject *args) // arguments
|
||||
{
|
||||
BOOL deleteExistingResources;
|
||||
char *fileName;
|
||||
HANDLE handle;
|
||||
|
||||
deleteExistingResources = TRUE;
|
||||
if (!PyArg_ParseTuple(args, "s|i", &fileName, &deleteExistingResources))
|
||||
return NULL;
|
||||
handle = BeginUpdateResource(fileName, deleteExistingResources);
|
||||
if (!handle) {
|
||||
PyErr_SetExcFromWindowsErrWithFilename(PyExc_WindowsError,
|
||||
GetLastError(), fileName);
|
||||
return NULL;
|
||||
}
|
||||
return PyInt_FromLong((long) handle);
|
||||
}
|
||||
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// ExtUpdateResource()
|
||||
// Wrapper for UpdateResource().
|
||||
//-----------------------------------------------------------------------------
|
||||
static PyObject *ExtUpdateResource(
|
||||
PyObject *self, // passthrough argument
|
||||
PyObject *args) // arguments
|
||||
{
|
||||
int resourceType, resourceId, resourceDataSize;
|
||||
char *resourceData;
|
||||
HANDLE handle;
|
||||
|
||||
if (!PyArg_ParseTuple(args, "iiis#", &handle, &resourceType, &resourceId,
|
||||
&resourceData, &resourceDataSize))
|
||||
return NULL;
|
||||
if (!UpdateResource(handle, MAKEINTRESOURCE(resourceType),
|
||||
MAKEINTRESOURCE(resourceId),
|
||||
MAKELANGID(LANG_NEUTRAL, SUBLANG_NEUTRAL), resourceData,
|
||||
resourceDataSize)) {
|
||||
PyErr_SetExcFromWindowsErr(PyExc_WindowsError, GetLastError());
|
||||
return NULL;
|
||||
}
|
||||
|
||||
Py_INCREF(Py_None);
|
||||
return Py_None;
|
||||
}
|
||||
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// ExtEndUpdateResource()
|
||||
// Wrapper for EndUpdateResource().
|
||||
//-----------------------------------------------------------------------------
|
||||
static PyObject *ExtEndUpdateResource(
|
||||
PyObject *self, // passthrough argument
|
||||
PyObject *args) // arguments
|
||||
{
|
||||
BOOL discardChanges;
|
||||
HANDLE handle;
|
||||
|
||||
discardChanges = FALSE;
|
||||
if (!PyArg_ParseTuple(args, "i|i", &handle, &discardChanges))
|
||||
return NULL;
|
||||
if (!EndUpdateResource(handle, discardChanges)) {
|
||||
PyErr_SetExcFromWindowsErr(PyExc_WindowsError, GetLastError());
|
||||
return NULL;
|
||||
}
|
||||
|
||||
Py_INCREF(Py_None);
|
||||
return Py_None;
|
||||
}
|
||||
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// ExtGetDependentFiles()
|
||||
// Return a list of files that this file depends on.
|
||||
//-----------------------------------------------------------------------------
|
||||
static PyObject *ExtGetDependentFiles(
|
||||
PyObject *self, // passthrough argument
|
||||
PyObject *args) // arguments
|
||||
{
|
||||
PyObject *results;
|
||||
char *imageName;
|
||||
|
||||
if (!PyArg_ParseTuple(args, "s", &imageName))
|
||||
return NULL;
|
||||
g_ImageNames = PyDict_New();
|
||||
if (!g_ImageNames)
|
||||
return NULL;
|
||||
if (!BindImageEx(BIND_NO_BOUND_IMPORTS | BIND_NO_UPDATE | BIND_ALL_IMAGES,
|
||||
imageName, NULL, NULL, BindStatusRoutine)) {
|
||||
Py_DECREF(g_ImageNames);
|
||||
PyErr_SetExcFromWindowsErrWithFilename(g_BindErrorException,
|
||||
GetLastError(), imageName);
|
||||
return NULL;
|
||||
}
|
||||
results = PyDict_Keys(g_ImageNames);
|
||||
Py_DECREF(g_ImageNames);
|
||||
return results;
|
||||
}
|
||||
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// ExtGetSystemDir()
|
||||
// Return the Windows directory (C:\Windows for example).
|
||||
//-----------------------------------------------------------------------------
|
||||
static PyObject *ExtGetSystemDir(
|
||||
PyObject *self, // passthrough argument
|
||||
PyObject *args) // arguments (ignored)
|
||||
{
|
||||
char dir[MAX_PATH + 1];
|
||||
|
||||
if (GetSystemDirectory(dir, sizeof(dir)))
|
||||
return PyString_FromString(dir);
|
||||
PyErr_SetExcFromWindowsErr(PyExc_RuntimeError, GetLastError());
|
||||
return NULL;
|
||||
}
|
||||
#endif
|
||||
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// ExtSetOptimizeFlag()
|
||||
// Set the optimize flag as needed.
|
||||
//-----------------------------------------------------------------------------
|
||||
static PyObject *ExtSetOptimizeFlag(
|
||||
PyObject *self, // passthrough argument
|
||||
PyObject *args) // arguments
|
||||
{
|
||||
if (!PyArg_ParseTuple(args, "i", &Py_OptimizeFlag))
|
||||
return NULL;
|
||||
Py_INCREF(Py_None);
|
||||
return Py_None;
|
||||
}
|
||||
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Methods
|
||||
//-----------------------------------------------------------------------------
|
||||
static PyMethodDef g_ModuleMethods[] = {
|
||||
{ "SetOptimizeFlag", ExtSetOptimizeFlag, METH_VARARGS },
|
||||
#ifdef WIN32
|
||||
{ "BeginUpdateResource", ExtBeginUpdateResource, METH_VARARGS },
|
||||
{ "UpdateResource", ExtUpdateResource, METH_VARARGS },
|
||||
{ "EndUpdateResource", ExtEndUpdateResource, METH_VARARGS },
|
||||
{ "AddIcon", ExtAddIcon, METH_VARARGS },
|
||||
{ "GetDependentFiles", ExtGetDependentFiles, METH_VARARGS },
|
||||
{ "GetSystemDir", ExtGetSystemDir, METH_NOARGS },
|
||||
#endif
|
||||
{ NULL }
|
||||
};
|
||||
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// initutil()
|
||||
// Initialization routine for the shared libary.
|
||||
//-----------------------------------------------------------------------------
|
||||
void initutil(void)
|
||||
{
|
||||
PyObject *module;
|
||||
|
||||
module = Py_InitModule("cx_Freeze.util", g_ModuleMethods);
|
||||
if (!module)
|
||||
return;
|
||||
#ifdef WIN32
|
||||
g_BindErrorException = PyErr_NewException("cx_Freeze.util.BindError",
|
||||
NULL, NULL);
|
||||
if (!g_BindErrorException)
|
||||
return;
|
||||
if (PyModule_AddObject(module, "BindError", g_BindErrorException) < 0)
|
||||
return;
|
||||
#endif
|
||||
}
|
||||
|
||||
@ -1,4 +1,5 @@
|
||||
#!/usr/bin/env python
|
||||
from __future__ import with_statement
|
||||
__license__ = 'GPL v3'
|
||||
__copyright__ = '2008, Kovid Goyal kovid@kovidgoyal.net'
|
||||
__docformat__ = 'restructuredtext en'
|
||||
@ -6,162 +7,216 @@ __docformat__ = 'restructuredtext en'
|
||||
'''
|
||||
Create linux binary.
|
||||
'''
|
||||
import glob, sys, subprocess, tarfile, os, re, py_compile, shutil
|
||||
HOME = '/home/kovid'
|
||||
PYINSTALLER = os.path.expanduser('~/build/pyinstaller')
|
||||
CALIBREPREFIX = '___'
|
||||
PDFTOHTML = '/usr/bin/pdftohtml'
|
||||
LIBUNRAR = '/usr/lib/libunrar.so'
|
||||
QTDIR = '/usr/lib/qt4'
|
||||
QTDLLS = ('QtCore', 'QtGui', 'QtNetwork', 'QtSvg', 'QtXml', 'QtWebKit')
|
||||
EXTRAS = ('/usr/lib/python2.5/site-packages/PIL', os.path.expanduser('~/ipython/IPython'))
|
||||
SQLITE = '/usr/lib/libsqlite3.so.0'
|
||||
DBUS = '/usr/lib/libdbus-1.so.3'
|
||||
LIBMNG = '/usr/lib/libmng.so.1'
|
||||
LIBZ = '/lib/libz.so.1'
|
||||
LIBBZ2 = '/lib/libbz2.so.1'
|
||||
LIBUSB = '/usr/lib/libusb.so'
|
||||
LIBPOPPLER = '/usr/lib/libpoppler.so.3'
|
||||
LIBXML2 = '/usr/lib/libxml2.so.2'
|
||||
LIBXSLT = '/usr/lib/libxslt.so.1'
|
||||
LIBEXSLT = '/usr/lib/libexslt.so.0'
|
||||
|
||||
def freeze():
|
||||
import glob, sys, subprocess, tarfile, os, re, textwrap, shutil, cStringIO, bz2, codecs
|
||||
from contextlib import closing
|
||||
from cx_Freeze import Executable, setup
|
||||
from calibre.constants import __version__, __appname__
|
||||
from calibre.linux import entry_points
|
||||
from calibre import walk
|
||||
from calibre.web.feeds.recipes import recipe_modules
|
||||
import calibre
|
||||
|
||||
|
||||
CALIBRESRC = os.path.join(CALIBREPREFIX, 'src')
|
||||
CALIBREPLUGINS = os.path.join(CALIBRESRC, 'calibre', 'plugins')
|
||||
QTDIR = '/usr/lib/qt4'
|
||||
QTDLLS = ('QtCore', 'QtGui', 'QtNetwork', 'QtSvg', 'QtXml', 'QtWebKit')
|
||||
|
||||
binary_excludes = ['libGLcore*', 'libGL*', 'libnvidia*']
|
||||
|
||||
binary_includes = [
|
||||
'/usr/bin/pdftohtml',
|
||||
'/usr/lib/libunrar.so',
|
||||
'/usr/lib/libsqlite3.so.0',
|
||||
'/usr/lib/libsqlite3.so.0',
|
||||
'/usr/lib/libmng.so.1',
|
||||
'/lib/libz.so.1',
|
||||
'/lib/libbz2.so.1',
|
||||
'/lib/libbz2.so.1',
|
||||
'/usr/lib/libpoppler.so.4',
|
||||
'/usr/lib/libxml2.so.2',
|
||||
'/usr/lib/libxslt.so.1',
|
||||
'/usr/lib/libxslt.so.1',
|
||||
'/usr/lib/libMagickWand.so',
|
||||
'/usr/lib/libMagickCore.so',
|
||||
]
|
||||
|
||||
binary_includes += [os.path.join(QTDIR, 'lib%s.so.4'%x) for x in QTDLLS]
|
||||
|
||||
|
||||
d = os.path.dirname
|
||||
CALIBRESRC = d(d(d(os.path.abspath(calibre.__file__))))
|
||||
CALIBREPLUGINS = os.path.join(CALIBRESRC, 'src', 'calibre', 'plugins')
|
||||
FREEZE_DIR = os.path.join(CALIBRESRC, 'build', 'cx_freeze')
|
||||
DIST_DIR = os.path.join(CALIBRESRC, 'dist')
|
||||
|
||||
os.chdir(CALIBRESRC)
|
||||
|
||||
print 'Freezing calibre located at', CALIBRESRC
|
||||
|
||||
sys.path.insert(0, os.path.join(CALIBRESRC, 'src'))
|
||||
|
||||
entry_points = entry_points['console_scripts'] + entry_points['gui_scripts']
|
||||
entry_points = ['calibre_postinstall=calibre.linux:binary_install',
|
||||
'calibre-parallel=calibre.parallel:main'] + entry_points
|
||||
executables = {}
|
||||
for ep in entry_points:
|
||||
executables[ep.split('=')[0].strip()] = (ep.split('=')[1].split(':')[0].strip(),
|
||||
ep.split(':')[-1].strip())
|
||||
|
||||
if os.path.exists(FREEZE_DIR):
|
||||
shutil.rmtree(FREEZE_DIR)
|
||||
os.makedirs(FREEZE_DIR)
|
||||
|
||||
if not os.path.exists(DIST_DIR):
|
||||
os.makedirs(DIST_DIR)
|
||||
|
||||
includes = [x[0] for x in executables.values()]
|
||||
|
||||
excludes = ['matplotlib', "Tkconstants", "Tkinter", "tcl", "_imagingtk",
|
||||
"ImageTk", "FixTk", 'wx', 'PyQt4.QtAssistant', 'PyQt4.QtOpenGL.so',
|
||||
'PyQt4.QtScript.so', 'PyQt4.QtSql.so', 'PyQt4.QtTest.so', 'qt',
|
||||
'glib', 'gobject']
|
||||
|
||||
packages = ['calibre', 'encodings', 'cherrypy', 'cssutils', 'xdg']
|
||||
|
||||
includes += ['calibre.web.feeds.recipes.'+r for r in recipe_modules]
|
||||
|
||||
LOADER = '/tmp/loader.py'
|
||||
open(LOADER, 'wb').write('# This script is never actually used.\nimport sys')
|
||||
|
||||
INIT_SCRIPT = '/tmp/init.py'
|
||||
open(INIT_SCRIPT, 'wb').write(textwrap.dedent('''
|
||||
## Load calibre module specified in the environment variable CALIBRE_CX_EXE
|
||||
## Also restrict sys.path to the executables' directory and add the
|
||||
## executables directory to LD_LIBRARY_PATH
|
||||
import encodings
|
||||
import os
|
||||
import sys
|
||||
import warnings
|
||||
import zipimport
|
||||
import locale
|
||||
import codecs
|
||||
|
||||
enc = locale.getdefaultlocale()[1]
|
||||
if not enc:
|
||||
enc = locale.nl_langinfo(locale.CODESET)
|
||||
enc = codecs.lookup(enc if enc else 'UTF-8').name
|
||||
sys.setdefaultencoding(enc)
|
||||
|
||||
paths = os.environ.get('LD_LIBRARY_PATH', '').split(os.pathsep)
|
||||
if DIR_NAME not in paths or not sys.getfilesystemencoding():
|
||||
paths.insert(0, DIR_NAME)
|
||||
os.environ['LD_LIBRARY_PATH'] = os.pathsep.join(paths)
|
||||
os.environ['PYTHONIOENCODING'] = enc
|
||||
os.execv(sys.executable, sys.argv)
|
||||
|
||||
sys.path = sys.path[:3]
|
||||
sys.frozen = True
|
||||
sys.frozen_path = DIR_NAME
|
||||
|
||||
executables = %(executables)s
|
||||
|
||||
exe = os.environ.get('CALIBRE_CX_EXE', False)
|
||||
ret = 1
|
||||
if not exe:
|
||||
print >>sys.stderr, 'Invalid invocation of calibre loader. CALIBRE_CX_EXE not set'
|
||||
elif exe not in executables:
|
||||
print >>sys.stderr, 'Invalid invocation of calibre loader. CALIBRE_CX_EXE=%%s is unknown'%%exe
|
||||
else:
|
||||
from PyQt4.QtCore import QCoreApplication
|
||||
QCoreApplication.setLibraryPaths([sys.frozen_path, os.path.join(sys.frozen_path, "qtplugins")])
|
||||
sys.argv[0] = exe
|
||||
module, func = executables[exe]
|
||||
module = __import__(module, fromlist=[1])
|
||||
func = getattr(module, func)
|
||||
ret = func()
|
||||
|
||||
module = sys.modules.get("threading")
|
||||
if module is not None:
|
||||
module._shutdown()
|
||||
sys.exit(ret)
|
||||
''')%dict(executables=repr(executables)))
|
||||
sys.argv = ['freeze', 'build_exe']
|
||||
setup(
|
||||
name = __appname__,
|
||||
version = __version__,
|
||||
executables = [Executable(script=LOADER, targetName='loader', compress=False)],
|
||||
options = { 'build_exe' :
|
||||
{
|
||||
'build_exe' : os.path.join(CALIBRESRC, 'build/cx_freeze'),
|
||||
'optimize' : 2,
|
||||
'excludes' : excludes,
|
||||
'includes' : includes,
|
||||
'packages' : packages,
|
||||
'init_script' : INIT_SCRIPT,
|
||||
'copy_dependent_files' : True,
|
||||
'create_shared_zip' : False,
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
def copy_binary(src, dest_dir):
|
||||
dest = os.path.join(dest_dir, os.path.basename(src))
|
||||
if not os.path.exists(dest_dir):
|
||||
os.makedirs(dest_dir)
|
||||
shutil.copyfile(os.path.realpath(src), dest)
|
||||
shutil.copymode(os.path.realpath(src), dest)
|
||||
|
||||
for f in binary_includes:
|
||||
copy_binary(f, FREEZE_DIR)
|
||||
|
||||
for pat in binary_excludes:
|
||||
matches = glob.glob(os.path.join(FREEZE_DIR, pat))
|
||||
for f in matches:
|
||||
os.remove(f)
|
||||
|
||||
print 'Adding calibre plugins...'
|
||||
os.makedirs(os.path.join(FREEZE_DIR, 'plugins'))
|
||||
for f in glob.glob(os.path.join(CALIBREPLUGINS, '*.so')):
|
||||
copy_binary(f, os.path.join(FREEZE_DIR, 'plugins'))
|
||||
|
||||
print 'Adding Qt plugins...'
|
||||
plugdir = os.path.join(QTDIR, 'plugins')
|
||||
for dirpath, dirnames, filenames in os.walk(plugdir):
|
||||
for f in filenames:
|
||||
if not f.endswith('.so') or 'designer' in dirpath or 'codecs' in dirpath or 'sqldrivers' in dirpath:
|
||||
continue
|
||||
f = os.path.join(dirpath, f)
|
||||
dest_dir = dirpath.replace(plugdir, os.path.join(FREEZE_DIR, 'qtplugins'))
|
||||
copy_binary(f, dest_dir)
|
||||
|
||||
sys.path.insert(0, CALIBRESRC)
|
||||
from calibre import __version__
|
||||
from calibre.parallel import PARALLEL_FUNCS
|
||||
from calibre.web.feeds.recipes import recipes
|
||||
hiddenimports = list(map(lambda x: x[0], PARALLEL_FUNCS.values()))
|
||||
hiddenimports += ['PyQt4.QtWebKit']
|
||||
hiddenimports += ['lxml._elementpath', 'keyword', 'codeop', 'commands', 'shlex', 'pydoc']
|
||||
hiddenimports += map(lambda x: x.__module__, recipes)
|
||||
open(os.path.join(PYINSTALLER, 'hooks', 'hook-calibre.parallel.py'), 'wb').write('hiddenimports = %s'%repr(hiddenimports))
|
||||
|
||||
def run_pyinstaller(args=sys.argv):
|
||||
subprocess.check_call(('/usr/bin/sudo', 'chown', '-R', 'kovid:users', glob.glob('/usr/lib/python*/site-packages/')[-1]))
|
||||
subprocess.check_call('rm -rf %(py)s/dist/* %(py)s/build/*'%dict(py=PYINSTALLER), shell=True)
|
||||
cp = HOME+'/build/'+os.path.basename(os.getcwd())
|
||||
spec = open(os.path.join(PYINSTALLER, 'calibre', 'calibre.spec'), 'wb')
|
||||
raw = re.sub(r'CALIBREPREFIX\s+=\s+\'___\'', 'CALIBREPREFIX = '+repr(cp),
|
||||
open(__file__).read())
|
||||
spec.write(raw)
|
||||
spec.close()
|
||||
os.chdir(PYINSTALLER)
|
||||
shutil.rmtree('calibre/dist')
|
||||
os.mkdir('calibre/dist')
|
||||
subprocess.check_call('python -OO Build.py calibre/calibre.spec', shell=True)
|
||||
|
||||
print 'Creating launchers'
|
||||
for exe in executables:
|
||||
path = os.path.join(FREEZE_DIR, exe)
|
||||
open(path, 'wb').write(textwrap.dedent('''\
|
||||
#!/bin/sh
|
||||
export CALIBRE_CX_EXE=%s
|
||||
path=`readlink -e $0`
|
||||
base=`dirname $path`
|
||||
loader=$base/loader
|
||||
export LD_LIBRARY_PATH=$base:$LD_LIBRARY_PATH
|
||||
$loader "$@"
|
||||
''')%exe)
|
||||
os.chmod(path, 0755)
|
||||
|
||||
exes = list(executables.keys())
|
||||
exes.remove('calibre_postinstall')
|
||||
exes.remove('calibre-parallel')
|
||||
open(os.path.join(FREEZE_DIR, 'manifest'), 'wb').write('\n'.join(exes))
|
||||
|
||||
print 'Creating archive...'
|
||||
dist = open(os.path.join(DIST_DIR, 'calibre-%s-i686.tar.bz2'%__version__), 'wb')
|
||||
with closing(tarfile.open(fileobj=dist, mode='w:bz2',
|
||||
format=tarfile.PAX_FORMAT)) as tf:
|
||||
for f in walk(FREEZE_DIR):
|
||||
name = f.replace(FREEZE_DIR, '')[1:]
|
||||
if name:
|
||||
tf.add(f, name)
|
||||
dist.flush()
|
||||
dist.seek(0, 2)
|
||||
print 'Archive %s created: %.2f MB'%(dist.name, dist.tell()/(1024.**2))
|
||||
return 0
|
||||
|
||||
|
||||
if __name__ == '__main__' and 'freeze.py' in __file__:
|
||||
sys.exit(run_pyinstaller())
|
||||
|
||||
|
||||
loader = os.path.join(os.path.expanduser('~/temp'), 'calibre_installer_loader.py')
|
||||
if not os.path.exists(loader):
|
||||
open(loader, 'wb').write('''
|
||||
import sys, os
|
||||
sys.frozen_path = os.getcwd()
|
||||
os.chdir(os.environ.get("ORIGWD", "."))
|
||||
sys.path.insert(0, os.path.join(sys.frozen_path, "library.pyz"))
|
||||
sys.path.insert(0, sys.frozen_path)
|
||||
from PyQt4.QtCore import QCoreApplication
|
||||
QCoreApplication.setLibraryPaths([sys.frozen_path, os.path.join(sys.frozen_path, "qtplugins")])
|
||||
''')
|
||||
excludes = ['gtk._gtk', 'gtk.glade', 'qt', 'matplotlib.nxutils', 'matplotlib._cntr',
|
||||
'matplotlib.ttconv', 'matplotlib._image', 'matplotlib.ft2font',
|
||||
'matplotlib._transforms', 'matplotlib._agg', 'matplotlib.backends._backend_agg',
|
||||
'matplotlib.axes', 'matplotlib', 'matplotlib.pyparsing',
|
||||
'TKinter', 'atk', 'gobject._gobject', 'pango', 'PIL', 'Image', 'IPython']
|
||||
|
||||
|
||||
sys.path.insert(0, CALIBRESRC)
|
||||
from calibre.linux import entry_points
|
||||
|
||||
executables, scripts = ['calibre_postinstall', 'calibre-parallel'], \
|
||||
[os.path.join(CALIBRESRC, 'calibre', 'linux.py'), os.path.join(CALIBRESRC, 'calibre', 'parallel.py')]
|
||||
|
||||
for entry in entry_points['console_scripts'] + entry_points['gui_scripts']:
|
||||
fields = entry.split('=')
|
||||
executables.append(fields[0].strip())
|
||||
scripts.append(os.path.join(CALIBRESRC, *map(lambda x: x.strip(), fields[1].split(':')[0].split('.')))+'.py')
|
||||
|
||||
analyses = [Analysis([os.path.join(HOMEPATH,'support/_mountzlib.py'), os.path.join(HOMEPATH,'support/useUnicode.py'), loader, script],
|
||||
pathex=[PYINSTALLER, CALIBRESRC], excludes=excludes) for script in scripts]
|
||||
|
||||
pyz = TOC()
|
||||
binaries = TOC()
|
||||
|
||||
for a in analyses:
|
||||
pyz = a.pure + pyz
|
||||
binaries = a.binaries + binaries
|
||||
pyz = PYZ(pyz, name='library.pyz')
|
||||
|
||||
built_executables = []
|
||||
for script, exe, a in zip(scripts, executables, analyses):
|
||||
built_executables.append(EXE(PYZ(TOC()),
|
||||
a.scripts+[('O','','OPTION'),],
|
||||
exclude_binaries=1,
|
||||
name=os.path.join('buildcalibre', exe),
|
||||
debug=False,
|
||||
strip=True,
|
||||
upx=False,
|
||||
excludes=excludes,
|
||||
console=1))
|
||||
|
||||
print 'Adding plugins...'
|
||||
for f in glob.glob(os.path.join(CALIBREPLUGINS, '*.so')):
|
||||
binaries += [('plugins/'+os.path.basename(f), f, 'BINARY')]
|
||||
for f in glob.glob(os.path.join(CALIBREPLUGINS, '*.so.*')):
|
||||
binaries += [(os.path.basename(f), f, 'BINARY')]
|
||||
|
||||
print 'Adding external programs...'
|
||||
binaries += [('pdftohtml', PDFTOHTML, 'BINARY'),
|
||||
('libunrar.so', LIBUNRAR, 'BINARY')]
|
||||
|
||||
print 'Adding external libraries...'
|
||||
binaries += [ (os.path.basename(x), x, 'BINARY') for x in (SQLITE, DBUS,
|
||||
LIBMNG, LIBZ, LIBBZ2, LIBUSB, LIBPOPPLER, LIBXML2, LIBXSLT, LIBEXSLT)]
|
||||
|
||||
|
||||
qt = []
|
||||
for dll in QTDLLS:
|
||||
path = os.path.join(QTDIR, 'lib'+dll+'.so.4')
|
||||
qt.append((os.path.basename(path), path, 'BINARY'))
|
||||
binaries += qt
|
||||
|
||||
plugins = []
|
||||
plugdir = os.path.join(QTDIR, 'plugins')
|
||||
for dirpath, dirnames, filenames in os.walk(plugdir):
|
||||
for f in filenames:
|
||||
if not f.endswith('.so') or 'designer' in dirpath or 'codcs' in dirpath or 'sqldrivers' in dirpath : continue
|
||||
f = os.path.join(dirpath, f)
|
||||
plugins.append(('qtplugins/'+f.replace(plugdir, ''), f, 'BINARY'))
|
||||
binaries += plugins
|
||||
|
||||
manifest = '/tmp/manifest'
|
||||
open(manifest, 'wb').write('\n'.join(executables))
|
||||
version = '/tmp/version'
|
||||
open(version, 'wb').write(__version__)
|
||||
coll = COLLECT(binaries, pyz,
|
||||
[('manifest', manifest, 'DATA'), ('version', version, 'DATA')],
|
||||
*built_executables,
|
||||
**dict(strip=True,
|
||||
upx=False,
|
||||
excludes=excludes,
|
||||
name='dist'))
|
||||
|
||||
os.chdir(os.path.join(HOMEPATH, 'calibre', 'dist'))
|
||||
for folder in EXTRAS:
|
||||
subprocess.check_call('cp -rf %s .'%folder, shell=True)
|
||||
|
||||
print 'Building tarball...'
|
||||
tbz2 = 'calibre-%s-i686.tar.bz2'%__version__
|
||||
tf = tarfile.open(os.path.join('/tmp', tbz2), 'w:bz2')
|
||||
|
||||
for f in os.listdir('.'):
|
||||
tf.add(f)
|
||||
if __name__ == '__main__':
|
||||
freeze()
|
||||
|
||||
@ -177,7 +177,7 @@ _check_symlinks_prescript()
|
||||
|
||||
def fix_python_dependencies(self, files):
|
||||
for f in files:
|
||||
subprocess.check_call(['/usr/bin/install_name_tool', '-change', '/Library/Frameworks/Python.framework/Versions/2.5/Python', '@executable_path/../Frameworks/Python.framework/Versions/2.5/Python', f])
|
||||
subprocess.check_call(['/usr/bin/install_name_tool', '-change', '/Library/Frameworks/Python.framework/Versions/2.6/Python', '@executable_path/../Frameworks/Python.framework/Versions/2.6/Python', f])
|
||||
|
||||
def fix_misc_dependencies(self, files):
|
||||
for path in files:
|
||||
@ -247,10 +247,13 @@ _check_symlinks_prescript()
|
||||
print 'Adding pdftohtml'
|
||||
os.link(os.path.expanduser('~/pdftohtml'), os.path.join(frameworks_dir, 'pdftohtml'))
|
||||
print 'Adding plugins'
|
||||
module_dir = os.path.join(resource_dir, 'lib', 'python2.5', 'lib-dynload')
|
||||
module_dir = os.path.join(resource_dir, 'lib', 'python2.6', 'lib-dynload')
|
||||
print 'Adding fontconfig'
|
||||
for f in glob.glob(os.path.expanduser('~/fontconfig-bundled/*')):
|
||||
os.link(f, os.path.join(frameworks_dir, os.path.basename(f)))
|
||||
dest = os.path.join(frameworks_dir, os.path.basename(f))
|
||||
if os.path.exists(dest):
|
||||
os.remove(dest)
|
||||
os.link(f, dest)
|
||||
dst = os.path.join(resource_dir, 'fonts')
|
||||
if os.path.exists(dst):
|
||||
shutil.rmtree(dst)
|
||||
@ -258,7 +261,7 @@ _check_symlinks_prescript()
|
||||
|
||||
print
|
||||
print 'Adding IPython'
|
||||
dst = os.path.join(resource_dir, 'lib', 'python2.5', 'IPython')
|
||||
dst = os.path.join(resource_dir, 'lib', 'python2.6', 'IPython')
|
||||
if os.path.exists(dst): shutil.rmtree(dst)
|
||||
shutil.copytree(os.path.expanduser('~/build/ipython/IPython'), dst)
|
||||
|
||||
@ -280,6 +283,7 @@ _check_symlinks_prescript()
|
||||
f = open(launcher_path, 'r')
|
||||
src = f.read()
|
||||
f.close()
|
||||
src = src.replace('import Image', 'from PIL import Image')
|
||||
src = re.sub('(_run\s*\(.*?.py.*?\))', cs+'%s'%(
|
||||
'''
|
||||
sys.frameworks_dir = os.path.join(os.path.dirname(os.environ['RESOURCEPATH']), 'Frameworks')
|
||||
@ -290,7 +294,7 @@ sys.frameworks_dir = os.path.join(os.path.dirname(os.environ['RESOURCEPATH']), '
|
||||
f.close()
|
||||
print
|
||||
print 'Adding main scripts to site-packages'
|
||||
f = zipfile.ZipFile(os.path.join(self.dist_dir, APPNAME+'.app', 'Contents', 'Resources', 'lib', 'python2.5', 'site-packages.zip'), 'a', zipfile.ZIP_DEFLATED)
|
||||
f = zipfile.ZipFile(os.path.join(self.dist_dir, APPNAME+'.app', 'Contents', 'Resources', 'lib', 'python2.6', 'site-packages.zip'), 'a', zipfile.ZIP_DEFLATED)
|
||||
for script in scripts['gui']+scripts['console']:
|
||||
f.write(script, script.partition('/')[-1])
|
||||
f.close()
|
||||
@ -322,7 +326,8 @@ def main():
|
||||
'genshi', 'calibre.web.feeds.recipes.*',
|
||||
'calibre.ebooks.lrf.any.*', 'calibre.ebooks.lrf.feeds.*',
|
||||
'keyword', 'codeop', 'pydoc', 'readline',
|
||||
'BeautifulSoup'],
|
||||
'BeautifulSoup'
|
||||
],
|
||||
'packages' : ['PIL', 'Authorization', 'lxml'],
|
||||
'excludes' : ['IPython'],
|
||||
'plist' : { 'CFBundleGetInfoString' : '''calibre, an E-book management application.'''
|
||||
|
||||
@ -6,13 +6,13 @@ __docformat__ = 'restructuredtext en'
|
||||
'''
|
||||
Freeze app into executable using py2exe.
|
||||
'''
|
||||
QT_DIR = 'C:\\Qt\\4.4.1'
|
||||
QT_DIR = 'C:\\Qt\\4.4.3'
|
||||
LIBUSB_DIR = 'C:\\libusb'
|
||||
LIBUNRAR = 'C:\\Program Files\\UnrarDLL\\unrar.dll'
|
||||
PDFTOHTML = 'C:\\pdftohtml\\pdftohtml.exe'
|
||||
IMAGEMAGICK_DIR = 'C:\\ImageMagick'
|
||||
FONTCONFIG_DIR = 'C:\\fontconfig'
|
||||
|
||||
VC90 = r'C:\Program Files\Microsoft Visual Studio 9.0\VC\redist\x86\Microsoft.VC90.CRT'
|
||||
|
||||
import sys, os, py2exe, shutil, zipfile, glob, subprocess, re
|
||||
from distutils.core import setup
|
||||
@ -65,6 +65,8 @@ class BuildEXE(py2exe.build_exe.py2exe):
|
||||
shutil.copyfile(f, os.path.join(self.dist_dir, os.path.basename(f)))
|
||||
for f in glob.glob(os.path.join(BASE_DIR, 'src', 'calibre', 'plugins', '*.pyd')):
|
||||
shutil.copyfile(f, os.path.join(tgt, os.path.basename(f)))
|
||||
for f in glob.glob(os.path.join(BASE_DIR, 'src', 'calibre', 'plugins', '*.manifest')):
|
||||
shutil.copyfile(f, os.path.join(tgt, os.path.basename(f)))
|
||||
shutil.copyfile('LICENSE', os.path.join(self.dist_dir, 'LICENSE'))
|
||||
print
|
||||
print 'Adding QtXml4.dll'
|
||||
@ -115,12 +117,17 @@ class BuildEXE(py2exe.build_exe.py2exe):
|
||||
shutil.copytree(f, tgt)
|
||||
else:
|
||||
shutil.copyfile(f, tgt)
|
||||
|
||||
|
||||
print
|
||||
print 'Doing DLL redirection' # See http://msdn.microsoft.com/en-us/library/ms682600(VS.85).aspx
|
||||
for f in glob.glob(os.path.join(PY2EXE_DIR, '*.exe')):
|
||||
open(f + '.local', 'w').write('\n')
|
||||
|
||||
print
|
||||
print 'Adding Windows runtime dependencies...'
|
||||
for f in glob.glob(os.path.join(VC90, '*')):
|
||||
shutil.copyfile(f, os.path.join(PY2EXE_DIR, os.path.basename(f)))
|
||||
|
||||
|
||||
@classmethod
|
||||
def manifest(cls, prog):
|
||||
@ -142,17 +149,17 @@ def main(args=sys.argv):
|
||||
{'script' : scripts['gui'][0],
|
||||
'dest_base' : APPNAME,
|
||||
'icon_resources' : [(1, ICONS[0])],
|
||||
'other_resources' : [BuildEXE.manifest(APPNAME)],
|
||||
#'other_resources' : [BuildEXE.manifest(APPNAME)],
|
||||
},
|
||||
{'script' : scripts['gui'][1],
|
||||
'dest_base' : 'lrfviewer',
|
||||
'icon_resources' : [(1, ICONS[1])],
|
||||
'other_resources' : [BuildEXE.manifest('lrfviewer')],
|
||||
#'other_resources' : [BuildEXE.manifest('lrfviewer')],
|
||||
},
|
||||
{'script' : scripts['gui'][2],
|
||||
'dest_base' : 'ebook-viewer',
|
||||
'icon_resources' : [(1, ICONS[1])],
|
||||
'other_resources' : [BuildEXE.manifest('ebook-viewer')],
|
||||
#'other_resources' : [BuildEXE.manifest('ebook-viewer')],
|
||||
},
|
||||
],
|
||||
console = console,
|
||||
@ -162,12 +169,12 @@ def main(args=sys.argv):
|
||||
'includes' : [
|
||||
'sip', 'pkg_resources', 'PyQt4.QtSvg',
|
||||
'mechanize', 'ClientForm', 'wmi',
|
||||
'win32file', 'pythoncom', 'rtf2xml',
|
||||
'win32file', 'pythoncom',
|
||||
'win32process', 'win32api', 'msvcrt',
|
||||
'win32event', 'calibre.ebooks.lrf.any.*',
|
||||
'calibre.ebooks.lrf.feeds.*',
|
||||
'genshi', 'BeautifulSoup',
|
||||
'path', 'pydoc', 'IPython.Extensions.*',
|
||||
'BeautifulSoup', 'pyreadline',
|
||||
'pydoc', 'IPython.Extensions.*',
|
||||
'calibre.web.feeds.recipes.*',
|
||||
'PyQt4.QtWebKit', 'PyQt4.QtNetwork',
|
||||
],
|
||||
|
||||
@ -6,7 +6,7 @@ __docformat__ = 'restructuredtext en'
|
||||
'''
|
||||
Build PyQt extensions. Integrates with distutils (but uses the PyQt build system).
|
||||
'''
|
||||
from distutils.core import Extension
|
||||
from distutils.core import Extension as _Extension
|
||||
from distutils.command.build_ext import build_ext as _build_ext
|
||||
from distutils.dep_util import newer_group
|
||||
from distutils import log
|
||||
@ -15,12 +15,23 @@ import sipconfig, os, sys, string, glob, shutil
|
||||
from PyQt4 import pyqtconfig
|
||||
iswindows = 'win32' in sys.platform
|
||||
QMAKE = os.path.expanduser('~/qt/bin/qmake') if 'darwin' in sys.platform else'qmake'
|
||||
WINDOWS_PYTHON = ['C:/Python25/libs']
|
||||
WINDOWS_PYTHON = ['C:/Python26/libs']
|
||||
OSX_SDK = '/Developer/SDKs/MacOSX10.4u.sdk'
|
||||
|
||||
def replace_suffix(path, new_suffix):
|
||||
return os.path.splitext(path)[0] + new_suffix
|
||||
|
||||
class Extension(_Extension):
|
||||
pass
|
||||
|
||||
if iswindows:
|
||||
from distutils import msvc9compiler
|
||||
msvc = msvc9compiler.MSVCCompiler()
|
||||
msvc.initialize()
|
||||
nmake = msvc.find_exe('nmake.exe')
|
||||
rc = msvc.find_exe('rc.exe')
|
||||
|
||||
|
||||
class PyQtExtension(Extension):
|
||||
|
||||
def __init__(self, name, sources, sip_sources, **kw):
|
||||
@ -37,9 +48,7 @@ class PyQtExtension(Extension):
|
||||
class build_ext(_build_ext):
|
||||
|
||||
def make(self, makefile):
|
||||
make = 'make'
|
||||
if iswindows:
|
||||
make = 'mingw32-make'
|
||||
make = nmake if iswindows else 'make'
|
||||
self.spawn([make, '-f', makefile])
|
||||
|
||||
def build_qt_objects(self, ext, bdir):
|
||||
@ -65,12 +74,13 @@ CONFIG += x86 ppc
|
||||
open(name+'.pro', 'wb').write(pro)
|
||||
self.spawn([QMAKE, '-o', 'Makefile.qt', name+'.pro'])
|
||||
self.make('Makefile.qt')
|
||||
pat = 'release\\*.o' if iswindows else '*.o'
|
||||
pat = 'release\\*.obj' if iswindows else '*.o'
|
||||
return map(os.path.abspath, glob.glob(pat))
|
||||
finally:
|
||||
os.chdir(cwd)
|
||||
|
||||
def build_sbf(self, sip, sbf, bdir):
|
||||
print '\tBuilding spf...'
|
||||
sip_bin = self.sipcfg.sip_bin
|
||||
self.spawn([sip_bin,
|
||||
"-c", bdir,
|
||||
@ -100,9 +110,7 @@ CONFIG += x86 ppc
|
||||
|
||||
def build_extension(self, ext):
|
||||
self.inplace = True # Causes extensions to be built in the source tree
|
||||
if not isinstance(ext, PyQtExtension):
|
||||
return _build_ext.build_extension(self, ext)
|
||||
|
||||
|
||||
fullname = self.get_ext_fullname(ext.name)
|
||||
if self.inplace:
|
||||
# ignore build-lib -- put the compiled extension into
|
||||
@ -119,7 +127,38 @@ CONFIG += x86 ppc
|
||||
else:
|
||||
ext_filename = os.path.join(self.build_lib,
|
||||
self.get_ext_filename(fullname))
|
||||
bdir = os.path.abspath(os.path.join(self.build_temp, fullname))
|
||||
bdir = os.path.abspath(os.path.join(self.build_temp, fullname))
|
||||
if not os.path.exists(bdir):
|
||||
os.makedirs(bdir)
|
||||
|
||||
if not isinstance(ext, PyQtExtension):
|
||||
if not iswindows:
|
||||
return _build_ext.build_extension(self, ext)
|
||||
|
||||
c_sources = [f for f in ext.sources if os.path.splitext(f)[1].lower() in ('.c', '.cpp', '.cxx')]
|
||||
compile_args = '/c /nologo /Ox /MD /W3 /GX /DNDEBUG'.split()
|
||||
compile_args += ext.extra_compile_args
|
||||
self.swig_opts = ''
|
||||
inc_dirs = self.include_dirs + [x.replace('/', '\\') for x in ext.include_dirs]
|
||||
cc = [msvc.cc] + compile_args + ['-I%s'%x for x in list(set(inc_dirs))]
|
||||
objects = []
|
||||
for f in c_sources:
|
||||
o = os.path.join(bdir, os.path.basename(f)+'.obj')
|
||||
objects.append(o)
|
||||
compiler = cc + ['/Tc'+f, '/Fo'+o]
|
||||
self.spawn(compiler)
|
||||
out = os.path.join(bdir, base+'.pyd')
|
||||
linker = [msvc.linker] + '/DLL /nologo /INCREMENTAL:NO'.split()
|
||||
linker += ['/LIBPATH:'+x for x in self.library_dirs]
|
||||
linker += [x+'.lib' for x in ext.libraries]
|
||||
linker += ['/EXPORT:init'+base] + objects + ['/OUT:'+out]
|
||||
self.spawn(linker)
|
||||
for src in (out, out+'.manifest'):
|
||||
shutil.copyfile(src, os.path.join('src', 'calibre', 'plugins', os.path.basename(src)))
|
||||
return
|
||||
|
||||
|
||||
|
||||
if not os.path.exists(bdir):
|
||||
os.makedirs(bdir)
|
||||
ext.sources2 = map(os.path.abspath, ext.sources)
|
||||
|
||||
81
setup.py
81
setup.py
@ -46,10 +46,11 @@ main_functions = {
|
||||
}
|
||||
|
||||
if __name__ == '__main__':
|
||||
from setuptools import setup, find_packages, Extension
|
||||
from setuptools import setup, find_packages
|
||||
from setuptools.command.build_py import build_py as _build_py, convert_path
|
||||
from distutils.command.build import build as _build
|
||||
from distutils.core import Command as _Command
|
||||
from pyqtdistutils import PyQtExtension, build_ext
|
||||
from pyqtdistutils import PyQtExtension, build_ext, Extension
|
||||
import subprocess, glob
|
||||
|
||||
def newer(targets, sources):
|
||||
@ -65,6 +66,25 @@ if __name__ == '__main__':
|
||||
newest_source, oldest_target = max(stimes), min(ttimes)
|
||||
return newest_source > oldest_target
|
||||
|
||||
class build_py(_build_py):
|
||||
|
||||
def find_data_files(self, package, src_dir):
|
||||
"""
|
||||
Return filenames for package's data files in 'src_dir'
|
||||
Modified to treat data file specs as paths not globs
|
||||
"""
|
||||
globs = (self.package_data.get('', [])
|
||||
+ self.package_data.get(package, []))
|
||||
files = self.manifest_files.get(package, [])[:]
|
||||
for pattern in globs:
|
||||
# Each pattern has to be converted to a platform-specific path
|
||||
pattern = os.path.join(src_dir, convert_path(pattern))
|
||||
next = glob.glob(pattern)
|
||||
files.extend(next if next else [pattern])
|
||||
|
||||
return self.exclude_data_files(package, src_dir, files)
|
||||
|
||||
|
||||
class Command(_Command):
|
||||
user_options = []
|
||||
def initialize_options(self): pass
|
||||
@ -146,6 +166,7 @@ if __name__ == '__main__':
|
||||
metadata_sqlite = 'library/metadata_sqlite.sql',
|
||||
jquery = 'gui2/viewer/jquery.js',
|
||||
jquery_scrollTo = 'gui2/viewer/jquery_scrollTo.js',
|
||||
html_css = 'ebooks/lit/html.css',
|
||||
)
|
||||
|
||||
DEST = os.path.join('src', APPNAME, 'resources.py')
|
||||
@ -251,6 +272,7 @@ if __name__ == '__main__':
|
||||
description='''Compile all GUI forms and images'''
|
||||
PATH = os.path.join('src', APPNAME, 'gui2')
|
||||
IMAGES_DEST = os.path.join(PATH, 'images_rc.py')
|
||||
QRC = os.path.join(PATH, 'images.qrc')
|
||||
|
||||
@classmethod
|
||||
def find_forms(cls):
|
||||
@ -330,9 +352,9 @@ if __name__ == '__main__':
|
||||
c = cls.form_to_compiled_form(form)
|
||||
if os.path.exists(c):
|
||||
os.remove(c)
|
||||
images = cls.IMAGES_DEST
|
||||
if os.path.exists(images):
|
||||
os.remove(images)
|
||||
for x in (cls.IMAGES_DEST, cls.QRC):
|
||||
if os.path.exists(x):
|
||||
os.remove(x)
|
||||
|
||||
class clean(Command):
|
||||
description='''Delete all computer generated files in the source tree'''
|
||||
@ -348,32 +370,34 @@ if __name__ == '__main__':
|
||||
os.remove(f)
|
||||
for root, dirs, files in os.walk('.'):
|
||||
for name in files:
|
||||
if name.endswith('~') or \
|
||||
name.endswith('.pyc') or \
|
||||
name.endswith('.pyo'):
|
||||
os.remove(os.path.join(root, name))
|
||||
for t in ('.pyc', '.pyo', '~'):
|
||||
if name.endswith(t):
|
||||
os.remove(os.path.join(root, name))
|
||||
break
|
||||
|
||||
for dir in 'build', 'dist':
|
||||
for f in os.listdir(dir):
|
||||
if os.path.isdir(dir + os.sep + f):
|
||||
shutil.rmtree(dir + os.sep + f)
|
||||
else:
|
||||
os.remove(dir + os.sep + f)
|
||||
for dir in ('build', 'dist', os.path.join('src', 'calibre.egg-info')):
|
||||
shutil.rmtree(dir, ignore_errors=True)
|
||||
|
||||
class build(_build):
|
||||
|
||||
sub_commands = \
|
||||
[
|
||||
sub_commands = [
|
||||
('resources', lambda self : 'CALIBRE_BUILDBOT' not in os.environ.keys()),
|
||||
('translations', lambda self : 'CALIBRE_BUILDBOT' not in os.environ.keys()),
|
||||
('gui', lambda self : 'CALIBRE_BUILDBOT' not in os.environ.keys()),
|
||||
] + _build.sub_commands
|
||||
|
||||
('build_ext', lambda self: True),
|
||||
('build_py', lambda self: True),
|
||||
('build_clib', _build.has_c_libraries),
|
||||
('build_scripts', _build.has_scripts),
|
||||
]
|
||||
|
||||
entry_points['console_scripts'].append('calibre_postinstall = calibre.linux:post_install')
|
||||
ext_modules = [
|
||||
Extension('calibre.plugins.lzx',
|
||||
sources=['src/calibre/utils/lzx/lzxmodule.c',
|
||||
'src/calibre/utils/lzx/lzxd.c'],
|
||||
'src/calibre/utils/lzx/compressor.c',
|
||||
'src/calibre/utils/lzx/lzxd.c',
|
||||
'src/calibre/utils/lzx/lzc.c',
|
||||
'src/calibre/utils/lzx/lzxc.c'],
|
||||
include_dirs=['src/calibre/utils/lzx']),
|
||||
|
||||
Extension('calibre.plugins.msdes',
|
||||
@ -391,13 +415,18 @@ if __name__ == '__main__':
|
||||
ext_modules.append(Extension('calibre.plugins.winutil',
|
||||
sources=['src/calibre/utils/windows/winutil.c'],
|
||||
libraries=['shell32', 'setupapi'],
|
||||
include_dirs=['C:/WinDDK/6001.18001/inc/api/'])
|
||||
)
|
||||
include_dirs=['C:/WinDDK/6001.18001/inc/api/',
|
||||
'C:/WinDDK/6001.18001/inc/crt/'],
|
||||
extra_compile_args=['/X']
|
||||
))
|
||||
if isosx:
|
||||
ext_modules.append(Extension('calibre.plugins.usbobserver',
|
||||
sources=['src/calibre/devices/usbobserver/usbobserver.c'])
|
||||
sources=['src/calibre/devices/usbobserver/usbobserver.c'],
|
||||
extra_link_args=['-framework', 'IOKit'])
|
||||
)
|
||||
|
||||
plugins = ['plugins/%s.so'%(x.name.rpartition('.')[-1]) for x in ext_modules]
|
||||
|
||||
setup(
|
||||
name = APPNAME,
|
||||
packages = find_packages('src'),
|
||||
@ -406,8 +435,7 @@ if __name__ == '__main__':
|
||||
author = 'Kovid Goyal',
|
||||
author_email = 'kovid@kovidgoyal.net',
|
||||
url = 'http://%s.kovidgoyal.net'%APPNAME,
|
||||
package_data = {'calibre':['plugins/*']},
|
||||
include_package_data = True,
|
||||
package_data = {'calibre':plugins},
|
||||
entry_points = entry_points,
|
||||
zip_safe = False,
|
||||
options = { 'bdist_egg' : {'exclude_source_files': True,}, },
|
||||
@ -448,7 +476,8 @@ if __name__ == '__main__':
|
||||
],
|
||||
cmdclass = {
|
||||
'build_ext' : build_ext,
|
||||
'build' : build,
|
||||
'build' : build,
|
||||
'build_py' : build_py,
|
||||
'pot' : pot,
|
||||
'manual' : manual,
|
||||
'resources' : resources,
|
||||
|
||||
@ -13,13 +13,19 @@ from calibre.startup import plugins, winutil, winutilerror
|
||||
from calibre.constants import iswindows, isosx, islinux, isfrozen, \
|
||||
terminal_controller, preferred_encoding, \
|
||||
__appname__, __version__, __author__, \
|
||||
win32event, win32api, winerror, fcntl
|
||||
win32event, win32api, winerror, fcntl, \
|
||||
filesystem_encoding
|
||||
import mechanize
|
||||
|
||||
mimetypes.add_type('application/epub+zip', '.epub')
|
||||
mimetypes.add_type('text/x-sony-bbeb+xml', '.lrs')
|
||||
mimetypes.add_type('application/x-sony-bbeb', '.lrf')
|
||||
|
||||
def to_unicode(raw, encoding='utf-8', errors='strict'):
|
||||
if isinstance(raw, unicode):
|
||||
return raw
|
||||
return raw.decode(encoding, errors)
|
||||
|
||||
def unicode_path(path, abs=False):
|
||||
if not isinstance(path, unicode):
|
||||
path = path.decode(sys.getfilesystemencoding())
|
||||
@ -36,6 +42,28 @@ def osx_version():
|
||||
return int(m.group(1)), int(m.group(2)), int(m.group(3))
|
||||
|
||||
|
||||
_filename_sanitize = re.compile(r'[\xae\0\\|\?\*<":>\+\[\]/]')
|
||||
|
||||
def sanitize_file_name(name, substitute='_', as_unicode=False):
|
||||
'''
|
||||
Sanitize the filename `name`. All invalid characters are replaced by `substitute`.
|
||||
The set of invalid characters is the union of the invalid characters in Windows,
|
||||
OS X and Linux. Also removes leading an trailing whitespace.
|
||||
**WARNING:** This function also replaces path separators, so only pass file names
|
||||
and not full paths to it.
|
||||
*NOTE:* This function always returns byte strings, not unicode objects. The byte strings
|
||||
are encoded in the filesystem encoding of the platform, or UTF-8.
|
||||
'''
|
||||
if isinstance(name, unicode):
|
||||
name = name.encode(filesystem_encoding, 'ignore')
|
||||
one = _filename_sanitize.sub(substitute, name)
|
||||
one = re.sub(r'\s', ' ', one).strip()
|
||||
one = re.sub(r'^\.+$', '_', one)
|
||||
if as_unicode:
|
||||
one = one.decode(filesystem_encoding)
|
||||
return one
|
||||
|
||||
|
||||
class CommandLineError(Exception):
|
||||
pass
|
||||
|
||||
@ -196,13 +224,6 @@ class CurrentDir(object):
|
||||
def __exit__(self, *args):
|
||||
os.chdir(self.cwd)
|
||||
|
||||
def sanitize_file_name(name):
|
||||
'''
|
||||
Remove characters that are illegal in filenames from name.
|
||||
Also remove path separators. All illegal characters are replaced by
|
||||
underscores.
|
||||
'''
|
||||
return re.sub(r'\s', ' ', re.sub(r'[\xae"\'\|\~\:\?\\\/]|^-', '_', name.strip()))
|
||||
|
||||
def detect_ncpus():
|
||||
"""Detects the number of effective CPUs in the system"""
|
||||
@ -317,7 +338,12 @@ class LoggingInterface:
|
||||
def ___log(self, func, msg, args, kwargs):
|
||||
args = [msg] + list(args)
|
||||
for i in range(len(args)):
|
||||
if isinstance(args[i], unicode):
|
||||
if not isinstance(args[i], basestring):
|
||||
continue
|
||||
if sys.version_info[:2] > (2, 5):
|
||||
if not isinstance(args[i], unicode):
|
||||
args[i] = args[i].decode(preferred_encoding, 'replace')
|
||||
elif isinstance(args[i], unicode):
|
||||
args[i] = args[i].encode(preferred_encoding, 'replace')
|
||||
func(*args, **kwargs)
|
||||
|
||||
@ -355,7 +381,13 @@ def strftime(fmt, t=time.localtime()):
|
||||
fmt = fmt.encode('mbcs')
|
||||
return plugins['winutil'][0].strftime(fmt, t)
|
||||
return time.strftime(fmt, t).decode(preferred_encoding, 'replace')
|
||||
|
||||
|
||||
def my_unichr(num):
|
||||
try:
|
||||
return unichr(num)
|
||||
except ValueError:
|
||||
return u'?'
|
||||
|
||||
def entity_to_unicode(match, exceptions=[], encoding='cp1252'):
|
||||
'''
|
||||
@param match: A match object such that '&'+match.group(1)';' is the entity.
|
||||
@ -371,7 +403,7 @@ def entity_to_unicode(match, exceptions=[], encoding='cp1252'):
|
||||
if ent.startswith(u'#x'):
|
||||
num = int(ent[2:], 16)
|
||||
if encoding is None or num > 255:
|
||||
return unichr(num)
|
||||
return my_unichr(num)
|
||||
return chr(num).decode(encoding)
|
||||
if ent.startswith(u'#'):
|
||||
try:
|
||||
@ -379,13 +411,13 @@ def entity_to_unicode(match, exceptions=[], encoding='cp1252'):
|
||||
except ValueError:
|
||||
return '&'+ent+';'
|
||||
if encoding is None or num > 255:
|
||||
return unichr(num)
|
||||
return my_unichr(num)
|
||||
try:
|
||||
return chr(num).decode(encoding)
|
||||
except UnicodeDecodeError:
|
||||
return unichr(num)
|
||||
return my_unichr(num)
|
||||
try:
|
||||
return unichr(name2codepoint[ent])
|
||||
return my_unichr(name2codepoint[ent])
|
||||
except KeyError:
|
||||
return '&'+ent+';'
|
||||
|
||||
|
||||
@ -2,7 +2,7 @@ __license__ = 'GPL v3'
|
||||
__copyright__ = '2008, Kovid Goyal kovid@kovidgoyal.net'
|
||||
__docformat__ = 'restructuredtext en'
|
||||
__appname__ = 'calibre'
|
||||
__version__ = '0.4.109'
|
||||
__version__ = '0.4.125'
|
||||
__author__ = "Kovid Goyal <kovid@kovidgoyal.net>"
|
||||
'''
|
||||
Various run time constants.
|
||||
@ -29,6 +29,10 @@ winerror = __import__('winerror') if iswindows else None
|
||||
win32api = __import__('win32api') if iswindows else None
|
||||
fcntl = None if iswindows else __import__('fcntl')
|
||||
|
||||
filesystem_encoding = sys.getfilesystemencoding()
|
||||
if filesystem_encoding is None: filesystem_encoding = 'utf-8'
|
||||
|
||||
|
||||
################################################################################
|
||||
plugins = None
|
||||
if plugins is None:
|
||||
|
||||
223
src/calibre/customize/__init__.py
Normal file
223
src/calibre/customize/__init__.py
Normal file
@ -0,0 +1,223 @@
|
||||
from __future__ import with_statement
|
||||
__license__ = 'GPL v3'
|
||||
__copyright__ = '2008, Kovid Goyal <kovid at kovidgoyal.net>'
|
||||
|
||||
import sys
|
||||
|
||||
from calibre.ptempfile import PersistentTemporaryFile
|
||||
from calibre.constants import __version__, __author__
|
||||
|
||||
class Plugin(object):
|
||||
'''
|
||||
A calibre plugin. Useful members include:
|
||||
|
||||
* ``self.plugin_path``: Stores path to the zip file that contains
|
||||
this plugin or None if it is a builtin
|
||||
plugin
|
||||
* ``self.site_customization``: Stores a customization string entered
|
||||
by the user.
|
||||
|
||||
Methods that should be overridden in sub classes:
|
||||
|
||||
* :meth:`initialize`
|
||||
* :meth:`customization_help`
|
||||
|
||||
Useful methods:
|
||||
|
||||
* :meth:`temporary_file`
|
||||
|
||||
'''
|
||||
#: List of platforms this plugin works on
|
||||
#: For example: ``['windows', 'osx', 'linux']
|
||||
supported_platforms = []
|
||||
|
||||
#: The name of this plugin
|
||||
name = 'Trivial Plugin'
|
||||
|
||||
#: The version of this plugin as a 3-tuple (major, minor, revision)
|
||||
version = (1, 0, 0)
|
||||
|
||||
#: A short string describing what this plugin does
|
||||
description = _('Does absolutely nothing')
|
||||
|
||||
#: The author of this plugin
|
||||
author = _('Unknown')
|
||||
|
||||
#: When more than one plugin exists for a filetype,
|
||||
#: the plugins are run in order of decreasing priority
|
||||
#: i.e. plugins with higher priority will be run first.
|
||||
#: The highest possible priority is ``sys.maxint``.
|
||||
#: Default pririty is 1.
|
||||
priority = 1
|
||||
|
||||
#: The earliest version of calibre this plugin requires
|
||||
minimum_calibre_version = (0, 4, 118)
|
||||
|
||||
#: If False, the user will not be able to disable this plugin. Use with
|
||||
#: care.
|
||||
can_be_disabled = True
|
||||
|
||||
#: The type of this plugin. Used for categorizing plugins in the
|
||||
#: GUI
|
||||
type = _('Base')
|
||||
|
||||
def __init__(self, plugin_path):
|
||||
self.plugin_path = plugin_path
|
||||
self.site_customization = None
|
||||
|
||||
def initialize(self):
|
||||
'''
|
||||
Called once when calibre plugins are initialized. Plugins are re-initialized
|
||||
every time a new plugin is added.
|
||||
|
||||
Perform any plugin specific initialization here, such as extracting
|
||||
resources from the plugin zip file. The path to the zip file is
|
||||
available as ``self.plugin_path``.
|
||||
|
||||
Note that ``self.site_customization`` is **not** available at this point.
|
||||
'''
|
||||
pass
|
||||
|
||||
def customization_help(self, gui=False):
|
||||
'''
|
||||
Return a string giving help on how to customize this plugin.
|
||||
By default raise a :class:`NotImplementedError`, which indicates that
|
||||
the plugin does not require customization.
|
||||
|
||||
If you re-implement this method in your subclass, the user will
|
||||
be asked to enter a string as customization for this plugin.
|
||||
The customization string will be available as
|
||||
``self.site_customization``.
|
||||
|
||||
Site customization could be anything, for example, the path to
|
||||
a needed binary on the user's computer.
|
||||
|
||||
:param gui: If True return HTML help, otherwise return plain text help.
|
||||
|
||||
'''
|
||||
raise NotImplementedError
|
||||
|
||||
def temporary_file(self, suffix):
|
||||
'''
|
||||
Return a file-like object that is a temporary file on the file system.
|
||||
This file will remain available even after being closed and will only
|
||||
be removed on interpreter shutdown. Use the ``name`` member of the
|
||||
returned object to access the full path to the created temporary file.
|
||||
|
||||
:param suffix: The suffix that the temporary file will have.
|
||||
'''
|
||||
return PersistentTemporaryFile(suffix)
|
||||
|
||||
def is_customizable(self):
|
||||
try:
|
||||
self.customization_help()
|
||||
return True
|
||||
except NotImplementedError:
|
||||
return False
|
||||
|
||||
def __enter__(self, *args):
|
||||
if self.plugin_path is not None:
|
||||
sys.path.insert(0, self.plugin_path)
|
||||
|
||||
def __exit__(self, *args):
|
||||
if self.plugin_path in sys.path:
|
||||
sys.path.remove(self.plugin_path)
|
||||
|
||||
|
||||
class FileTypePlugin(Plugin):
|
||||
'''
|
||||
A plugin that is associated with a particular set of file types.
|
||||
'''
|
||||
|
||||
#: Set of file types for which this plugin should be run
|
||||
#: For example: ``set(['lit', 'mobi', 'prc'])``
|
||||
file_types = set([])
|
||||
|
||||
#: If True, this plugin is run when books are added
|
||||
#: to the database
|
||||
on_import = False
|
||||
|
||||
#: If True, this plugin is run whenever an any2* tool
|
||||
#: is used, on the file passed to the any2* tool.
|
||||
on_preprocess = False
|
||||
|
||||
#: If True, this plugin is run after an any2* tool is
|
||||
#: used, on the final file produced by the tool.
|
||||
on_postprocess = False
|
||||
|
||||
type = _('File type')
|
||||
|
||||
def run(self, path_to_ebook):
|
||||
'''
|
||||
Run the plugin. Must be implemented in subclasses.
|
||||
It should perform whatever modifications are required
|
||||
on the ebook and return the absolute path to the
|
||||
modified ebook. If no modifications are needed, it should
|
||||
return the path to the original ebook. If an error is encountered
|
||||
it should raise an Exception. The default implementation
|
||||
simply return the path to the original ebook.
|
||||
|
||||
The modified ebook file should be created with the
|
||||
:meth:`temporary_file` method.
|
||||
|
||||
:param path_to_ebook: Absolute path to the ebook.
|
||||
|
||||
:return: Absolute path to the modified ebook.
|
||||
'''
|
||||
# Default implementation does nothing
|
||||
return path_to_ebook
|
||||
|
||||
class MetadataReaderPlugin(Plugin):
|
||||
'''
|
||||
A plugin that implements reading metadata from a set of file types.
|
||||
'''
|
||||
#: Set of file types for which this plugin should be run
|
||||
#: For example: ``set(['lit', 'mobi', 'prc'])``
|
||||
file_types = set([])
|
||||
|
||||
supported_platforms = ['windows', 'osx', 'linux']
|
||||
version = tuple(map(int, (__version__.split('.'))[:3]))
|
||||
author = 'Kovid Goyal'
|
||||
|
||||
type = _('Metadata reader')
|
||||
|
||||
def get_metadata(self, stream, type):
|
||||
'''
|
||||
Return metadata for the file represented by stream (a file like object
|
||||
that supports reading). Raise an exception when there is an error
|
||||
with the input data.
|
||||
|
||||
:param type: The type of file. Guaranteed to be one of the entries
|
||||
in :attr:`file_types`.
|
||||
|
||||
:return: A :class:`calibre.ebooks.metadata.MetaInformation` object
|
||||
'''
|
||||
return None
|
||||
|
||||
class MetadataWriterPlugin(Plugin):
|
||||
'''
|
||||
A plugin that implements reading metadata from a set of file types.
|
||||
'''
|
||||
#: Set of file types for which this plugin should be run
|
||||
#: For example: ``set(['lit', 'mobi', 'prc'])``
|
||||
file_types = set([])
|
||||
|
||||
supported_platforms = ['windows', 'osx', 'linux']
|
||||
version = tuple(map(int, (__version__.split('.'))[:3]))
|
||||
author = 'Kovid Goyal'
|
||||
|
||||
type = _('Metadata writer')
|
||||
|
||||
def set_metadata(self, stream, mi, type):
|
||||
'''
|
||||
Set metadata for the file represented by stream (a file like object
|
||||
that supports reading). Raise an exception when there is an error
|
||||
with the input data.
|
||||
|
||||
:param type: The type of file. Guaranteed to be one of the entries
|
||||
in :attr:`file_types`.
|
||||
:param mi: A :class:`calibre.ebooks.metadata.MetaInformation` object
|
||||
|
||||
'''
|
||||
pass
|
||||
|
||||
205
src/calibre/customize/builtins.py
Normal file
205
src/calibre/customize/builtins.py
Normal file
@ -0,0 +1,205 @@
|
||||
from __future__ import with_statement
|
||||
__license__ = 'GPL v3'
|
||||
__copyright__ = '2008, Kovid Goyal <kovid at kovidgoyal.net>'
|
||||
|
||||
import textwrap, os
|
||||
from calibre.customize import FileTypePlugin, MetadataReaderPlugin, MetadataWriterPlugin
|
||||
from calibre.constants import __version__
|
||||
|
||||
class HTML2ZIP(FileTypePlugin):
|
||||
name = 'HTML to ZIP'
|
||||
author = 'Kovid Goyal'
|
||||
description = textwrap.dedent(_('''\
|
||||
Follow all local links in an HTML file and create a ZIP \
|
||||
file containing all linked files. This plugin is run \
|
||||
every time you add an HTML file to the library.\
|
||||
'''))
|
||||
version = tuple(map(int, (__version__.split('.'))[:3]))
|
||||
file_types = set(['html', 'htm', 'xhtml', 'xhtm'])
|
||||
supported_platforms = ['windows', 'osx', 'linux']
|
||||
on_import = True
|
||||
|
||||
def run(self, htmlfile):
|
||||
of = self.temporary_file('_plugin_html2zip.zip')
|
||||
from calibre.ebooks.html import gui_main as html2oeb
|
||||
html2oeb(htmlfile, of)
|
||||
return of.name
|
||||
|
||||
class RTFMetadataReader(MetadataReaderPlugin):
|
||||
|
||||
name = 'Read RTF metadata'
|
||||
file_types = set(['rtf'])
|
||||
description = _('Read metadata from %s files')%'RTF'
|
||||
|
||||
def get_metadata(self, stream, ftype):
|
||||
from calibre.ebooks.metadata.rtf import get_metadata
|
||||
return get_metadata(stream)
|
||||
|
||||
class FB2MetadataReader(MetadataReaderPlugin):
|
||||
|
||||
name = 'Read FB2 metadata'
|
||||
file_types = set(['fb2'])
|
||||
description = _('Read metadata from %s files')%'FB2'
|
||||
|
||||
def get_metadata(self, stream, ftype):
|
||||
from calibre.ebooks.metadata.fb2 import get_metadata
|
||||
return get_metadata(stream)
|
||||
|
||||
|
||||
class LRFMetadataReader(MetadataReaderPlugin):
|
||||
|
||||
name = 'Read LRF metadata'
|
||||
file_types = set(['lrf'])
|
||||
description = _('Read metadata from %s files')%'LRF'
|
||||
|
||||
def get_metadata(self, stream, ftype):
|
||||
from calibre.ebooks.lrf.meta import get_metadata
|
||||
return get_metadata(stream)
|
||||
|
||||
class PDFMetadataReader(MetadataReaderPlugin):
|
||||
|
||||
name = 'Read PDF metadata'
|
||||
file_types = set(['pdf'])
|
||||
description = _('Read metadata from %s files')%'PDF'
|
||||
|
||||
def get_metadata(self, stream, ftype):
|
||||
from calibre.ebooks.metadata.pdf import get_metadata
|
||||
return get_metadata(stream)
|
||||
|
||||
class LITMetadataReader(MetadataReaderPlugin):
|
||||
|
||||
name = 'Read LIT metadata'
|
||||
file_types = set(['lit'])
|
||||
description = _('Read metadata from %s files')%'LIT'
|
||||
|
||||
def get_metadata(self, stream, ftype):
|
||||
from calibre.ebooks.metadata.lit import get_metadata
|
||||
return get_metadata(stream)
|
||||
|
||||
class IMPMetadataReader(MetadataReaderPlugin):
|
||||
|
||||
name = 'Read IMP metadata'
|
||||
file_types = set(['imp'])
|
||||
description = _('Read metadata from %s files')%'IMP'
|
||||
author = 'Ashish Kulkarni'
|
||||
|
||||
def get_metadata(self, stream, ftype):
|
||||
from calibre.ebooks.metadata.imp import get_metadata
|
||||
return get_metadata(stream)
|
||||
|
||||
class RBMetadataReader(MetadataReaderPlugin):
|
||||
|
||||
name = 'Read RB metadata'
|
||||
file_types = set(['rb'])
|
||||
description = _('Read metadata from %s files')%'RB'
|
||||
author = 'Ashish Kulkarni'
|
||||
|
||||
def get_metadata(self, stream, ftype):
|
||||
from calibre.ebooks.metadata.rb import get_metadata
|
||||
return get_metadata(stream)
|
||||
|
||||
class EPUBMetadataReader(MetadataReaderPlugin):
|
||||
|
||||
name = 'Read EPUB metadata'
|
||||
file_types = set(['epub'])
|
||||
description = _('Read metadata from %s files')%'EPUB'
|
||||
|
||||
def get_metadata(self, stream, ftype):
|
||||
from calibre.ebooks.metadata.epub import get_metadata
|
||||
return get_metadata(stream)
|
||||
|
||||
class HTMLMetadataReader(MetadataReaderPlugin):
|
||||
|
||||
name = 'Read HTML metadata'
|
||||
file_types = set(['html'])
|
||||
description = _('Read metadata from %s files')%'HTML'
|
||||
|
||||
def get_metadata(self, stream, ftype):
|
||||
from calibre.ebooks.metadata.html import get_metadata
|
||||
return get_metadata(stream)
|
||||
|
||||
class MOBIMetadataReader(MetadataReaderPlugin):
|
||||
|
||||
name = 'Read MOBI metadata'
|
||||
file_types = set(['mobi'])
|
||||
description = _('Read metadata from %s files')%'MOBI'
|
||||
|
||||
def get_metadata(self, stream, ftype):
|
||||
from calibre.ebooks.mobi.reader import get_metadata
|
||||
return get_metadata(stream)
|
||||
|
||||
class ODTMetadataReader(MetadataReaderPlugin):
|
||||
|
||||
name = 'Read ODT metadata'
|
||||
file_types = set(['odt'])
|
||||
description = _('Read metadata from %s files')%'ODT'
|
||||
|
||||
def get_metadata(self, stream, ftype):
|
||||
from calibre.ebooks.metadata.odt import get_metadata
|
||||
return get_metadata(stream)
|
||||
|
||||
class LRXMetadataReader(MetadataReaderPlugin):
|
||||
|
||||
name = 'Read LRX metadata'
|
||||
file_types = set(['lrx'])
|
||||
description = _('Read metadata from %s files')%'LRX'
|
||||
|
||||
def get_metadata(self, stream, ftype):
|
||||
from calibre.ebooks.metadata.lrx import get_metadata
|
||||
return get_metadata(stream)
|
||||
|
||||
class ComicMetadataReader(MetadataReaderPlugin):
|
||||
|
||||
name = 'Read comic metadata'
|
||||
file_types = set(['cbr', 'cbz'])
|
||||
description = _('Extract cover from comic files')
|
||||
|
||||
def get_metadata(self, stream, ftype):
|
||||
if ftype == 'cbr':
|
||||
from calibre.libunrar import extract_member as extract_first
|
||||
else:
|
||||
from calibre.libunzip import extract_member as extract_first
|
||||
from calibre.ebooks.metadata import MetaInformation
|
||||
ret = extract_first(stream)
|
||||
mi = MetaInformation(None, None)
|
||||
if ret is not None:
|
||||
path, data = ret
|
||||
ext = os.path.splitext(path)[1][1:]
|
||||
mi.cover_data = (ext.lower(), data)
|
||||
return mi
|
||||
|
||||
class EPUBMetadataWriter(MetadataWriterPlugin):
|
||||
|
||||
name = 'Set EPUB metadata'
|
||||
file_types = set(['epub'])
|
||||
description = _('Set metadata in EPUB files')
|
||||
|
||||
def set_metadata(self, stream, mi, type):
|
||||
from calibre.ebooks.metadata.epub import set_metadata
|
||||
set_metadata(stream, mi)
|
||||
|
||||
class LRFMetadataWriter(MetadataWriterPlugin):
|
||||
|
||||
name = 'Set LRF metadata'
|
||||
file_types = set(['lrf'])
|
||||
description = _('Set metadata in LRF files')
|
||||
|
||||
def set_metadata(self, stream, mi, type):
|
||||
from calibre.ebooks.lrf.meta import set_metadata
|
||||
set_metadata(stream, mi)
|
||||
|
||||
class RTFMetadataWriter(MetadataWriterPlugin):
|
||||
|
||||
name = 'Set RTF metadata'
|
||||
file_types = set(['rtf'])
|
||||
description = _('Set metadata in RTF files')
|
||||
|
||||
def set_metadata(self, stream, mi, type):
|
||||
from calibre.ebooks.metadata.rtf import set_metadata
|
||||
set_metadata(stream, mi)
|
||||
|
||||
plugins = [HTML2ZIP]
|
||||
plugins += [x for x in list(locals().values()) if isinstance(x, type) and \
|
||||
x.__name__.endswith('MetadataReader')]
|
||||
plugins += [x for x in list(locals().values()) if isinstance(x, type) and \
|
||||
x.__name__.endswith('MetadataWriter')]
|
||||
320
src/calibre/customize/ui.py
Normal file
320
src/calibre/customize/ui.py
Normal file
@ -0,0 +1,320 @@
|
||||
from __future__ import with_statement
|
||||
__license__ = 'GPL v3'
|
||||
__copyright__ = '2008, Kovid Goyal <kovid at kovidgoyal.net>'
|
||||
|
||||
import os, shutil, traceback, functools, sys
|
||||
|
||||
from calibre.customize import Plugin, FileTypePlugin, MetadataReaderPlugin, \
|
||||
MetadataWriterPlugin
|
||||
from calibre.customize.builtins import plugins as builtin_plugins
|
||||
from calibre.constants import __version__, iswindows, isosx
|
||||
from calibre.ebooks.metadata import MetaInformation
|
||||
from calibre.utils.config import make_config_dir, Config, ConfigProxy, \
|
||||
plugin_dir, OptionParser
|
||||
|
||||
|
||||
version = tuple([int(x) for x in __version__.split('.')])
|
||||
|
||||
platform = 'linux'
|
||||
if iswindows:
|
||||
platform = 'windows'
|
||||
if isosx:
|
||||
platform = 'osx'
|
||||
|
||||
from zipfile import ZipFile
|
||||
|
||||
def _config():
|
||||
c = Config('customize')
|
||||
c.add_opt('plugins', default={}, help=_('Installed plugins'))
|
||||
c.add_opt('filetype_mapping', default={}, help=_('Mapping for filetype plugins'))
|
||||
c.add_opt('plugin_customization', default={}, help=_('Local plugin customization'))
|
||||
c.add_opt('disabled_plugins', default=set([]), help=_('Disabled plugins'))
|
||||
|
||||
return ConfigProxy(c)
|
||||
|
||||
config = _config()
|
||||
|
||||
|
||||
class InvalidPlugin(ValueError):
|
||||
pass
|
||||
|
||||
class PluginNotFound(ValueError):
|
||||
pass
|
||||
|
||||
def load_plugin(path_to_zip_file):
|
||||
'''
|
||||
Load plugin from zip file or raise InvalidPlugin error
|
||||
|
||||
:return: A :class:`Plugin` instance.
|
||||
'''
|
||||
print 'Loading plugin from', path_to_zip_file
|
||||
if not os.access(path_to_zip_file, os.R_OK):
|
||||
raise PluginNotFound
|
||||
zf = ZipFile(path_to_zip_file)
|
||||
for name in zf.namelist():
|
||||
if name.lower().endswith('plugin.py'):
|
||||
locals = {}
|
||||
exec zf.read(name) in locals
|
||||
for x in locals.values():
|
||||
if isinstance(x, type) and issubclass(x, Plugin):
|
||||
if x.minimum_calibre_version > version or \
|
||||
platform not in x.supported_platforms:
|
||||
continue
|
||||
|
||||
return x
|
||||
|
||||
raise InvalidPlugin(_('No valid plugin found in ')+path_to_zip_file)
|
||||
|
||||
_initialized_plugins = []
|
||||
_on_import = {}
|
||||
_on_preprocess = {}
|
||||
_on_postprocess = {}
|
||||
|
||||
|
||||
|
||||
def reread_filetype_plugins():
|
||||
global _on_import
|
||||
global _on_preprocess
|
||||
global _on_postprocess
|
||||
_on_import = {}
|
||||
_on_preprocess = {}
|
||||
_on_postprocess = {}
|
||||
|
||||
for plugin in _initialized_plugins:
|
||||
if isinstance(plugin, FileTypePlugin):
|
||||
for ft in plugin.file_types:
|
||||
if plugin.on_import:
|
||||
if not _on_import.has_key(ft):
|
||||
_on_import[ft] = []
|
||||
_on_import[ft].append(plugin)
|
||||
if plugin.on_preprocess:
|
||||
if not _on_preprocess.has_key(ft):
|
||||
_on_preprocess[ft] = []
|
||||
_on_preprocess[ft].append(plugin)
|
||||
if plugin.on_postprocess:
|
||||
if not _on_postprocess.has_key(ft):
|
||||
_on_postprocess[ft] = []
|
||||
_on_postprocess[ft].append(plugin)
|
||||
|
||||
_metadata_readers = {}
|
||||
_metadata_writers = {}
|
||||
def reread_metadata_plugins():
    '''
    Rebuild the metadata reader/writer registries from
    _initialized_plugins. Each maps a file extension to the single
    plugin responsible for it.
    '''
    global _metadata_readers
    global _metadata_writers
    _metadata_readers = {}
    # BUG FIX: _metadata_writers was declared global but never reset, so
    # writers for removed/disabled plugins survived re-initialization
    _metadata_writers = {}
    for plugin in _initialized_plugins:
        if isinstance(plugin, MetadataReaderPlugin):
            for ft in plugin.file_types:
                _metadata_readers[ft] = plugin
        elif isinstance(plugin, MetadataWriterPlugin):
            for ft in plugin.file_types:
                _metadata_writers[ft] = plugin
|
||||
|
||||
def get_file_type_metadata(stream, ftype):
    '''
    Read metadata from stream with the registered reader plugin for
    ftype. Best effort: any failure yields an empty MetaInformation.
    '''
    mi = MetaInformation(None, None)
    normalized = ftype.lower().strip()
    try:
        reader = _metadata_readers[normalized]
        if not is_disabled(reader):
            with reader:
                mi = reader.get_metadata(stream, normalized)
    except:
        # Deliberately best-effort: fall through to the empty result
        pass
    return mi
|
||||
|
||||
def set_file_type_metadata(stream, mi, ftype):
    '''
    Write metadata mi into stream using the registered writer plugin
    for ftype. Failures are printed, never raised.
    '''
    normalized = ftype.lower().strip()
    try:
        writer = _metadata_writers[normalized]
        if not is_disabled(writer):
            with writer:
                writer.set_metadata(stream, mi, normalized)
    except:
        traceback.print_exc()
|
||||
|
||||
def _run_filetype_plugins(path_to_file, ft=None, occasion='preprocess'):
|
||||
occasion = {'import':_on_import, 'preprocess':_on_preprocess,
|
||||
'postprocess':_on_postprocess}[occasion]
|
||||
customization = config['plugin_customization']
|
||||
if ft is None:
|
||||
ft = os.path.splitext(path_to_file)[-1].lower().replace('.', '')
|
||||
nfp = path_to_file
|
||||
for plugin in occasion.get(ft, []):
|
||||
if is_disabled(plugin):
|
||||
continue
|
||||
plugin.site_customization = customization.get(plugin.name, '')
|
||||
with plugin:
|
||||
try:
|
||||
nfp = plugin.run(path_to_file)
|
||||
except:
|
||||
print 'Running file type plugin %s failed with traceback:'%plugin.name
|
||||
traceback.print_exc()
|
||||
x = lambda j : os.path.normpath(os.path.normcase(j))
|
||||
if occasion == 'postprocess' and x(nfp) != x(path_to_file):
|
||||
shutil.copyfile(nfp, path_to_file)
|
||||
nfp = path_to_file
|
||||
return nfp
|
||||
|
||||
# Convenience wrappers that pre-bind the occasion argument of
# _run_filetype_plugins for the three plugin invocation points.
run_plugins_on_import = functools.partial(_run_filetype_plugins,
                                          occasion='import')
run_plugins_on_preprocess = functools.partial(_run_filetype_plugins,
                                              occasion='preprocess')
run_plugins_on_postprocess = functools.partial(_run_filetype_plugins,
                                               occasion='postprocess')
|
||||
|
||||
|
||||
def initialize_plugin(plugin, path_to_zip_file):
|
||||
try:
|
||||
return plugin(path_to_zip_file)
|
||||
except Exception:
|
||||
print 'Failed to initialize plugin:', plugin.name, plugin.version
|
||||
tb = traceback.format_exc()
|
||||
raise InvalidPlugin((_('Initialization of plugin %s failed with traceback:')
|
||||
%tb) + '\n'+tb)
|
||||
|
||||
|
||||
def add_plugin(path_to_zip_file):
    '''
    Install a custom plugin: copy its zip file into the plugin
    directory, register it in the config and re-initialize all plugins.

    :return: the initialized plugin instance
    '''
    make_config_dir()
    plugin = initialize_plugin(load_plugin(path_to_zip_file), path_to_zip_file)
    plugins = config['plugins']
    zfp = os.path.join(plugin_dir, plugin.name+'.zip')
    # Replace any previously installed copy of this plugin
    if os.path.exists(zfp):
        os.remove(zfp)
    shutil.copyfile(path_to_zip_file, zfp)
    plugins[plugin.name] = zfp
    config['plugins'] = plugins
    initialize_plugins()
    return plugin
|
||||
|
||||
def remove_plugin(plugin_or_name):
    '''
    Uninstall a custom plugin by name or plugin instance. Builtin
    plugins are unaffected.

    :return: True if a plugin was removed, False otherwise
    '''
    name = getattr(plugin_or_name, 'name', plugin_or_name)
    plugins = config['plugins']
    removed = False
    # Idiom fix: membership test on the dict instead of plugins.keys()
    if name in plugins:
        removed = True
        zfp = plugins[name]
        if os.path.exists(zfp):
            os.remove(zfp)
        plugins.pop(name)
        config['plugins'] = plugins
        initialize_plugins()
    return removed
|
||||
|
||||
def is_disabled(plugin):
    '''Return True if the user has disabled this plugin.'''
    disabled = config['disabled_plugins']
    return plugin.name in disabled
|
||||
|
||||
def find_plugin(name):
    '''Return the initialized plugin with the given name, or None.'''
    for candidate in _initialized_plugins:
        if candidate.name == name:
            return candidate
    return None
|
||||
|
||||
def disable_plugin(plugin_or_name):
    '''
    Disable the given plugin (instance or name).

    :raises ValueError: if no such plugin exists or it cannot be
                        disabled
    '''
    x = getattr(plugin_or_name, 'name', plugin_or_name)
    plugin = find_plugin(x)
    # BUG FIX: an unknown name used to raise AttributeError on None
    if plugin is None:
        raise ValueError('No plugin named %s'%x)
    if not plugin.can_be_disabled:
        raise ValueError('Plugin %s cannot be disabled'%x)
    dp = config['disabled_plugins']
    dp.add(x)
    config['disabled_plugins'] = dp
|
||||
|
||||
def enable_plugin(plugin_or_name):
    '''Re-enable a previously disabled plugin; no-op if not disabled.'''
    name = getattr(plugin_or_name, 'name', plugin_or_name)
    disabled = config['disabled_plugins']
    # set.discard is a no-op when the name is absent
    disabled.discard(name)
    config['disabled_plugins'] = disabled
|
||||
|
||||
def initialize_plugins():
|
||||
global _initialized_plugins
|
||||
_initialized_plugins = []
|
||||
for zfp in list(config['plugins'].values()) + builtin_plugins:
|
||||
try:
|
||||
try:
|
||||
plugin = load_plugin(zfp) if not isinstance(zfp, type) else zfp
|
||||
except PluginNotFound:
|
||||
continue
|
||||
plugin = initialize_plugin(plugin, zfp if not isinstance(zfp, type) else zfp)
|
||||
_initialized_plugins.append(plugin)
|
||||
except:
|
||||
print 'Failed to initialize plugin...'
|
||||
traceback.print_exc()
|
||||
_initialized_plugins.sort(cmp=lambda x,y:cmp(x.priority, y.priority), reverse=True)
|
||||
reread_filetype_plugins()
|
||||
reread_metadata_plugins()
|
||||
|
||||
initialize_plugins()
|
||||
|
||||
def option_parser():
    '''Build the command line parser for the plugin management tool.'''
    parser = OptionParser(usage=_('''\
%prog options

Customize calibre by loading external plugins.
'''))
    # Data-driven registration keeps the option table easy to scan
    option_table = [
        (('-a', '--add-plugin'), dict(default=None,
            help=_('Add a plugin by specifying the path to the zip file containing it.'))),
        (('-r', '--remove-plugin'), dict(default=None,
            help=_('Remove a custom plugin by name. Has no effect on builtin plugins'))),
        (('--customize-plugin',), dict(default=None,
            help=_('Customize plugin. Specify name of plugin and customization string separated by a comma.'))),
        (('-l', '--list-plugins'), dict(default=False, action='store_true',
            help=_('List all installed plugins'))),
        (('--enable-plugin',), dict(default=None,
            help=_('Enable the named plugin'))),
        (('--disable-plugin',), dict(default=None,
            help=_('Disable the named plugin'))),
    ]
    for flags, kwargs in option_table:
        parser.add_option(*flags, **kwargs)
    return parser
|
||||
|
||||
def initialized_plugins():
    '''Return the list of all currently initialized plugin instances.'''
    return _initialized_plugins
|
||||
|
||||
def customize_plugin(plugin, custom):
    '''Persist the (stripped) customization string for plugin.'''
    customization = config['plugin_customization']
    customization[plugin.name] = custom.strip()
    config['plugin_customization'] = customization
|
||||
|
||||
def plugin_customization(plugin):
    '''Return the stored customization string for plugin ('' if none).'''
    customization = config['plugin_customization']
    return customization.get(plugin.name, '')
|
||||
|
||||
def main(args=sys.argv):
|
||||
parser = option_parser()
|
||||
if len(args) < 2:
|
||||
parser.print_help()
|
||||
return 1
|
||||
opts, args = parser.parse_args(args)
|
||||
if opts.add_plugin is not None:
|
||||
plugin = add_plugin(opts.add_plugin)
|
||||
print 'Plugin added:', plugin.name, plugin.version
|
||||
if opts.remove_plugin is not None:
|
||||
if remove_plugin(opts.remove_plugin):
|
||||
print 'Plugin removed'
|
||||
else:
|
||||
print 'No custom pluginnamed', opts.remove_plugin
|
||||
if opts.customize_plugin is not None:
|
||||
name, custom = opts.customize_plugin.split(',')
|
||||
plugin = find_plugin(name.strip())
|
||||
if plugin is None:
|
||||
print 'No plugin with the name %s exists'%name
|
||||
return 1
|
||||
customize_plugin(plugin, custom)
|
||||
if opts.enable_plugin is not None:
|
||||
enable_plugin(opts.enable_plugin.strip())
|
||||
if opts.disable_plugin is not None:
|
||||
disable_plugin(opts.disable_plugin.strip())
|
||||
if opts.list_plugins:
|
||||
fmt = '%-15s%-20s%-15s%-15s%s'
|
||||
print fmt%tuple(('Type|Name|Version|Disabled|Site Customization'.split('|')))
|
||||
print
|
||||
for plugin in initialized_plugins():
|
||||
print fmt%(
|
||||
plugin.type, plugin.name,
|
||||
plugin.version, is_disabled(plugin),
|
||||
plugin_customization(plugin)
|
||||
)
|
||||
print '\t', plugin.description
|
||||
if plugin.is_customizable():
|
||||
print '\t', plugin.customization_help()
|
||||
print
|
||||
|
||||
return 0
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main())
|
||||
@ -21,8 +21,10 @@ Run an embedded python interpreter.
|
||||
'Module specifications are of the form full.name.of.module,path_to_module.py', default=None
|
||||
)
|
||||
parser.add_option('-c', '--command', help='Run python code.', default=None)
|
||||
parser.add_option('-g', '--gui', default=False, action='store_true',
|
||||
help='Run the GUI',)
|
||||
parser.add_option('--migrate', action='store_true', default=False,
|
||||
help='Migrate old database. Needs two arguments. Path to library1.db and path to new library folder.', default=False)
|
||||
help='Migrate old database. Needs two arguments. Path to library1.db and path to new library folder.')
|
||||
return parser
|
||||
|
||||
def update_zipfile(zipfile, mod, path):
|
||||
@ -72,7 +74,10 @@ def migrate(old, new):
|
||||
|
||||
def main(args=sys.argv):
|
||||
opts, args = option_parser().parse_args(args)
|
||||
if opts.update_module:
|
||||
if opts.gui:
|
||||
from calibre.gui2.main import main
|
||||
main(['calibre'])
|
||||
elif opts.update_module:
|
||||
mod, path = opts.update_module.partition(',')[0], opts.update_module.partition(',')[-1]
|
||||
update_module(mod, os.path.expanduser(path))
|
||||
elif opts.command:
|
||||
|
||||
@ -1,5 +1,6 @@
|
||||
__license__ = 'GPL v3'
|
||||
__copyright__ = '2008, Kovid Goyal <kovid at kovidgoyal.net>'
|
||||
|
||||
'''
|
||||
Device drivers.
|
||||
'''
|
||||
@ -8,8 +9,9 @@ def devices():
|
||||
from calibre.devices.prs500.driver import PRS500
|
||||
from calibre.devices.prs505.driver import PRS505
|
||||
from calibre.devices.prs700.driver import PRS700
|
||||
from calibre.devices.cybookg3.driver import CYBOOKG3
|
||||
#from calibre.devices.kindle.driver import KINDLE
|
||||
return (PRS500, PRS505, PRS700)
|
||||
return (PRS500, PRS505, PRS700, CYBOOKG3)
|
||||
|
||||
import time
|
||||
|
||||
|
||||
0
src/calibre/devices/cybookg3/__init__.py
Normal file
0
src/calibre/devices/cybookg3/__init__.py
Normal file
78
src/calibre/devices/cybookg3/books.py
Normal file
78
src/calibre/devices/cybookg3/books.py
Normal file
@ -0,0 +1,78 @@
|
||||
__license__ = 'GPL v3'
|
||||
__copyright__ = '2009, John Schember <john at nachtimwald.com'
|
||||
|
||||
'''
|
||||
'''
|
||||
import os, fnmatch, time
|
||||
|
||||
from calibre.devices.interface import BookList as _BookList
|
||||
|
||||
EBOOK_DIR = "eBooks"
|
||||
EBOOK_TYPES = ['mobi', 'prc', 'html', 'pdf', 'rtf', 'txt']
|
||||
|
||||
class Book(object):
    # Lightweight record of one ebook file on the device's filesystem.
    def __init__(self, path, title, authors):
        self.title = title
        self.authors = authors
        # Size in bytes, taken from the file itself
        self.size = os.path.getsize(path)
        # File creation time as a UTC struct_time
        self.datetime = time.gmtime(os.path.getctime(path))
        self.path = path
        self.thumbnail = None
        self.tags = []

    # NOTE(review): via the Python 2 apply() builtin this evaluates to
    # None at class-creation time and is then shadowed by the instance
    # attribute set in __init__ — it appears vestigial; confirm before
    # removing.
    @apply
    def thumbnail():
        return None

    def __str__(self):
        """ Return a utf-8 encoded string with title author and path information """
        return self.title.encode('utf-8') + " by " + \
                self.authors.encode('utf-8') + " at " + self.path.encode('utf-8')
|
||||
|
||||
|
||||
class BookList(_BookList):
    # Book list built by scanning the device's EBOOK_DIR for files with
    # supported extensions.
    def __init__(self, mountpath):
        self._mountpath = mountpath
        _BookList.__init__(self)
        self.return_books(mountpath)

    def return_books(self, mountpath):
        # Get all books in all directories under the root EBOOK_DIR directory
        for path, dirs, files in os.walk(os.path.join(mountpath, EBOOK_DIR)):
            # Filter out anything that isn't in the list of supported ebook types
            for book_type in EBOOK_TYPES:
                for filename in fnmatch.filter(files, '*.%s' % (book_type)):
                    book_title = ''
                    book_author = ''
                    # Calibre uses a specific format for file names. They take the form
                    # title_-_author_number.extention We want to see if the file name is
                    # in this format.
                    if fnmatch.fnmatchcase(filename, '*_-_*.*'):
                        # Get the title and author from the file name
                        title, sep, author = filename.rpartition('_-_')
                        author, sep, ext = author.rpartition('_')
                        book_title = title.replace('_', ' ')
                        book_author = author.replace('_', ' ')
                    # if the filename did not match just set the title to
                    # the filename without the extension
                    else:
                        book_title = os.path.splitext(filename)[0].replace('_', ' ')

                    self.append(Book(os.path.join(path, filename), book_title, book_author))

    def add_book(self, path, title):
        # Register a newly uploaded file; the author is unknown here
        self.append(Book(path, title, ""))

    def remove_book(self, path):
        # Remove the first entry whose stored path is a suffix of the
        # given path (stored paths may be relative to the mount point)
        for book in self:
            if path.endswith(book.path):
                self.remove(book)
                break

    def supports_tags(self):
        ''' Return True if the the device supports tags (collections) for this book list. '''
        return False

    def set_tags(self, book, tags):
        pass
|
||||
|
||||
325
src/calibre/devices/cybookg3/driver.py
Normal file
325
src/calibre/devices/cybookg3/driver.py
Normal file
@ -0,0 +1,325 @@
|
||||
__license__ = 'GPL v3'
|
||||
__copyright__ = '2009, John Schember <john at nachtimwald.com'
|
||||
|
||||
'''
|
||||
Device driver for Bookeen's Cybook Gen 3
|
||||
'''
|
||||
import os, fnmatch, shutil, time
|
||||
from itertools import cycle
|
||||
|
||||
from calibre.devices.interface import Device
|
||||
from calibre.devices.errors import DeviceError, FreeSpaceError
|
||||
|
||||
from calibre.devices.cybookg3.books import BookList, EBOOK_DIR, EBOOK_TYPES
|
||||
from calibre import iswindows, islinux, isosx, __appname__
|
||||
|
||||
class CYBOOKG3(Device):
|
||||
# Ordered list of supported formats
|
||||
FORMATS = EBOOK_TYPES
|
||||
VENDOR_ID = 0x0bda
|
||||
PRODUCT_ID = 0x0703
|
||||
BCD = 0x110
|
||||
#THUMBNAIL_HEIGHT = 68 # Height for thumbnails on device
|
||||
|
||||
MAIN_MEMORY_VOLUME_LABEL = 'Cybook Gen 3 Main Memory'
|
||||
STORAGE_CARD_VOLUME_LABEL = 'Cybook Gen 3 Storage Card'
|
||||
|
||||
FDI_TEMPLATE = \
|
||||
'''
|
||||
<device>
|
||||
<match key="info.category" string="volume">
|
||||
<match key="@info.parent:@info.parent:@info.parent:@info.parent:usb.vendor_id" int="%(vendor_id)s">
|
||||
<match key="@info.parent:@info.parent:@info.parent:@info.parent:usb.product_id" int="%(product_id)s">
|
||||
<match key="@info.parent:@info.parent:@info.parent:@info.parent:usb.device_revision_bcd" int="%(bcd)s">
|
||||
<match key="volume.is_partition" bool="false">
|
||||
<merge key="volume.label" type="string">%(main_memory)s</merge>
|
||||
<merge key="%(app)s.mainvolume" type="string">%(deviceclass)s</merge>
|
||||
</match>
|
||||
</match>
|
||||
</match>
|
||||
</match>
|
||||
</match>
|
||||
</device>
|
||||
<device>
|
||||
<match key="info.category" string="volume">
|
||||
<match key="@info.parent:@info.parent:@info.parent:@info.parent:usb.vendor_id" int="%(vendor_id)s">
|
||||
<match key="@info.parent:@info.parent:@info.parent:@info.parent:usb.product_id" int="%(product_id)s">
|
||||
<match key="@info.parent:@info.parent:@info.parent:@info.parent:usb.device_revision_bcd" int="%(bcd)s">
|
||||
<match key="volume.is_partition" bool="true">
|
||||
<merge key="volume.label" type="string">%(storage_card)s</merge>
|
||||
<merge key="%(app)s.cardvolume" type="string">%(deviceclass)s</merge>
|
||||
</match>
|
||||
</match>
|
||||
</match>
|
||||
</match>
|
||||
</match>
|
||||
</device>
|
||||
'''
|
||||
|
||||
|
||||
    def __init__(self, key='-1', log_packets=False, report_progress=None) :
        # key/log_packets/report_progress are accepted for interface
        # compatibility with other drivers but are not stored here —
        # presumably the reporter is attached later via
        # set_progress_reporter; TODO confirm.
        self._main_prefix = self._card_prefix = None
|
||||
|
||||
    @classmethod
    def get_fdi(cls):
        '''Return the HAL FDI description used to label this device's
        volumes on linux (fills FDI_TEMPLATE with the USB ids).'''
        return cls.FDI_TEMPLATE%dict(
                app=__appname__,
                deviceclass=cls.__name__,
                vendor_id=hex(cls.VENDOR_ID),
                product_id=hex(cls.PRODUCT_ID),
                bcd=hex(cls.BCD),
                main_memory=cls.MAIN_MEMORY_VOLUME_LABEL,
                storage_card=cls.STORAGE_CARD_VOLUME_LABEL,
                )
|
||||
|
||||
    def set_progress_reporter(self, report_progress):
        '''Store the callback used to report operation progress.'''
        self.report_progress = report_progress
|
||||
|
||||
    def get_device_information(self, end_session=True):
        """
        Ask device for device information. See L{DeviceInfoQuery}.
        @return: (device name, device version, software version on device, mime type)
        """
        # This driver has no metadata channel to the device, so only the
        # class name is reported; the remaining fields are blank.
        return (self.__class__.__name__, '', '', '')
|
||||
|
||||
    def card_prefix(self, end_session=True):
        '''Return the mount point of the storage card, or None if absent.'''
        return self._card_prefix
|
||||
|
||||
    @classmethod
    def _windows_space(cls, prefix):
        '''
        Return (total_bytes, free_bytes) for the drive mounted at
        prefix using Win32 GetDiskFreeSpace; (0, 0) when prefix is None.
        Retries once after 3s if the drive reports "not ready".
        '''
        if prefix is None:
            return 0, 0
        win32file = __import__('win32file', globals(), locals(), [], -1)
        try:
            sectors_per_cluster, bytes_per_sector, free_clusters, total_clusters = \
                win32file.GetDiskFreeSpace(prefix[:-1])
        except Exception, err:
            if getattr(err, 'args', [None])[0] == 21: # Disk not ready
                time.sleep(3)
                sectors_per_cluster, bytes_per_sector, free_clusters, total_clusters = \
                    win32file.GetDiskFreeSpace(prefix[:-1])
            else: raise
        # Cluster count * cluster size gives bytes
        mult = sectors_per_cluster * bytes_per_sector
        return total_clusters * mult, free_clusters * mult
|
||||
|
||||
def total_space(self, end_session=True):
|
||||
msz = csz = 0
|
||||
print self._main_prefix
|
||||
if not iswindows:
|
||||
if self._main_prefix is not None:
|
||||
stats = os.statvfs(self._main_prefix)
|
||||
msz = stats.f_frsize * (stats.f_blocks + stats.f_bavail - stats.f_bfree)
|
||||
if self._card_prefix is not None:
|
||||
stats = os.statvfs(self._card_prefix)
|
||||
csz = stats.f_frsize * (stats.f_blocks + stats.f_bavail - stats.f_bfree)
|
||||
else:
|
||||
msz = self._windows_space(self._main_prefix)[0]
|
||||
csz = self._windows_space(self._card_prefix)[0]
|
||||
|
||||
return (msz, 0, csz)
|
||||
|
||||
    def free_space(self, end_session=True):
        '''
        Return free space in bytes as the 3-tuple
        (main memory, 0, storage card); the middle slot is unused by
        this driver.
        '''
        msz = csz = 0
        if not iswindows:
            if self._main_prefix is not None:
                stats = os.statvfs(self._main_prefix)
                msz = stats.f_frsize * stats.f_bavail
            if self._card_prefix is not None:
                stats = os.statvfs(self._card_prefix)
                csz = stats.f_frsize * stats.f_bavail
        else:
            msz = self._windows_space(self._main_prefix)[1]
            csz = self._windows_space(self._card_prefix)[1]

        return (msz, 0, csz)
|
||||
|
||||
def books(self, oncard=False, end_session=True):
|
||||
if oncard and self._card_prefix is None:
|
||||
return []
|
||||
prefix = self._card_prefix if oncard else self._main_prefix
|
||||
bl = BookList(prefix)
|
||||
return bl
|
||||
|
||||
    def upload_books(self, files, names, on_card=False, end_session=True):
        '''
        Copy the given files into EBOOK_DIR on main memory or the card,
        after a free space check (1MB reserve on card, 2MB in main
        memory).

        :param files: paths of source files on the host
        :param names: destination file names, consumed pairwise with files
        :return: list of (destination_path, on_card) tuples
        :raises ValueError: if on_card is requested with no card present
        :raises FreeSpaceError: if the books do not fit
        '''
        if on_card and not self._card_prefix:
            raise ValueError(_('The reader has no storage card connected.'))

        if not on_card:
            path = os.path.join(self._main_prefix, EBOOK_DIR)
        else:
            path = os.path.join(self._card_prefix, EBOOK_DIR)

        sizes = map(os.path.getsize, files)
        size = sum(sizes)

        if on_card and size > self.free_space()[2] - 1024*1024:
            raise FreeSpaceError("There is insufficient free space "+\
                                          "on the storage card")
        if not on_card and size > self.free_space()[0] - 2*1024*1024:
            raise FreeSpaceError("There is insufficient free space " +\
                                         "in main memory")

        paths = []
        names = iter(names)

        for infile in files:
            filepath = os.path.join(path, names.next())
            paths.append(filepath)

            # copy2 preserves timestamps and permission bits
            shutil.copy2(infile, filepath)

        return zip(paths, cycle([on_card]))
|
||||
|
||||
@classmethod
|
||||
def add_books_to_metadata(cls, locations, metadata, booklists):
|
||||
for location in locations:
|
||||
path = location[0]
|
||||
on_card = 1 if location[1] else 0
|
||||
booklists[on_card].add_book(path, os.path.basename(path))
|
||||
|
||||
    def delete_books(self, paths, end_session=True):
        '''
        Delete the given ebooks plus their auxiliary data: a matching
        .mbp file and any device-generated *.t2b thumbnail files in the
        same directory tree.
        '''
        for path in paths:
            if os.path.exists(path):
                # Delete the ebook
                os.unlink(path)

                filepath, ext = os.path.splitext(path)
                basepath, filename = os.path.split(filepath)

                # Delete the ebook auxiliary file
                if os.path.exists(filepath + '.mbp'):
                    os.unlink(filepath + '.mbp')

                # Delete the thumbnails file auto generated for the ebook
                for p, d, files in os.walk(basepath):
                    for filen in fnmatch.filter(files, filename + "*.t2b"):
                        os.unlink(os.path.join(p, filen))
|
||||
|
||||
@classmethod
|
||||
def remove_books_from_metadata(cls, paths, booklists):
|
||||
for path in paths:
|
||||
for bl in booklists:
|
||||
bl.remove_book(path)
|
||||
|
||||
    def sync_booklists(self, booklists, end_session=True):
        # There is no meta data on the device to update. The device is treated
        # as a mass storage device and does not use a meta data xml file like
        # the Sony Readers.
        pass
|
||||
|
||||
    def get_file(self, path, outfile, end_session=True):
        '''
        Copy a file from the device into the open file object outfile,
        streaming in 10MB chunks.
        '''
        path = self.munge_path(path)
        src = open(path, 'rb')
        # NOTE(review): src is never closed explicitly; relies on
        # refcounting — consider closing it.
        shutil.copyfileobj(src, outfile, 10*1024*1024)
|
||||
|
||||
    def munge_path(self, path):
        '''
        Convert a device-relative path ('/...' or 'card:...') into an
        absolute path on the mounted filesystem; absolute paths already
        under a mount prefix pass through unchanged.
        '''
        if path.startswith('/') and not (path.startswith(self._main_prefix) or \
                (self._card_prefix and path.startswith(self._card_prefix))):
            path = self._main_prefix + path[1:]
        elif path.startswith('card:'):
            # _card_prefix ends with a separator, so drop its last char
            # before substituting
            path = path.replace('card:', self._card_prefix[:-1])
        return path
|
||||
|
||||
|
||||
def _windows_match_device(self, device_id):
|
||||
device_id = device_id.upper()
|
||||
vid, pid = hex(cls.VENDOR_ID)[2:], hex(cls.PRODUCT_ID)[2:]
|
||||
while len(vid) < 4: vid = '0' + vid
|
||||
while len(pid) < 4: pid = '0' + pid
|
||||
if 'VID_'+vid in device_id and 'PID_'+pid in device_id:
|
||||
return True
|
||||
return False
|
||||
|
||||
    # This only supports Windows >= 2000
    def open_windows(self):
        '''
        Locate the device's disk drives via WMI, map them to drive
        letters and set _main_prefix/_card_prefix. The lowest disk
        index is treated as main memory.

        :raises DeviceError: if no matching drive is found.
        '''
        drives = []
        wmi = __import__('wmi', globals(), locals(), [], -1)
        c = wmi.WMI()
        for drive in c.Win32_DiskDrive():
            if self._windows_match_device(str(drive.PNPDeviceID)):
                if drive.Partitions == 0:
                    continue
                try:
                    partition = drive.associators("Win32_DiskDriveToDiskPartition")[0]
                    logical_disk = partition.associators('Win32_LogicalDiskToPartition')[0]
                    prefix = logical_disk.DeviceID+os.sep
                    drives.append((drive.Index, prefix))
                except IndexError:
                    # Partition has no mapped logical disk (yet)
                    continue

        if not drives:
            raise DeviceError(_('Unable to detect the %s disk drive. Try rebooting.')%self.__class__.__name__)

        # Sort by disk index so main memory comes before the card
        drives.sort(cmp=lambda a, b: cmp(a[0], b[0]))
        self._main_prefix = drives[0][1]
        if len(drives) > 1:
            self._card_prefix = drives[1][1]
|
||||
|
||||
def open_osx(self):
|
||||
raise NotImplementedError()
|
||||
|
||||
    def open_linux(self):
        '''
        Find and, if necessary, mount the device's volumes via HAL over
        D-Bus, setting _main_prefix and (optionally) _card_prefix.

        :raises DeviceError: if the main volume cannot be found or
                             mounted.
        '''
        import dbus
        bus = dbus.SystemBus()
        hm = dbus.Interface(bus.get_object("org.freedesktop.Hal", "/org/freedesktop/Hal/Manager"), "org.freedesktop.Hal.Manager")

        def conditional_mount(dev):
            # Mount the HAL volume unless it is already mounted; return
            # the mount point either way
            mmo = bus.get_object("org.freedesktop.Hal", dev)
            label = mmo.GetPropertyString('volume.label', dbus_interface='org.freedesktop.Hal.Device')
            is_mounted = mmo.GetPropertyString('volume.is_mounted', dbus_interface='org.freedesktop.Hal.Device')
            mount_point = mmo.GetPropertyString('volume.mount_point', dbus_interface='org.freedesktop.Hal.Device')
            fstype = mmo.GetPropertyString('volume.fstype', dbus_interface='org.freedesktop.Hal.Device')
            if is_mounted:
                return str(mount_point)
            # Mount private to this user, synchronous writes for safety
            mmo.Mount(label, fstype, ['umask=077', 'uid='+str(os.getuid()), 'sync'],
                        dbus_interface='org.freedesktop.Hal.Device.Volume')
            return os.path.normpath('/media/'+label)+'/'

        # Volumes were labelled by the FDI file from get_fdi()
        mm = hm.FindDeviceStringMatch(__appname__+'.mainvolume', self.__class__.__name__)
        if not mm:
            raise DeviceError(_('Unable to detect the %s disk drive. Try rebooting.')%(self.__class__.__name__,))
        self._main_prefix = None
        for dev in mm:
            try:
                self._main_prefix = conditional_mount(dev)+os.sep
                break
            except dbus.exceptions.DBusException:
                continue

        if not self._main_prefix:
            raise DeviceError('Could not open device for reading. Try a reboot.')

        # The storage card is optional; failures here are non-fatal
        self._card_prefix = None
        cards = hm.FindDeviceStringMatch(__appname__+'.cardvolume', self.__class__.__name__)

        for dev in cards:
            try:
                self._card_prefix = conditional_mount(dev)+os.sep
                break
            except:
                import traceback
                # NOTE(review): this prints the traceback module object,
                # not the traceback — probably meant
                # traceback.print_exc(); confirm intent.
                print traceback
                continue
|
||||
|
||||
def open(self):
|
||||
time.sleep(5)
|
||||
self._main_prefix = self._card_prefix = None
|
||||
if islinux:
|
||||
try:
|
||||
self.open_linux()
|
||||
except DeviceError:
|
||||
time.sleep(3)
|
||||
self.open_linux()
|
||||
if iswindows:
|
||||
try:
|
||||
self.open_windows()
|
||||
except DeviceError:
|
||||
time.sleep(3)
|
||||
self.open_windows()
|
||||
if isosx:
|
||||
try:
|
||||
self.open_osx()
|
||||
except DeviceError:
|
||||
time.sleep(3)
|
||||
self.open_osx()
|
||||
|
||||
@ -39,6 +39,18 @@ class Device(object):
|
||||
'''Return the FDI description of this device for HAL on linux.'''
|
||||
return ''
|
||||
|
||||
def open(self):
|
||||
'''
|
||||
Perform any device specific initialization. Called after the device is
|
||||
detected but before any other functions that communicate with the device.
|
||||
For example: For devices that present themselves as USB Mass storage
|
||||
devices, this method would be responsible for mounting the device or
|
||||
if the device has been automounted, for finding out where it has been
|
||||
mounted. The driver for the PRS505 has a implementation of this function
|
||||
that should serve as a good example for USB Mass storage devices.
|
||||
'''
|
||||
raise NotImplementedError()
|
||||
|
||||
def set_progress_reporter(self, report_progress):
|
||||
'''
|
||||
@param report_progress: Function that is called with a % progress
|
||||
|
||||
@ -4,17 +4,20 @@ __copyright__ = '2008, Kovid Goyal <kovid at kovidgoyal.net>'
|
||||
This module provides a thin ctypes based wrapper around libusb.
|
||||
"""
|
||||
|
||||
from ctypes import cdll, POINTER, byref, pointer, Structure, \
|
||||
from ctypes import cdll, POINTER, byref, pointer, Structure as _Structure, \
|
||||
c_ubyte, c_ushort, c_int, c_char, c_void_p, c_byte, c_uint
|
||||
from errno import EBUSY, ENOMEM
|
||||
|
||||
from calibre import iswindows, isosx, load_library, isfrozen
|
||||
from calibre import iswindows, isosx, load_library
|
||||
|
||||
_libusb_name = 'libusb'
|
||||
PATH_MAX = 511 if iswindows else 1024 if isosx else 4096
|
||||
if iswindows:
|
||||
Structure._pack_ = 1
|
||||
class Structure(_Structure):
|
||||
_pack_ = 1
|
||||
_libusb_name = 'libusb0'
|
||||
else:
|
||||
Structure = _Structure
|
||||
|
||||
try:
|
||||
try:
|
||||
|
||||
@ -60,7 +60,7 @@ class Book(object):
|
||||
rpath = book_metadata_field("path")
|
||||
id = book_metadata_field("id", formatter=int)
|
||||
sourceid = book_metadata_field("sourceid", formatter=int)
|
||||
size = book_metadata_field("size", formatter=int)
|
||||
size = book_metadata_field("size", formatter=lambda x : int(float(x)))
|
||||
# When setting this attribute you must use an epoch
|
||||
datetime = book_metadata_field("date", formatter=strptime, setter=strftime)
|
||||
|
||||
|
||||
@ -19,10 +19,10 @@ class File(object):
|
||||
self.is_readonly = not os.access(path, os.W_OK)
|
||||
self.ctime = stats.st_ctime
|
||||
self.wtime = stats.st_mtime
|
||||
self.size = stats.st_size
|
||||
if path.endswith(os.sep):
|
||||
self.size = stats.st_size
|
||||
if path.endswith(os.sep):
|
||||
path = path[:-1]
|
||||
self.path = path
|
||||
self.path = path
|
||||
self.name = os.path.basename(path)
|
||||
|
||||
|
||||
@ -64,9 +64,11 @@ class PRS505(Device):
|
||||
<match key="info.category" string="volume">
|
||||
<match key="@info.parent:@info.parent:@info.parent:@info.parent:usb.vendor_id" int="%(vendor_id)s">
|
||||
<match key="@info.parent:@info.parent:@info.parent:@info.parent:usb.product_id" int="%(product_id)s">
|
||||
<match key="volume.is_partition" bool="true">
|
||||
<merge key="volume.label" type="string">%(storage_card)s</merge>
|
||||
<merge key="%(app)s.cardvolume" type="string">%(deviceclass)s</merge>
|
||||
<match key="@info.parent:@info.parent:@info.parent:@info.parent:usb.device_revision_bcd" int="%(bcd)s">
|
||||
<match key="volume.is_partition" bool="true">
|
||||
<merge key="volume.label" type="string">%(storage_card)s</merge>
|
||||
<merge key="%(app)s.cardvolume" type="string">%(deviceclass)s</merge>
|
||||
</match>
|
||||
</match>
|
||||
</match>
|
||||
</match>
|
||||
@ -106,7 +108,10 @@ class PRS505(Device):
|
||||
@classmethod
|
||||
def get_osx_mountpoints(cls, raw=None):
|
||||
if raw is None:
|
||||
raw = subprocess.Popen('ioreg -w 0 -S -c IOMedia'.split(),
|
||||
ioreg = '/usr/sbin/ioreg'
|
||||
if not os.access(ioreg, os.X_OK):
|
||||
ioreg = 'ioreg'
|
||||
raw = subprocess.Popen((ioreg+' -w 0 -S -c IOMedia').split(),
|
||||
stdout=subprocess.PIPE).stdout.read()
|
||||
lines = raw.splitlines()
|
||||
names = {}
|
||||
|
||||
@ -43,7 +43,8 @@ class DeviceScanner(object):
|
||||
if iswindows:
|
||||
for device_id in self.devices:
|
||||
vid, pid = 'vid_%4.4x'%device.VENDOR_ID, 'pid_%4.4x'%device.PRODUCT_ID
|
||||
if vid in device_id and pid in device_id:
|
||||
rev = ('rev_%4.4x'%device.BCD).replace('a', ':') # Bug in winutil.get_usb_devices converts a to :
|
||||
if vid in device_id and pid in device_id and rev in device_id:
|
||||
return True
|
||||
return False
|
||||
else:
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
#!/usr/bin/env python
|
||||
from __future__ import with_statement
|
||||
__license__ = 'GPL v3'
|
||||
__copyright__ = '2008, Kovid Goyal kovid@kovidgoyal.net'
|
||||
__docformat__ = 'restructuredtext en'
|
||||
@ -6,10 +6,12 @@ __docformat__ = 'restructuredtext en'
|
||||
'''
|
||||
Conversion to EPUB.
|
||||
'''
|
||||
import sys, textwrap, re
|
||||
import sys, textwrap, re, os, uuid
|
||||
from itertools import cycle
|
||||
from calibre.utils.config import Config, StringConfig
|
||||
from calibre.utils.zipfile import ZipFile, ZIP_STORED
|
||||
from calibre.ebooks.html import config as common_config, tostring
|
||||
from lxml import etree
|
||||
|
||||
class DefaultProfile(object):
|
||||
|
||||
@ -36,6 +38,38 @@ def rules(stylesheets):
|
||||
if r.type == r.STYLE_RULE:
|
||||
yield r
|
||||
|
||||
def decrypt_font(key, path):
    '''
    De-obfuscate an embedded font in place: the first 1024 bytes are
    XORed with the cycled key bytes; the remainder of the file is
    written back unchanged.
    '''
    raw = open(path, 'rb').read()
    crypt = raw[:1024]
    # Repeat the key as many times as needed to cover the 1024 bytes
    key = cycle(iter(key))
    decrypt = ''.join([chr(ord(x)^key.next()) for x in crypt])
    with open(path, 'wb') as f:
        f.write(decrypt)
        f.write(raw[1024:])
|
||||
|
||||
def process_encryption(encfile, opf):
    '''
    Handle an EPUB META-INF/encryption.xml. Returns True when the only
    encryption present is Adobe font obfuscation (which is reversed in
    place, keyed on the book's uuid); returns False for any other
    encryption scheme (real DRM) or on error.
    '''
    key = None
    # The obfuscation key is derived from the package's urn:uuid
    # identifier found in the OPF
    m = re.search(r'(?i)(urn:uuid:[0-9a-f-]+)', open(opf, 'rb').read())
    if m:
        key = m.group(1)
        key = list(map(ord, uuid.UUID(key).bytes))
    try:
        root = etree.parse(encfile)
        for em in root.xpath('descendant::*[contains(name(), "EncryptionMethod")]'):
            algorithm = em.get('Algorithm', '')
            if algorithm != 'http://ns.adobe.com/pdf/enc#RC':
                # Anything other than Adobe font obfuscation is DRM we
                # cannot handle
                return False
            cr = em.getparent().xpath('descendant::*[contains(name(), "CipherReference")]')[0]
            uri = cr.get('URI')
            # URI is relative to the EPUB root (one level above META-INF)
            path = os.path.abspath(os.path.join(os.path.dirname(encfile), '..', *uri.split('/')))
            if os.path.exists(path):
                decrypt_font(key, path)
        return True
    except:
        import traceback
        traceback.print_exc()
        return False
|
||||
|
||||
def initialize_container(path_to_container, opf_name='metadata.opf'):
|
||||
'''
|
||||
Create an empty EPUB document, with a default skeleton.
|
||||
@ -54,10 +88,10 @@ def initialize_container(path_to_container, opf_name='metadata.opf'):
|
||||
zf.writestr('META-INF/container.xml', CONTAINER)
|
||||
return zf
|
||||
|
||||
def config(defaults=None):
|
||||
def config(defaults=None, name='epub'):
|
||||
desc = _('Options to control the conversion to EPUB')
|
||||
if defaults is None:
|
||||
c = Config('epub', desc)
|
||||
c = Config(name, desc)
|
||||
else:
|
||||
c = StringConfig(defaults, desc)
|
||||
|
||||
|
||||
@ -12,12 +12,13 @@ from contextlib import nested
|
||||
|
||||
from calibre import extract, walk
|
||||
from calibre.ebooks import DRMError
|
||||
from calibre.ebooks.epub import config as common_config
|
||||
from calibre.ebooks.epub import config as common_config, process_encryption
|
||||
from calibre.ebooks.epub.from_html import convert as html2epub, find_html_index
|
||||
from calibre.ptempfile import TemporaryDirectory
|
||||
from calibre.ebooks.metadata import MetaInformation
|
||||
from calibre.ebooks.metadata.opf2 import OPFCreator
|
||||
from calibre.utils.zipfile import ZipFile
|
||||
from calibre.customize.ui import run_plugins_on_preprocess
|
||||
|
||||
def lit2opf(path, tdir, opts):
|
||||
from calibre.ebooks.lit.reader import LitReader
|
||||
@ -30,7 +31,7 @@ def lit2opf(path, tdir, opts):
|
||||
|
||||
def mobi2opf(path, tdir, opts):
|
||||
from calibre.ebooks.mobi.reader import MobiReader
|
||||
print 'Exploding MOBI file:', path
|
||||
print 'Exploding MOBI file:', path.encode('utf-8') if isinstance(path, unicode) else path
|
||||
reader = MobiReader(path)
|
||||
reader.extract_content(tdir)
|
||||
files = list(walk(tdir))
|
||||
@ -72,12 +73,19 @@ def epub2opf(path, tdir, opts):
|
||||
zf = ZipFile(path)
|
||||
zf.extractall(tdir)
|
||||
opts.chapter_mark = 'none'
|
||||
if os.path.exists(os.path.join(tdir, 'META-INF', 'encryption.xml')):
|
||||
raise DRMError(os.path.basename(path))
|
||||
encfile = os.path.join(tdir, 'META-INF', 'encryption.xml')
|
||||
opf = None
|
||||
for f in walk(tdir):
|
||||
if f.lower().endswith('.opf'):
|
||||
return f
|
||||
raise ValueError('%s is not a valid EPUB file'%path)
|
||||
opf = f
|
||||
break
|
||||
if opf and os.path.exists(encfile):
|
||||
if not process_encryption(encfile, opf):
|
||||
raise DRMError(os.path.basename(path))
|
||||
|
||||
if opf is None:
|
||||
raise ValueError('%s is not a valid EPUB file'%path)
|
||||
return opf
|
||||
|
||||
def odt2epub(path, tdir, opts):
|
||||
from calibre.ebooks.odt.to_oeb import Extract
|
||||
@ -109,7 +117,9 @@ def unarchive(path, tdir):
|
||||
return f, ext
|
||||
return find_html_index(files)
|
||||
|
||||
def any2epub(opts, path, notification=None):
|
||||
def any2epub(opts, path, notification=None, create_epub=True,
|
||||
oeb_cover=False, extract_to=None):
|
||||
path = run_plugins_on_preprocess(path)
|
||||
ext = os.path.splitext(path)[1]
|
||||
if not ext:
|
||||
raise ValueError('Unknown file type: '+path)
|
||||
@ -132,7 +142,9 @@ def any2epub(opts, path, notification=None):
|
||||
raise ValueError('Conversion from %s is not supported'%ext.upper())
|
||||
|
||||
print 'Creating EPUB file...'
|
||||
html2epub(path, opts, notification=notification)
|
||||
html2epub(path, opts, notification=notification,
|
||||
create_epub=create_epub, oeb_cover=oeb_cover,
|
||||
extract_to=extract_to)
|
||||
|
||||
def config(defaults=None):
|
||||
return common_config(defaults=defaults)
|
||||
@ -141,14 +153,14 @@ def config(defaults=None):
|
||||
def formats():
|
||||
return ['html', 'rar', 'zip', 'oebzip']+list(MAP.keys())
|
||||
|
||||
def option_parser():
|
||||
|
||||
return config().option_parser(usage=_('''\
|
||||
USAGE = _('''\
|
||||
%%prog [options] filename
|
||||
|
||||
Convert any of a large number of ebook formats to an epub file. Supported formats are: %s
|
||||
''')%formats()
|
||||
)
|
||||
Convert any of a large number of ebook formats to a %s file. Supported formats are: %s
|
||||
''')
|
||||
|
||||
def option_parser(usage=USAGE):
|
||||
return config().option_parser(usage=usage%('EPUB', formats()))
|
||||
|
||||
def main(args=sys.argv):
|
||||
parser = option_parser()
|
||||
|
||||
@ -32,14 +32,15 @@ Conversion of HTML/OPF files follows several stages:
|
||||
* The EPUB container is created.
|
||||
'''
|
||||
|
||||
import os, sys, cStringIO, logging, re
|
||||
import os, sys, cStringIO, logging, re, functools, shutil
|
||||
|
||||
from lxml.etree import XPath
|
||||
from lxml import html, etree
|
||||
from PyQt4.Qt import QApplication, QPixmap
|
||||
|
||||
from calibre.ebooks.html import Processor, merge_metadata, get_filelist,\
|
||||
opf_traverse, create_metadata, rebase_toc
|
||||
from calibre.ebooks.epub import config as common_config
|
||||
opf_traverse, create_metadata, rebase_toc, Link, parser
|
||||
from calibre.ebooks.epub import config as common_config, tostring
|
||||
from calibre.ptempfile import TemporaryDirectory
|
||||
from calibre.ebooks.metadata.toc import TOC
|
||||
from calibre.ebooks.metadata.opf2 import OPF
|
||||
@ -47,7 +48,46 @@ from calibre.ebooks.epub import initialize_container, PROFILES
|
||||
from calibre.ebooks.epub.split import split
|
||||
from calibre.ebooks.epub.fonts import Rationalizer
|
||||
from calibre.constants import preferred_encoding
|
||||
from calibre import walk
|
||||
from calibre.customize.ui import run_plugins_on_postprocess
|
||||
from calibre import walk, CurrentDir, to_unicode
|
||||
|
||||
content = functools.partial(os.path.join, u'content')
|
||||
|
||||
def remove_bad_link(element, attribute, link, pos):
|
||||
if attribute is not None:
|
||||
if element.tag in ['link']:
|
||||
element.getparent().remove(element)
|
||||
else:
|
||||
element.set(attribute, '')
|
||||
del element.attrib[attribute]
|
||||
|
||||
def check_links(opf_path, pretty_print):
|
||||
'''
|
||||
Find and remove all invalid links in the HTML files
|
||||
'''
|
||||
logger = logging.getLogger('html2epub')
|
||||
logger.info('\tChecking files for bad links...')
|
||||
pathtoopf = os.path.abspath(opf_path)
|
||||
with CurrentDir(os.path.dirname(pathtoopf)):
|
||||
opf = OPF(open(pathtoopf, 'rb'), os.path.dirname(pathtoopf))
|
||||
html_files = []
|
||||
for item in opf.itermanifest():
|
||||
if 'html' in item.get('media-type', '').lower():
|
||||
f = item.get('href').split('/')[-1].decode('utf-8')
|
||||
html_files.append(os.path.abspath(content(f)))
|
||||
|
||||
for path in html_files:
|
||||
base = os.path.dirname(path)
|
||||
root = html.fromstring(open(content(path), 'rb').read(), parser=parser)
|
||||
for element, attribute, link, pos in list(root.iterlinks()):
|
||||
link = to_unicode(link)
|
||||
plink = Link(link, base)
|
||||
bad = False
|
||||
if plink.path is not None and not os.path.exists(plink.path):
|
||||
bad = True
|
||||
if bad:
|
||||
remove_bad_link(element, attribute, link, pos)
|
||||
open(content(path), 'wb').write(tostring(root, pretty_print))
|
||||
|
||||
def find_html_index(files):
|
||||
'''
|
||||
@ -83,6 +123,10 @@ class HTMLProcessor(Processor, Rationalizer):
|
||||
if opts.verbose > 2:
|
||||
self.debug_tree('nocss')
|
||||
|
||||
if hasattr(self.body, 'xpath'):
|
||||
for script in list(self.body.xpath('descendant::script')):
|
||||
script.getparent().remove(script)
|
||||
|
||||
def convert_image(self, img):
|
||||
rpath = img.get('src', '')
|
||||
path = os.path.join(os.path.dirname(self.save_path()), *rpath.split('/'))
|
||||
@ -165,23 +209,22 @@ TITLEPAGE = '''\
|
||||
</head>
|
||||
<body>
|
||||
<div>
|
||||
<img src="%s" alt="cover" />
|
||||
<img src="%s" alt="cover" style="height: 100%%" />
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
'''
|
||||
|
||||
def create_cover_image(src, dest, screen_size):
|
||||
from PyQt4.Qt import QApplication, QImage, Qt
|
||||
if QApplication.instance() is None:
|
||||
app = QApplication([])
|
||||
app
|
||||
im = QImage()
|
||||
def create_cover_image(src, dest, screen_size, rescale_cover=True):
|
||||
try:
|
||||
from PyQt4.Qt import QImage, Qt
|
||||
if QApplication.instance() is None:
|
||||
QApplication([])
|
||||
im = QImage()
|
||||
im.load(src)
|
||||
if im.isNull():
|
||||
raise ValueError
|
||||
if screen_size is not None:
|
||||
raise ValueError('Invalid cover image')
|
||||
if rescale_cover and screen_size is not None:
|
||||
width, height = im.width(), im.height()
|
||||
dw, dh = (screen_size[0]-width)/float(width), (screen_size[1]-height)/float(height)
|
||||
delta = min(dw, dh)
|
||||
@ -189,7 +232,7 @@ def create_cover_image(src, dest, screen_size):
|
||||
nwidth = int(width + delta*(width))
|
||||
nheight = int(height + delta*(height))
|
||||
im = im.scaled(int(nwidth), int(nheight), Qt.IgnoreAspectRatio, Qt.SmoothTransformation)
|
||||
im.save(dest)
|
||||
im.save(dest)
|
||||
except:
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
@ -202,7 +245,6 @@ def process_title_page(mi, filelist, htmlfilemap, opts, tdir):
|
||||
if mi.cover:
|
||||
if f(filelist[0].path) == f(mi.cover):
|
||||
old_title_page = htmlfilemap[filelist[0].path]
|
||||
|
||||
#logger = logging.getLogger('html2epub')
|
||||
metadata_cover = mi.cover
|
||||
if metadata_cover and not os.path.exists(metadata_cover):
|
||||
@ -211,14 +253,15 @@ def process_title_page(mi, filelist, htmlfilemap, opts, tdir):
|
||||
cpath = '/'.join(('resources', '_cover_.jpg'))
|
||||
cover_dest = os.path.join(tdir, 'content', *cpath.split('/'))
|
||||
if metadata_cover is not None:
|
||||
if not create_cover_image(metadata_cover, cover_dest, opts.profile.screen_size):
|
||||
if not create_cover_image(metadata_cover, cover_dest,
|
||||
opts.profile.screen_size):
|
||||
metadata_cover = None
|
||||
|
||||
specified_cover = opts.cover
|
||||
if specified_cover and not os.path.exists(specified_cover):
|
||||
specified_cover = None
|
||||
if specified_cover is not None:
|
||||
if not create_cover_image(specified_cover, cover_dest, opts.profile.screen_size):
|
||||
if not create_cover_image(specified_cover, cover_dest,
|
||||
opts.profile.screen_size):
|
||||
specified_cover = None
|
||||
|
||||
cover = metadata_cover if specified_cover is None or (opts.prefer_metadata_cover and metadata_cover is not None) else specified_cover
|
||||
@ -233,9 +276,26 @@ def process_title_page(mi, filelist, htmlfilemap, opts, tdir):
|
||||
elif os.path.exists(cover_dest):
|
||||
os.remove(cover_dest)
|
||||
return None, old_title_page is not None
|
||||
|
||||
|
||||
def convert(htmlfile, opts, notification=None):
|
||||
def find_oeb_cover(htmlfile):
|
||||
if os.stat(htmlfile).st_size > 2048:
|
||||
return None
|
||||
match = re.search(r'(?i)<img[^<>]+src\s*=\s*[\'"](.+?)[\'"]', open(htmlfile, 'rb').read())
|
||||
if match:
|
||||
return match.group(1)
|
||||
|
||||
def condense_ncx(ncx_path):
|
||||
tree = etree.parse(ncx_path)
|
||||
for tag in tree.getroot().iter(tag=etree.Element):
|
||||
if tag.text:
|
||||
tag.text = tag.text.strip()
|
||||
if tag.tail:
|
||||
tag.tail = tag.tail.strip()
|
||||
compressed = etree.tostring(tree.getroot(), encoding='utf-8')
|
||||
open(ncx_path, 'wb').write(compressed)
|
||||
|
||||
def convert(htmlfile, opts, notification=None, create_epub=True,
|
||||
oeb_cover=False, extract_to=None):
|
||||
htmlfile = os.path.abspath(htmlfile)
|
||||
if opts.output is None:
|
||||
opts.output = os.path.splitext(os.path.basename(htmlfile))[0] + '.epub'
|
||||
@ -287,7 +347,7 @@ def convert(htmlfile, opts, notification=None):
|
||||
|
||||
title_page, has_title_page = process_title_page(mi, filelist, htmlfile_map, opts, tdir)
|
||||
spine = [htmlfile_map[f.path] for f in filelist]
|
||||
if title_page is not None:
|
||||
if not oeb_cover and title_page is not None:
|
||||
spine = [title_page] + spine
|
||||
mi.cover = None
|
||||
mi.cover_data = (None, None)
|
||||
@ -316,26 +376,55 @@ def convert(htmlfile, opts, notification=None):
|
||||
if opts.show_ncx:
|
||||
print toc
|
||||
split(opf_path, opts, stylesheet_map)
|
||||
check_links(opf_path, opts.pretty_print)
|
||||
|
||||
opf = OPF(opf_path, tdir)
|
||||
opf.remove_guide()
|
||||
if has_title_page:
|
||||
oeb_cover_file = None
|
||||
if oeb_cover and title_page is not None:
|
||||
oeb_cover_file = find_oeb_cover(os.path.join(tdir, 'content', title_page))
|
||||
if has_title_page or (oeb_cover and oeb_cover_file):
|
||||
opf.create_guide_element()
|
||||
opf.add_guide_item('cover', 'Cover', 'content/'+spine[0])
|
||||
if has_title_page and not oeb_cover:
|
||||
opf.add_guide_item('cover', 'Cover', 'content/'+spine[0])
|
||||
if oeb_cover and oeb_cover_file:
|
||||
opf.add_guide_item('cover', 'Cover', 'content/'+oeb_cover_file)
|
||||
|
||||
opf.add_path_to_manifest(os.path.join(tdir, 'content', 'resources', '_cover_.jpg'), 'image/jpeg')
|
||||
cpath = os.path.join(tdir, 'content', 'resources', '_cover_.jpg')
|
||||
if os.path.exists(cpath):
|
||||
opf.add_path_to_manifest(cpath, 'image/jpeg')
|
||||
with open(opf_path, 'wb') as f:
|
||||
raw = opf.render()
|
||||
if not raw.startswith('<?xml '):
|
||||
raw = '<?xml version="1.0" encoding="UTF-8"?>\n'+raw
|
||||
f.write(raw)
|
||||
epub = initialize_container(opts.output)
|
||||
epub.add_dir(tdir)
|
||||
ncx_path = os.path.join(os.path.dirname(opf_path), 'toc.ncx')
|
||||
if os.path.exists(ncx_path) and os.stat(ncx_path).st_size > opts.profile.flow_size:
|
||||
logger.info('Condensing NCX from %d bytes...'%os.stat(ncx_path).st_size)
|
||||
condense_ncx(ncx_path)
|
||||
if os.stat(ncx_path).st_size > opts.profile.flow_size:
|
||||
logger.warn('NCX still larger than allowed size at %d bytes. Menu based Table of Contents may not work on device.'%os.stat(ncx_path).st_size)
|
||||
|
||||
if create_epub:
|
||||
epub = initialize_container(opts.output)
|
||||
epub.add_dir(tdir)
|
||||
epub.close()
|
||||
run_plugins_on_postprocess(opts.output, 'epub')
|
||||
logger.info(_('Output written to ')+opts.output)
|
||||
|
||||
if opts.show_opf:
|
||||
print open(os.path.join(tdir, 'metadata.opf')).read()
|
||||
logger.info('Output written to %s'%opts.output)
|
||||
|
||||
if opts.extract_to is not None:
|
||||
epub.extractall(opts.extract_to)
|
||||
epub.close()
|
||||
if os.path.exists(opts.extract_to):
|
||||
shutil.rmtree(opts.extract_to)
|
||||
shutil.copytree(tdir, opts.extract_to)
|
||||
|
||||
if extract_to is not None:
|
||||
if os.path.exists(extract_to):
|
||||
shutil.rmtree(extract_to)
|
||||
shutil.copytree(tdir, extract_to)
|
||||
|
||||
|
||||
|
||||
def main(args=sys.argv):
|
||||
|
||||
@ -37,14 +37,16 @@ class UnsupportedFormatError(Exception):
|
||||
|
||||
class SpineItem(unicode):
|
||||
|
||||
def __init__(self, path):
|
||||
unicode.__init__(self, path)
|
||||
def __new__(cls, *args):
|
||||
obj = super(SpineItem, cls).__new__(cls, *args)
|
||||
path = args[0]
|
||||
raw = open(path, 'rb').read()
|
||||
raw, self.encoding = xml_to_unicode(raw)
|
||||
self.character_count = character_count(raw)
|
||||
self.start_page = -1
|
||||
self.pages = -1
|
||||
self.max_page = -1
|
||||
raw, obj.encoding = xml_to_unicode(raw)
|
||||
obj.character_count = character_count(raw)
|
||||
obj.start_page = -1
|
||||
obj.pages = -1
|
||||
obj.max_page = -1
|
||||
return obj
|
||||
|
||||
def html2opf(path, tdir, opts):
|
||||
opts = copy.copy(opts)
|
||||
@ -166,8 +168,12 @@ class EbookIterator(object):
|
||||
if bookmarks is None:
|
||||
bookmarks = self.bookmarks
|
||||
dat = self.serialize_bookmarks(bookmarks)
|
||||
if os.path.splitext(self.pathtoebook)[1].lower() == '.epub':
|
||||
zf = open(self.pathtoebook, 'r+b')
|
||||
if os.path.splitext(self.pathtoebook)[1].lower() == '.epub' and \
|
||||
os.access(self.pathtoebook, os.R_OK):
|
||||
try:
|
||||
zf = open(self.pathtoebook, 'r+b')
|
||||
except IOError:
|
||||
return
|
||||
zipf = ZipFile(zf, mode='a')
|
||||
for name in zipf.namelist():
|
||||
if name == 'META-INF/calibre_bookmarks.txt':
|
||||
|
||||
@ -7,7 +7,7 @@ __docformat__ = 'restructuredtext en'
|
||||
Split the flows in an epub file to conform to size limitations.
|
||||
'''
|
||||
|
||||
import os, math, logging, functools, collections, re, copy
|
||||
import os, math, logging, functools, collections, re, copy, sys
|
||||
|
||||
from lxml.etree import XPath as _XPath
|
||||
from lxml import etree, html
|
||||
@ -34,29 +34,61 @@ class SplitError(ValueError):
|
||||
|
||||
class Splitter(LoggingInterface):
|
||||
|
||||
def __init__(self, path, opts, stylesheet_map, always_remove=False):
|
||||
def __init__(self, path, opts, stylesheet_map, opf):
|
||||
LoggingInterface.__init__(self, logging.getLogger('htmlsplit'))
|
||||
self.setup_cli_handler(opts.verbose)
|
||||
self.path = path
|
||||
self.always_remove = always_remove
|
||||
self.always_remove = not opts.preserve_tag_structure or \
|
||||
os.stat(content(path)).st_size > 5*opts.profile.flow_size
|
||||
self.base = (os.path.splitext(path)[0].replace('%', '%%') + '_split_%d.html')
|
||||
self.opts = opts
|
||||
self.orig_size = os.stat(content(path)).st_size
|
||||
self.log_info('\tSplitting %s (%d KB)', path, self.orig_size/1024.)
|
||||
root = html.fromstring(open(content(path)).read())
|
||||
|
||||
self.page_breaks = []
|
||||
self.find_page_breaks(stylesheet_map[self.path], root)
|
||||
|
||||
self.trees = []
|
||||
self.page_breaks, self.trees = [], []
|
||||
self.split_size = 0
|
||||
self.split(root.getroottree())
|
||||
self.commit()
|
||||
self.log_info('\t\tSplit into %d parts.', len(self.trees))
|
||||
if self.opts.verbose:
|
||||
for f in self.files:
|
||||
self.log_info('\t\t\t%s - %d KB', f, os.stat(content(f)).st_size/1024.)
|
||||
|
||||
# Split on page breaks
|
||||
self.log_info('\tSplitting on page breaks...')
|
||||
if self.path in stylesheet_map:
|
||||
self.find_page_breaks(stylesheet_map[self.path], root)
|
||||
self.split_on_page_breaks(root.getroottree())
|
||||
trees = list(self.trees)
|
||||
|
||||
# Split any remaining over-sized trees
|
||||
if self.opts.profile.flow_size < sys.maxint:
|
||||
lt_found = False
|
||||
self.log_info('\tLooking for large trees...')
|
||||
for i, tree in enumerate(list(trees)):
|
||||
self.trees = []
|
||||
size = len(tostring(tree.getroot()))
|
||||
if size > self.opts.profile.flow_size:
|
||||
lt_found = True
|
||||
try:
|
||||
self.split_to_size(tree)
|
||||
except (SplitError, RuntimeError): # Splitting fails
|
||||
if not self.always_remove:
|
||||
self.always_remove = True
|
||||
self.split_to_size(tree)
|
||||
else:
|
||||
raise
|
||||
trees[i:i+1] = list(self.trees)
|
||||
if not lt_found:
|
||||
self.log_info('\tNo large trees found')
|
||||
|
||||
self.trees = trees
|
||||
self.was_split = len(self.trees) > 1
|
||||
if self.was_split:
|
||||
self.commit()
|
||||
self.log_info('\t\tSplit into %d parts.', len(self.trees))
|
||||
if self.opts.verbose:
|
||||
for f in self.files:
|
||||
self.log_info('\t\t\t%s - %d KB', f, os.stat(content(f)).st_size/1024.)
|
||||
self.fix_opf(opf)
|
||||
|
||||
self.trees = None
|
||||
|
||||
|
||||
def split_text(self, text, root, size):
|
||||
self.log_debug('\t\t\tSplitting text of length: %d'%len(text))
|
||||
@ -76,12 +108,7 @@ class Splitter(LoggingInterface):
|
||||
return ans
|
||||
|
||||
|
||||
def split(self, tree):
|
||||
'''
|
||||
Split ``tree`` into a *before* and *after* tree, preserving tag structure,
|
||||
but not duplicating any text. All tags that have had their text and tail
|
||||
removed have the attribute ``calibre_split`` set to 1.
|
||||
'''
|
||||
def split_to_size(self, tree):
|
||||
self.log_debug('\t\tSplitting...')
|
||||
root = tree.getroot()
|
||||
# Split large <pre> tags
|
||||
@ -108,10 +135,50 @@ class Splitter(LoggingInterface):
|
||||
if not self.always_remove:
|
||||
self.log_warn(_('\t\tToo much markup. Re-splitting without structure preservation. This may cause incorrect rendering.'))
|
||||
raise SplitError(self.path, root)
|
||||
tree2 = copy.deepcopy(tree)
|
||||
root2 = tree2.getroot()
|
||||
body, body2 = root.body, root2.body
|
||||
path = tree.getpath(split_point)
|
||||
|
||||
for t in self.do_split(tree, split_point, before):
|
||||
r = t.getroot()
|
||||
if self.is_page_empty(r):
|
||||
continue
|
||||
size = len(tostring(r))
|
||||
if size <= self.opts.profile.flow_size:
|
||||
self.trees.append(t)
|
||||
#print tostring(t.getroot(), pretty_print=True)
|
||||
self.log_debug('\t\t\tCommitted sub-tree #%d (%d KB)', len(self.trees), size/1024.)
|
||||
self.split_size += size
|
||||
else:
|
||||
self.split_to_size(t)
|
||||
|
||||
def is_page_empty(self, root):
|
||||
body = root.find('body')
|
||||
if body is None:
|
||||
return False
|
||||
txt = re.sub(r'\s+', '', html.tostring(body, method='text', encoding=unicode))
|
||||
if len(txt) > 4:
|
||||
#if len(txt) < 100:
|
||||
# print 1111111, html.tostring(body, method='html', encoding=unicode)
|
||||
return False
|
||||
for img in root.xpath('//img'):
|
||||
if img.get('style', '') != 'display:none':
|
||||
return False
|
||||
return True
|
||||
|
||||
def do_split(self, tree, split_point, before):
|
||||
'''
|
||||
Split ``tree`` into a *before* and *after* tree at ``split_point``,
|
||||
preserving tag structure, but not duplicating any text.
|
||||
All tags that have had their text and tail
|
||||
removed have the attribute ``calibre_split`` set to 1.
|
||||
|
||||
:param before: If True tree is split before split_point, otherwise after split_point
|
||||
:return: before_tree, after_tree
|
||||
'''
|
||||
path = tree.getpath(split_point)
|
||||
tree, tree2 = copy.deepcopy(tree), copy.deepcopy(tree)
|
||||
root = tree.getroot()
|
||||
root2 = tree2.getroot()
|
||||
body, body2 = root.body, root2.body
|
||||
split_point = root.xpath(path)[0]
|
||||
split_point2 = root2.xpath(path)[0]
|
||||
|
||||
def nix_element(elem, top=True):
|
||||
@ -129,7 +196,7 @@ class Splitter(LoggingInterface):
|
||||
elem.tail = u''
|
||||
elem.set(SPLIT_ATTR, '1')
|
||||
if elem.tag.lower() in ['ul', 'ol', 'dl', 'table', 'hr', 'img']:
|
||||
elem.set('style', 'display:none;')
|
||||
elem.set('style', 'display:none')
|
||||
|
||||
def fix_split_point(sp):
|
||||
sp.set('style', sp.get('style', '')+'page-break-before:avoid;page-break-after:avoid')
|
||||
@ -163,20 +230,35 @@ class Splitter(LoggingInterface):
|
||||
if not hit_split_point:
|
||||
nix_element(elem, top=False)
|
||||
|
||||
for t, r in [(tree, root), (tree2, root2)]:
|
||||
size = len(tostring(r))
|
||||
if size <= self.opts.profile.flow_size:
|
||||
self.trees.append(t)
|
||||
#print tostring(t.getroot(), pretty_print=True)
|
||||
self.log_debug('\t\t\tCommitted sub-tree #%d (%d KB)', len(self.trees), size/1024.)
|
||||
self.split_size += size
|
||||
else:
|
||||
self.split(t)
|
||||
return tree, tree2
|
||||
|
||||
|
||||
def split_on_page_breaks(self, orig_tree):
|
||||
ordered_ids = []
|
||||
for elem in orig_tree.xpath('//*[@id]'):
|
||||
id = elem.get('id')
|
||||
if id in self.page_break_ids:
|
||||
ordered_ids.append(self.page_breaks[self.page_break_ids.index(id)])
|
||||
|
||||
self.trees = []
|
||||
tree = orig_tree
|
||||
for pattern, before in ordered_ids:
|
||||
self.log_info('\t\tSplitting on page-break')
|
||||
elem = pattern(tree)
|
||||
if elem:
|
||||
before, after = self.do_split(tree, elem[0], before)
|
||||
self.trees.append(before)
|
||||
tree = after
|
||||
self.trees.append(tree)
|
||||
self.trees = [t for t in self.trees if not self.is_page_empty(t.getroot())]
|
||||
|
||||
|
||||
|
||||
def find_page_breaks(self, stylesheets, root):
|
||||
'''
|
||||
Find all elements that have either page-break-before or page-break-after set.
|
||||
Populates `self.page_breaks` with id based XPath selectors (for elements that don't
|
||||
have ids, an id is created).
|
||||
'''
|
||||
page_break_selectors = set([])
|
||||
for rule in rules(stylesheets):
|
||||
@ -204,16 +286,18 @@ class Splitter(LoggingInterface):
|
||||
|
||||
page_breaks = list(page_breaks)
|
||||
page_breaks.sort(cmp=lambda x,y : cmp(x.pb_order, y.pb_order))
|
||||
self.page_break_ids = []
|
||||
for i, x in enumerate(page_breaks):
|
||||
x.set('id', x.get('id', 'calibre_pb_%d'%i))
|
||||
self.page_breaks.append((XPath('//*[@id="%s"]'%x.get('id')), x.pb_before))
|
||||
id = x.get('id')
|
||||
self.page_breaks.append((XPath('//*[@id="%s"]'%id), x.pb_before))
|
||||
self.page_break_ids.append(id)
|
||||
|
||||
|
||||
def find_split_point(self, root):
|
||||
'''
|
||||
Find the tag at which to split the tree rooted at `root`.
|
||||
Search order is:
|
||||
* page breaks
|
||||
* Heading tags
|
||||
* <div> tags
|
||||
* <p> tags
|
||||
@ -229,19 +313,6 @@ class Splitter(LoggingInterface):
|
||||
elems[i].set(SPLIT_POINT_ATTR, '1')
|
||||
return elems[i]
|
||||
|
||||
page_breaks = []
|
||||
for x in self.page_breaks:
|
||||
pb = x[0](root)
|
||||
if pb:
|
||||
page_breaks.append(pb[0])
|
||||
|
||||
elem = pick_elem(page_breaks)
|
||||
if elem is not None:
|
||||
i = page_breaks.index(elem)
|
||||
return elem, self.page_breaks[i][1]
|
||||
|
||||
|
||||
|
||||
for path in (
|
||||
'//*[re:match(name(), "h[1-6]", "i")]',
|
||||
'/html/body/div',
|
||||
@ -315,6 +386,9 @@ class Splitter(LoggingInterface):
|
||||
frag = None
|
||||
if len(href) > 1:
|
||||
frag = href[1]
|
||||
if frag not in self.anchor_map:
|
||||
self.log_warning('\t\tUnable to re-map OPF link', href)
|
||||
continue
|
||||
new_file = self.anchor_map[frag]
|
||||
ref.set('href', 'content/'+new_file+('' if frag is None else ('#'+frag)))
|
||||
|
||||
@ -341,7 +415,11 @@ def fix_content_links(html_files, changes, opts):
|
||||
anchor = href[1] if len(href) > 1 else None
|
||||
href = href[0]
|
||||
if href in split_files:
|
||||
newf = anchor_maps[split_files.index(href)][anchor]
|
||||
try:
|
||||
newf = anchor_maps[split_files.index(href)][anchor]
|
||||
except:
|
||||
print '\t\tUnable to remap HTML link:', href, anchor
|
||||
continue
|
||||
frag = ('#'+anchor) if anchor else ''
|
||||
a.set('href', newf+frag)
|
||||
changed = True
|
||||
@ -354,52 +432,54 @@ def fix_ncx(path, changes):
|
||||
anchor_maps = [f.anchor_map for f in changes]
|
||||
tree = etree.parse(path)
|
||||
changed = False
|
||||
for content in tree.getroot().xpath('//x:content[@src]', namespaces={'x':"http://www.daisy.org/z3986/2005/ncx/"}):
|
||||
for content in tree.getroot().xpath('//x:content[@src]',
|
||||
namespaces={'x':"http://www.daisy.org/z3986/2005/ncx/"}):
|
||||
href = content.get('src')
|
||||
if not href.startswith('#'):
|
||||
href = href.split('#')
|
||||
anchor = href[1] if len(href) > 1 else None
|
||||
href = href[0].split('/')[-1]
|
||||
if href in split_files:
|
||||
newf = anchor_maps[split_files.index(href)][anchor]
|
||||
try:
|
||||
newf = anchor_maps[split_files.index(href)][anchor]
|
||||
except:
|
||||
print 'Unable to remap NCX link:', href, anchor
|
||||
frag = ('#'+anchor) if anchor else ''
|
||||
content.set('src', 'content/'+newf+frag)
|
||||
changed = True
|
||||
if changed:
|
||||
open(path, 'wb').write(etree.tostring(tree.getroot(), encoding='UTF-8', xml_declaration=True))
|
||||
|
||||
|
||||
def find_html_files(opf):
|
||||
'''
|
||||
Find all HTML files referenced by `opf`.
|
||||
'''
|
||||
html_files = []
|
||||
for item in opf.itermanifest():
|
||||
if 'html' in item.get('media-type', '').lower():
|
||||
f = item.get('href').split('/')[-1]
|
||||
f2 = f.replace('&', '%26')
|
||||
if not os.path.exists(content(f)) and os.path.exists(content(f2)):
|
||||
f = f2
|
||||
item.set('href', item.get('href').replace('&', '%26'))
|
||||
if os.path.exists(content(f)):
|
||||
html_files.append(f)
|
||||
return html_files
|
||||
|
||||
|
||||
def split(pathtoopf, opts, stylesheet_map):
|
||||
pathtoopf = os.path.abspath(pathtoopf)
|
||||
opf = OPF(open(pathtoopf, 'rb'), os.path.dirname(pathtoopf))
|
||||
|
||||
with CurrentDir(os.path.dirname(pathtoopf)):
|
||||
opf = OPF(open(pathtoopf, 'rb'), os.path.dirname(pathtoopf))
|
||||
html_files = []
|
||||
for item in opf.itermanifest():
|
||||
if 'html' in item.get('media-type', '').lower():
|
||||
f = item.get('href').split('/')[-1]
|
||||
f2 = f.replace('&', '%26')
|
||||
if not os.path.exists(content(f)) and os.path.exists(content(f2)):
|
||||
f = f2
|
||||
item.set('href', item.get('href').replace('&', '%26'))
|
||||
html_files.append(f)
|
||||
changes = []
|
||||
always_remove = not opts.preserve_tag_structure
|
||||
for f in html_files:
|
||||
if os.stat(content(f)).st_size > opts.profile.flow_size:
|
||||
try:
|
||||
changes.append(Splitter(f, opts, stylesheet_map,
|
||||
always_remove=(always_remove or \
|
||||
os.stat(content(f)).st_size > 5*opts.profile.flow_size)))
|
||||
except (SplitError, RuntimeError):
|
||||
if not always_remove:
|
||||
changes.append(Splitter(f, opts, stylesheet_map, always_remove=True))
|
||||
else:
|
||||
raise
|
||||
changes[-1].fix_opf(opf)
|
||||
html_files = find_html_files(opf)
|
||||
changes = [Splitter(f, opts, stylesheet_map, opf) for f in html_files]
|
||||
changes = [c for c in changes if c.was_split]
|
||||
|
||||
open(pathtoopf, 'wb').write(opf.render())
|
||||
fix_content_links(html_files, changes, opts)
|
||||
|
||||
for item in opf.itermanifest():
|
||||
if item.get('media-type', '') == 'application/x-dtbncx+xml':
|
||||
fix_ncx(item.get('href'), changes)
|
||||
break
|
||||
|
||||
open(pathtoopf, 'wb').write(opf.render())
|
||||
|
||||
@ -314,10 +314,22 @@ def opf_traverse(opf_reader, verbose=0, encoding=None):
|
||||
|
||||
|
||||
convert_entities = functools.partial(entity_to_unicode, exceptions=['quot', 'apos', 'lt', 'gt', 'amp'])
|
||||
_span_pat = re.compile('<span.*?</span>', re.DOTALL|re.IGNORECASE)
|
||||
|
||||
def sanitize_head(match):
|
||||
x = match.group(1)
|
||||
x = _span_pat.sub('', x)
|
||||
return '<head>\n'+x+'\n</head>'
|
||||
|
||||
class PreProcessor(object):
|
||||
PREPROCESS = [
|
||||
# Some idiotic HTML generators (Frontpage I'm looking at you)
|
||||
# Put all sorts of crap into <head>. This messes up lxml
|
||||
(re.compile(r'<head[^>]*>(.*?)</head>', re.IGNORECASE|re.DOTALL),
|
||||
sanitize_head),
|
||||
# Convert all entities, since lxml doesn't handle them well
|
||||
(re.compile(r'&(\S+?);'), convert_entities),
|
||||
|
||||
]
|
||||
|
||||
# Fix pdftohtml markup
|
||||
@ -446,7 +458,10 @@ class Parser(PreProcessor, LoggingInterface):
|
||||
def parse_html(self):
|
||||
''' Create lxml ElementTree from HTML '''
|
||||
self.log_info('\tParsing '+os.sep.join(self.htmlfile.path.split(os.sep)[-3:]))
|
||||
if self.htmlfile.is_binary:
|
||||
raise ValueError('Not a valid HTML file: '+self.htmlfile.path)
|
||||
src = open(self.htmlfile.path, 'rb').read().decode(self.htmlfile.encoding, 'replace').strip()
|
||||
src = src.replace('\x00', '')
|
||||
src = self.preprocess(src)
|
||||
# lxml chokes on unicode input when it contains encoding declarations
|
||||
for pat in ENCODING_PATS:
|
||||
@ -527,6 +542,7 @@ class Processor(Parser):
|
||||
|
||||
LINKS_PATH = XPath('//a[@href]')
|
||||
PIXEL_PAT = re.compile(r'([-]?\d+|[-]?\d*\.\d+)px')
|
||||
PAGE_PAT = re.compile(r'@page[^{]*?{[^}]*?}')
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
Parser.__init__(self, *args, **kwargs)
|
||||
@ -696,7 +712,9 @@ class Processor(Parser):
|
||||
return ''
|
||||
return '%fpt'%(72 * val/dpi)
|
||||
|
||||
return cls.PIXEL_PAT.sub(rescale, css)
|
||||
css = cls.PIXEL_PAT.sub(rescale, css)
|
||||
css = cls.PAGE_PAT.sub('', css)
|
||||
return css
|
||||
|
||||
def extract_css(self, parsed_sheets):
|
||||
'''
|
||||
@ -732,7 +750,12 @@ class Processor(Parser):
|
||||
self.log_error('Failed to open stylesheet: %s'%file)
|
||||
else:
|
||||
try:
|
||||
parsed_sheets[file] = self.css_parser.parseString(css)
|
||||
try:
|
||||
parsed_sheets[file] = self.css_parser.parseString(css)
|
||||
except ValueError:
|
||||
parsed_sheets[file] = \
|
||||
self.css_parser.parseString(\
|
||||
css.decode('utf8', 'replace'))
|
||||
except:
|
||||
parsed_sheets[file] = css.decode('utf8', 'replace')
|
||||
self.log_warning('Failed to parse stylesheet: %s'%file)
|
||||
@ -762,10 +785,15 @@ class Processor(Parser):
|
||||
class_counter = 0
|
||||
for font in self.root.xpath('//font'):
|
||||
try:
|
||||
size = int(font.attrib.pop('size', '3'))
|
||||
size = font.attrib.pop('size', '3')
|
||||
except:
|
||||
size = 3
|
||||
setting = 'font-size: %d%%;'%int((float(size)/3) * 100)
|
||||
size = '3'
|
||||
if size and size.strip() and size.strip()[0] in ('+', '-'):
|
||||
size = 3 + float(size) # Hack assumes basefont=3
|
||||
try:
|
||||
setting = 'font-size: %d%%;'%int((float(size)/3) * 100)
|
||||
except ValueError:
|
||||
setting = ''
|
||||
face = font.attrib.pop('face', None)
|
||||
if face is not None:
|
||||
setting += 'font-face:%s;'%face
|
||||
@ -798,16 +826,17 @@ class Processor(Parser):
|
||||
|
||||
css = '\n'.join(['.%s {%s;}'%(cn, setting) for \
|
||||
setting, cn in cache.items()])
|
||||
sheet = self.css_parser.parseString(self.preprocess_css(css))
|
||||
sheet = self.css_parser.parseString(self.preprocess_css(css.replace(';;}', ';}')))
|
||||
for rule in sheet:
|
||||
self.stylesheet.add(rule)
|
||||
css = ''
|
||||
css += '\n\n' + 'body {margin-top: 0pt; margin-bottom: 0pt; margin-left: 0pt; margin-right: 0pt;}'
|
||||
css += '\n\n@page {margin-top: %fpt; margin-bottom: %fpt; margin-left: %fpt; margin-right: %fpt}'%(self.opts.margin_top, self.opts.margin_bottom, self.opts.margin_left, self.opts.margin_right)
|
||||
css += '\n\n@page {margin-top: %fpt; margin-bottom: %fpt; }'%(self.opts.margin_top, self.opts.margin_bottom)
|
||||
css += '\n\nbody {margin-left: %fpt; margin-right: %fpt}'%(self.opts.margin_left, self.opts.margin_right)
|
||||
# Workaround for anchor rendering bug in ADE
|
||||
css += '\n\na { color: inherit; text-decoration: inherit; cursor: default; }\na[href] { color: blue; text-decoration: underline; cursor:pointer; }'
|
||||
if self.opts.remove_paragraph_spacing:
|
||||
css += '\n\np {text-indent: 2em; margin-top:1pt; margin-bottom:1pt; padding:0pt; border:0pt;}'
|
||||
css += '\n\np {text-indent: 2em; margin-top:0pt; margin-bottom:0pt; padding:0pt; border:0pt;}'
|
||||
if self.opts.override_css:
|
||||
css += '\n\n' + self.opts.override_css
|
||||
self.override_css = self.css_parser.parseString(self.preprocess_css(css))
|
||||
@ -863,7 +892,7 @@ def option_parser():
|
||||
%prog [options] file.html|opf
|
||||
|
||||
Follow all links in an HTML file and collect them into the specified directory.
|
||||
Also collects any references resources like images, stylesheets, scripts, etc.
|
||||
Also collects any resources like images, stylesheets, scripts, etc.
|
||||
If an OPF file is specified instead, the list of files in its <spine> element
|
||||
is used.
|
||||
'''))
|
||||
@ -1042,11 +1071,12 @@ def main(args=sys.argv):
|
||||
|
||||
return 0
|
||||
|
||||
def gui_main(htmlfile):
|
||||
def gui_main(htmlfile, pt=None):
|
||||
'''
|
||||
Convenience wrapper for use in recursively importing HTML files.
|
||||
'''
|
||||
pt = PersistentTemporaryFile('_html2oeb_gui.oeb.zip')
|
||||
if pt is None:
|
||||
pt = PersistentTemporaryFile('_html2oeb_gui.oeb.zip')
|
||||
pt.close()
|
||||
opts = '''
|
||||
pretty_print = True
|
||||
|
||||
59
src/calibre/ebooks/lit/from_any.py
Normal file
59
src/calibre/ebooks/lit/from_any.py
Normal file
@ -0,0 +1,59 @@
|
||||
from __future__ import with_statement
|
||||
__license__ = 'GPL v3'
|
||||
__copyright__ = '2008, Kovid Goyal kovid@kovidgoyal.net'
|
||||
__docformat__ = 'restructuredtext en'
|
||||
|
||||
'''
|
||||
Convert any ebook format to LIT.
|
||||
'''
|
||||
|
||||
import sys, os, glob, logging
|
||||
|
||||
from calibre.ebooks.epub.from_any import any2epub, formats, USAGE
|
||||
from calibre.ebooks.epub import config as common_config
|
||||
from calibre.ptempfile import TemporaryDirectory
|
||||
from calibre.ebooks.lit.writer import oeb2lit
|
||||
|
||||
def config(defaults=None):
|
||||
c = common_config(defaults=defaults, name='lit')
|
||||
return c
|
||||
|
||||
def option_parser(usage=USAGE):
|
||||
return config().option_parser(usage=usage%('LIT', formats()))
|
||||
|
||||
def any2lit(opts, path):
|
||||
ext = os.path.splitext(path)[1]
|
||||
if not ext:
|
||||
raise ValueError('Unknown file type: '+path)
|
||||
ext = ext.lower()[1:]
|
||||
|
||||
if opts.output is None:
|
||||
opts.output = os.path.splitext(os.path.basename(path))[0]+'.lit'
|
||||
|
||||
opts.output = os.path.abspath(opts.output)
|
||||
orig_output = opts.output
|
||||
|
||||
with TemporaryDirectory('_any2lit') as tdir:
|
||||
oebdir = os.path.join(tdir, 'oeb')
|
||||
os.mkdir(oebdir)
|
||||
opts.output = os.path.join(tdir, 'dummy.epub')
|
||||
opts.profile = 'None'
|
||||
any2epub(opts, path, create_epub=False, oeb_cover=True, extract_to=oebdir)
|
||||
opf = glob.glob(os.path.join(oebdir, '*.opf'))[0]
|
||||
opts.output = orig_output
|
||||
logging.getLogger('html2epub').info(_('Creating LIT file from EPUB...'))
|
||||
oeb2lit(opts, opf)
|
||||
|
||||
|
||||
def main(args=sys.argv):
|
||||
parser = option_parser()
|
||||
opts, args = parser.parse_args(args)
|
||||
if len(args) < 2:
|
||||
parser.print_help()
|
||||
print 'No input file specified.'
|
||||
return 1
|
||||
any2lit(opts, args[1])
|
||||
return 0
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main())
|
||||
426
src/calibre/ebooks/lit/html.css
Normal file
426
src/calibre/ebooks/lit/html.css
Normal file
@ -0,0 +1,426 @@
|
||||
/* ***** BEGIN LICENSE BLOCK *****
|
||||
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
|
||||
*
|
||||
* The contents of this file are subject to the Mozilla Public License Version
|
||||
* 1.1 (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
* http://www.mozilla.org/MPL/
|
||||
*
|
||||
* Software distributed under the License is distributed on an "AS IS" basis,
|
||||
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
|
||||
* for the specific language governing rights and limitations under the
|
||||
* License.
|
||||
*
|
||||
* The Original Code is mozilla.org code.
|
||||
*
|
||||
* The Initial Developer of the Original Code is
|
||||
* Netscape Communications Corporation.
|
||||
* Portions created by the Initial Developer are Copyright (C) 1998
|
||||
* the Initial Developer. All Rights Reserved.
|
||||
*
|
||||
* Contributor(s):
|
||||
* Blake Ross <BlakeR1234@aol.com>
|
||||
*
|
||||
* Alternatively, the contents of this file may be used under the terms of
|
||||
* either of the GNU General Public License Version 2 or later (the "GPL"),
|
||||
* or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
|
||||
* in which case the provisions of the GPL or the LGPL are applicable instead
|
||||
* of those above. If you wish to allow use of your version of this file only
|
||||
* under the terms of either the GPL or the LGPL, and not to allow others to
|
||||
* use your version of this file under the terms of the MPL, indicate your
|
||||
* decision by deleting the provisions above and replace them with the notice
|
||||
* and other provisions required by the GPL or the LGPL. If you do not delete
|
||||
* the provisions above, a recipient may use your version of this file under
|
||||
* the terms of any one of the MPL, the GPL or the LGPL.
|
||||
*
|
||||
* ***** END LICENSE BLOCK ***** */
|
||||
|
||||
@namespace url(http://www.w3.org/1999/xhtml); /* set default namespace to HTML */
|
||||
|
||||
/* blocks */
|
||||
|
||||
html, div, map, dt, isindex, form {
|
||||
display: block;
|
||||
}
|
||||
|
||||
body {
|
||||
display: block;
|
||||
margin: 8px;
|
||||
}
|
||||
|
||||
p, dl, multicol {
|
||||
display: block;
|
||||
margin: 1em 0;
|
||||
}
|
||||
|
||||
dd {
|
||||
display: block;
|
||||
}
|
||||
|
||||
blockquote {
|
||||
display: block;
|
||||
margin: 1em 40px;
|
||||
}
|
||||
|
||||
address {
|
||||
display: block;
|
||||
font-style: italic;
|
||||
}
|
||||
|
||||
center {
|
||||
display: block;
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
blockquote[type=cite] {
|
||||
display: block;
|
||||
margin: 1em 0px;
|
||||
border-color: blue;
|
||||
border-width: thin;
|
||||
}
|
||||
|
||||
span[_moz_quote=true] {
|
||||
color: blue;
|
||||
}
|
||||
|
||||
pre[_moz_quote=true] {
|
||||
color: blue;
|
||||
}
|
||||
|
||||
h1 {
|
||||
display: block;
|
||||
font-size: 2em;
|
||||
font-weight: bold;
|
||||
margin: .67em 0;
|
||||
}
|
||||
|
||||
h2 {
|
||||
display: block;
|
||||
font-size: 1.5em;
|
||||
font-weight: bold;
|
||||
margin: .83em 0;
|
||||
}
|
||||
|
||||
h3 {
|
||||
display: block;
|
||||
font-size: 1.17em;
|
||||
font-weight: bold;
|
||||
margin: 1em 0;
|
||||
}
|
||||
|
||||
h4 {
|
||||
display: block;
|
||||
font-weight: bold;
|
||||
margin: 1.33em 0;
|
||||
}
|
||||
|
||||
h5 {
|
||||
display: block;
|
||||
font-size: 0.83em;
|
||||
font-weight: bold;
|
||||
margin: 1.67em 0;
|
||||
}
|
||||
|
||||
h6 {
|
||||
display: block;
|
||||
font-size: 0.67em;
|
||||
font-weight: bold;
|
||||
margin: 2.33em 0;
|
||||
}
|
||||
|
||||
listing {
|
||||
display: block;
|
||||
font-family: monospace;
|
||||
font-size: medium;
|
||||
white-space: pre;
|
||||
margin: 1em 0;
|
||||
}
|
||||
|
||||
xmp, pre, plaintext {
|
||||
display: block;
|
||||
font-family: monospace;
|
||||
white-space: pre;
|
||||
margin: 1em 0;
|
||||
}
|
||||
|
||||
/* tables */
|
||||
|
||||
table {
|
||||
display: table;
|
||||
border-spacing: 2px;
|
||||
border-collapse: separate;
|
||||
margin-top: 0;
|
||||
margin-bottom: 0;
|
||||
text-indent: 0;
|
||||
}
|
||||
|
||||
table[align="left"] {
|
||||
float: left;
|
||||
}
|
||||
|
||||
table[align="right"] {
|
||||
float: right;
|
||||
}
|
||||
|
||||
table[rules]:not([rules="none"]) {
|
||||
border-collapse: collapse;
|
||||
}
|
||||
|
||||
/* caption inherits from table not table-outer */
|
||||
caption {
|
||||
display: table-caption;
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
table[align="center"] > caption {
|
||||
margin-left: auto;
|
||||
margin-right: auto;
|
||||
}
|
||||
|
||||
table[align="center"] > caption[align="left"] {
|
||||
margin-right: 0;
|
||||
}
|
||||
|
||||
table[align="center"] > caption[align="right"] {
|
||||
margin-left: 0;
|
||||
}
|
||||
|
||||
tr {
|
||||
display: table-row;
|
||||
vertical-align: inherit;
|
||||
}
|
||||
|
||||
col {
|
||||
display: table-column;
|
||||
}
|
||||
|
||||
colgroup {
|
||||
display: table-column-group;
|
||||
}
|
||||
|
||||
tbody {
|
||||
display: table-row-group;
|
||||
vertical-align: middle;
|
||||
}
|
||||
|
||||
thead {
|
||||
display: table-header-group;
|
||||
vertical-align: middle;
|
||||
}
|
||||
|
||||
tfoot {
|
||||
display: table-footer-group;
|
||||
vertical-align: middle;
|
||||
}
|
||||
|
||||
/* for XHTML tables without tbody */
|
||||
table > tr {
|
||||
vertical-align: middle;
|
||||
}
|
||||
|
||||
td {
|
||||
display: table-cell;
|
||||
vertical-align: inherit;
|
||||
text-align: inherit;
|
||||
padding: 1px;
|
||||
}
|
||||
|
||||
th {
|
||||
display: table-cell;
|
||||
vertical-align: inherit;
|
||||
font-weight: bold;
|
||||
padding: 1px;
|
||||
}
|
||||
|
||||
/* inlines */
|
||||
|
||||
q:before {
|
||||
content: open-quote;
|
||||
}
|
||||
|
||||
q:after {
|
||||
content: close-quote;
|
||||
}
|
||||
|
||||
b, strong {
|
||||
font-weight: bolder;
|
||||
}
|
||||
|
||||
i, cite, em, var, dfn {
|
||||
font-style: italic;
|
||||
}
|
||||
|
||||
tt, code, kbd, samp {
|
||||
font-family: monospace;
|
||||
}
|
||||
|
||||
u, ins {
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
s, strike, del {
|
||||
text-decoration: line-through;
|
||||
}
|
||||
|
||||
blink {
|
||||
text-decoration: blink;
|
||||
}
|
||||
|
||||
big {
|
||||
font-size: larger;
|
||||
}
|
||||
|
||||
small {
|
||||
font-size: smaller;
|
||||
}
|
||||
|
||||
sub {
|
||||
vertical-align: sub;
|
||||
font-size: smaller;
|
||||
line-height: normal;
|
||||
}
|
||||
|
||||
sup {
|
||||
vertical-align: super;
|
||||
font-size: smaller;
|
||||
line-height: normal;
|
||||
}
|
||||
|
||||
nobr {
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
/* titles */
|
||||
abbr[title], acronym[title] {
|
||||
border-bottom: dotted 1px;
|
||||
}
|
||||
|
||||
/* lists */
|
||||
|
||||
ul, menu, dir {
|
||||
display: block;
|
||||
list-style-type: disc;
|
||||
margin: 1em 0;
|
||||
}
|
||||
|
||||
ol {
|
||||
display: block;
|
||||
list-style-type: decimal;
|
||||
margin: 1em 0;
|
||||
}
|
||||
|
||||
li {
|
||||
display: list-item;
|
||||
}
|
||||
|
||||
/* nested lists have no top/bottom margins */
|
||||
ul ul, ul ol, ul dir, ul menu, ul dl,
|
||||
ol ul, ol ol, ol dir, ol menu, ol dl,
|
||||
dir ul, dir ol, dir dir, dir menu, dir dl,
|
||||
menu ul, menu ol, menu dir, menu menu, menu dl,
|
||||
dl ul, dl ol, dl dir, dl menu, dl dl {
|
||||
margin-top: 0;
|
||||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
/* 2 deep unordered lists use a circle */
|
||||
ol ul, ul ul, menu ul, dir ul,
|
||||
ol menu, ul menu, menu menu, dir menu,
|
||||
ol dir, ul dir, menu dir, dir dir {
|
||||
list-style-type: circle;
|
||||
}
|
||||
|
||||
/* 3 deep (or more) unordered lists use a square */
|
||||
ol ol ul, ol ul ul, ol menu ul, ol dir ul,
|
||||
ol ol menu, ol ul menu, ol menu menu, ol dir menu,
|
||||
ol ol dir, ol ul dir, ol menu dir, ol dir dir,
|
||||
ul ol ul, ul ul ul, ul menu ul, ul dir ul,
|
||||
ul ol menu, ul ul menu, ul menu menu, ul dir menu,
|
||||
ul ol dir, ul ul dir, ul menu dir, ul dir dir,
|
||||
menu ol ul, menu ul ul, menu menu ul, menu dir ul,
|
||||
menu ol menu, menu ul menu, menu menu menu, menu dir menu,
|
||||
menu ol dir, menu ul dir, menu menu dir, menu dir dir,
|
||||
dir ol ul, dir ul ul, dir menu ul, dir dir ul,
|
||||
dir ol menu, dir ul menu, dir menu menu, dir dir menu,
|
||||
dir ol dir, dir ul dir, dir menu dir, dir dir dir {
|
||||
list-style-type: square;
|
||||
}
|
||||
|
||||
|
||||
/* leafs */
|
||||
|
||||
/* <hr> noshade and color attributes are handled completely by
|
||||
* the nsHTMLHRElement attribute mapping code
|
||||
*/
|
||||
hr {
|
||||
display: block;
|
||||
height: 2px;
|
||||
border: 1px inset;
|
||||
margin: 0.5em auto 0.5em auto;
|
||||
color: gray;
|
||||
}
|
||||
|
||||
hr[size="1"] {
|
||||
border-style: solid none none none;
|
||||
}
|
||||
|
||||
img[usemap], object[usemap] {
|
||||
color: blue;
|
||||
}
|
||||
|
||||
frameset {
|
||||
display: block ! important;
|
||||
position: static ! important;
|
||||
float: none ! important;
|
||||
border: none ! important;
|
||||
}
|
||||
|
||||
frame {
|
||||
border: none ! important;
|
||||
}
|
||||
|
||||
iframe {
|
||||
border: 2px inset;
|
||||
}
|
||||
|
||||
noframes {
|
||||
display: none;
|
||||
}
|
||||
|
||||
spacer {
|
||||
position: static ! important;
|
||||
float: none ! important;
|
||||
}
|
||||
|
||||
/* focusable content: anything w/ tabindex >=0 is focusable */
|
||||
abbr:focus, acronym:focus, address:focus, applet:focus, b:focus,
|
||||
base:focus, big:focus, blockquote:focus, br:focus, canvas:focus, caption:focus,
|
||||
center:focus, cite:focus, code:focus, col:focus, colgroup:focus, dd:focus,
|
||||
del:focus, dfn:focus, dir:focus, div:focus, dl:focus, dt:focus, em:focus,
|
||||
fieldset:focus, font:focus, form:focus, h1:focus, h2:focus, h3:focus, h4:focus,
|
||||
h5:focus, h6:focus, hr:focus, i:focus, img:focus, ins:focus,
|
||||
kbd:focus, label:focus, legend:focus, li:focus, link:focus, menu:focus,
|
||||
object:focus, ol:focus, p:focus, pre:focus, q:focus, s:focus, samp:focus,
|
||||
small:focus, span:focus, strike:focus, strong:focus, sub:focus, sup:focus,
|
||||
table:focus, tbody:focus, td:focus, tfoot:focus, th:focus, thead:focus,
|
||||
tr:focus, tt:focus, u:focus, ul:focus, var:focus {
|
||||
/* Don't specify the outline-color, we should always use initial value. */
|
||||
outline: 1px dotted;
|
||||
}
|
||||
|
||||
/* hidden elements */
|
||||
area, base, basefont, head, meta, script, style, title,
|
||||
noembed, param, link {
|
||||
display: none;
|
||||
}
|
||||
|
||||
/* Page breaks at body tags, to help out with LIT-generation */
|
||||
body {
|
||||
page-break-before: always;
|
||||
}
|
||||
|
||||
/* Explicit line-breaks are blocks, sure... */
|
||||
br {
|
||||
display: block;
|
||||
}
|
||||
|
||||
31
src/calibre/ebooks/lit/lzx.py
Normal file
31
src/calibre/ebooks/lit/lzx.py
Normal file
@ -0,0 +1,31 @@
|
||||
'''
|
||||
LZX compression/decompression wrapper.
|
||||
'''
|
||||
from __future__ import with_statement
|
||||
|
||||
__license__ = 'GPL v3'
|
||||
__copyright__ = '2008, Marshall T. Vandegrift <llasram@gmail.com>'
|
||||
|
||||
import sys
|
||||
from calibre import plugins
|
||||
|
||||
_lzx, _error = plugins['lzx']
|
||||
if _lzx is None:
|
||||
raise RuntimeError('Failed to load the lzx plugin: %s' % _error)
|
||||
|
||||
__all__ = ['Compressor', 'Decompressor', 'LZXError']
|
||||
|
||||
LZXError = _lzx.LZXError
|
||||
Compressor = _lzx.Compressor
|
||||
|
||||
class Decompressor(object):
|
||||
def __init__(self, wbits):
|
||||
self.wbits = wbits
|
||||
self.blocksize = 1 << wbits
|
||||
_lzx.init(wbits)
|
||||
|
||||
def decompress(self, data, outlen):
|
||||
return _lzx.decompress(data, outlen)
|
||||
|
||||
def reset(self):
|
||||
return _lzx.reset()
|
||||
@ -4,6 +4,9 @@ Modified version of SHA-1 used in Microsoft LIT files.
|
||||
Adapted from the PyPy pure-Python SHA-1 implementation.
|
||||
"""
|
||||
|
||||
__license__ = 'GPL v3'
|
||||
__copyright__ = '2008, Marshall T. Vandegrift <llasram@gmail.com>'
|
||||
|
||||
import struct, copy
|
||||
|
||||
# ======================================================================
|
||||
|
||||
770
src/calibre/ebooks/lit/oeb.py
Normal file
770
src/calibre/ebooks/lit/oeb.py
Normal file
@ -0,0 +1,770 @@
|
||||
'''
|
||||
Basic support for manipulating OEB 1.x/2.0 content and metadata.
|
||||
'''
|
||||
from __future__ import with_statement
|
||||
|
||||
__license__ = 'GPL v3'
|
||||
__copyright__ = '2008, Marshall T. Vandegrift <llasram@gmail.com>'
|
||||
|
||||
import os
|
||||
import sys
|
||||
from collections import defaultdict
|
||||
from types import StringTypes
|
||||
from itertools import izip, count
|
||||
from urlparse import urldefrag, urlparse, urlunparse
|
||||
from urllib import unquote as urlunquote
|
||||
import logging
|
||||
from lxml import etree
|
||||
from calibre import LoggingInterface
|
||||
|
||||
XML_PARSER = etree.XMLParser(recover=True, resolve_entities=False)
|
||||
XML_NS = 'http://www.w3.org/XML/1998/namespace'
|
||||
XHTML_NS = 'http://www.w3.org/1999/xhtml'
|
||||
OPF1_NS = 'http://openebook.org/namespaces/oeb-package/1.0/'
|
||||
OPF2_NS = 'http://www.idpf.org/2007/opf'
|
||||
DC09_NS = 'http://purl.org/metadata/dublin_core'
|
||||
DC10_NS = 'http://purl.org/dc/elements/1.0/'
|
||||
DC11_NS = 'http://purl.org/dc/elements/1.1/'
|
||||
XSI_NS = 'http://www.w3.org/2001/XMLSchema-instance'
|
||||
DCTERMS_NS = 'http://purl.org/dc/terms/'
|
||||
NCX_NS = 'http://www.daisy.org/z3986/2005/ncx/'
|
||||
XPNSMAP = {'h': XHTML_NS, 'o1': OPF1_NS, 'o2': OPF2_NS,
|
||||
'd09': DC09_NS, 'd10': DC10_NS, 'd11': DC11_NS,
|
||||
'xsi': XSI_NS, 'dt': DCTERMS_NS, 'ncx': NCX_NS}
|
||||
|
||||
def XML(name): return '{%s}%s' % (XML_NS, name)
|
||||
def XHTML(name): return '{%s}%s' % (XHTML_NS, name)
|
||||
def OPF(name): return '{%s}%s' % (OPF2_NS, name)
|
||||
def DC(name): return '{%s}%s' % (DC11_NS, name)
|
||||
def NCX(name): return '{%s}%s' % (NCX_NS, name)
|
||||
|
||||
XHTML_MIME = 'application/xhtml+xml'
|
||||
CSS_MIME = 'text/css'
|
||||
NCX_MIME = 'application/x-dtbncx+xml'
|
||||
OPF_MIME = 'application/oebps-package+xml'
|
||||
OEB_DOC_MIME = 'text/x-oeb1-document'
|
||||
OEB_CSS_MIME = 'text/x-oeb1-css'
|
||||
|
||||
OEB_STYLES = set([CSS_MIME, OEB_CSS_MIME, 'text/x-oeb-css'])
|
||||
OEB_DOCS = set([XHTML_MIME, 'text/html', OEB_DOC_MIME, 'text/x-oeb-document'])
|
||||
|
||||
|
||||
def element(parent, *args, **kwargs):
|
||||
if parent is not None:
|
||||
return etree.SubElement(parent, *args, **kwargs)
|
||||
return etree.Element(*args, **kwargs)
|
||||
|
||||
def namespace(name):
|
||||
if '}' in name:
|
||||
return name.split('}', 1)[0][1:]
|
||||
return ''
|
||||
|
||||
def barename(name):
|
||||
if '}' in name:
|
||||
return name.split('}', 1)[1]
|
||||
return name
|
||||
|
||||
def xpath(elem, expr):
|
||||
return elem.xpath(expr, namespaces=XPNSMAP)
|
||||
|
||||
URL_UNSAFE = r"""`!@#$%^&*[](){}?+=;:'",<>\| """
|
||||
def urlquote(href):
|
||||
result = []
|
||||
for char in href:
|
||||
if char in URL_UNSAFE:
|
||||
char = "%%%02x" % ord(char)
|
||||
result.append(char)
|
||||
return ''.join(result)
|
||||
|
||||
def urlnormalize(href):
|
||||
parts = urlparse(href)
|
||||
parts = (part.replace('\\', '/') for part in parts)
|
||||
parts = (urlunquote(part) for part in parts)
|
||||
parts = (urlquote(part) for part in parts)
|
||||
return urlunparse(parts)
|
||||
|
||||
|
||||
class FauxLogger(object):
|
||||
def __getattr__(self, name):
|
||||
return self
|
||||
def __call__(self, message):
|
||||
print message
|
||||
|
||||
|
||||
class AbstractContainer(object):
|
||||
def read_xml(self, path):
|
||||
return etree.fromstring(
|
||||
self.read(path), parser=XML_PARSER,
|
||||
base_url=os.path.dirname(path))
|
||||
|
||||
class DirContainer(AbstractContainer):
|
||||
def __init__(self, rootdir):
|
||||
self.rootdir = rootdir
|
||||
|
||||
def read(self, path):
|
||||
path = os.path.join(self.rootdir, path)
|
||||
with open(urlunquote(path), 'rb') as f:
|
||||
return f.read()
|
||||
|
||||
def write(self, path, data):
|
||||
path = os.path.join(self.rootdir, path)
|
||||
with open(urlunquote(path), 'wb') as f:
|
||||
return f.write(data)
|
||||
|
||||
def exists(self, path):
|
||||
path = os.path.join(self.rootdir, path)
|
||||
return os.path.isfile(path)
|
||||
|
||||
|
||||
class Metadata(object):
|
||||
TERMS = set(['contributor', 'coverage', 'creator', 'date', 'description',
|
||||
'format', 'identifier', 'language', 'publisher', 'relation',
|
||||
'rights', 'source', 'subject', 'title', 'type'])
|
||||
OPF1_NSMAP = {'dc': DC11_NS, 'oebpackage': OPF1_NS}
|
||||
OPF2_NSMAP = {'opf': OPF2_NS, 'dc': DC11_NS, 'dcterms': DCTERMS_NS,
|
||||
'xsi': XSI_NS}
|
||||
|
||||
class Item(object):
|
||||
def __init__(self, term, value, fq_attrib={}):
|
||||
self.fq_attrib = dict(fq_attrib)
|
||||
if term == OPF('meta') and not value:
|
||||
term = self.fq_attrib.pop('name')
|
||||
value = self.fq_attrib.pop('content')
|
||||
elif term in Metadata.TERMS and not namespace(term):
|
||||
term = DC(term)
|
||||
self.term = term
|
||||
self.value = value
|
||||
self.attrib = attrib = {}
|
||||
for fq_attr in fq_attrib:
|
||||
attr = barename(fq_attr)
|
||||
attrib[attr] = fq_attrib[fq_attr]
|
||||
|
||||
def __getattr__(self, name):
|
||||
name = name.replace('_', '-')
|
||||
try:
|
||||
return self.attrib[name]
|
||||
except KeyError:
|
||||
raise AttributeError(
|
||||
'%r object has no attribute %r' \
|
||||
% (self.__class__.__name__, name))
|
||||
|
||||
def __repr__(self):
|
||||
return 'Item(term=%r, value=%r, attrib=%r)' \
|
||||
% (barename(self.term), self.value, self.attrib)
|
||||
|
||||
def __str__(self):
|
||||
return str(self.value)
|
||||
|
||||
def __unicode__(self):
|
||||
return unicode(self.value)
|
||||
|
||||
def to_opf1(self, dcmeta=None, xmeta=None):
|
||||
if namespace(self.term) == DC11_NS:
|
||||
name = DC(barename(self.term).title())
|
||||
elem = element(dcmeta, name, attrib=self.attrib)
|
||||
elem.text = self.value
|
||||
else:
|
||||
elem = element(xmeta, 'meta', attrib=self.attrib)
|
||||
elem.attrib['name'] = self.term
|
||||
elem.attrib['content'] = self.value
|
||||
return elem
|
||||
|
||||
def to_opf2(self, parent=None):
|
||||
if namespace(self.term) == DC11_NS:
|
||||
elem = element(parent, self.term, attrib=self.fq_attrib)
|
||||
elem.text = self.value
|
||||
else:
|
||||
elem = element(parent, OPF('meta'), attrib=self.fq_attrib)
|
||||
elem.attrib['name'] = self.term
|
||||
elem.attrib['content'] = self.value
|
||||
return elem
|
||||
|
||||
def __init__(self, oeb):
|
||||
self.oeb = oeb
|
||||
self.items = defaultdict(list)
|
||||
|
||||
def add(self, term, value, attrib={}):
|
||||
item = self.Item(term, value, attrib)
|
||||
items = self.items[barename(item.term)]
|
||||
items.append(item)
|
||||
return item
|
||||
|
||||
def iterkeys(self):
|
||||
for key in self.items:
|
||||
yield key
|
||||
__iter__ = iterkeys
|
||||
|
||||
def __getitem__(self, key):
|
||||
return self.items[key]
|
||||
|
||||
def __contains__(self, key):
|
||||
return key in self.items
|
||||
|
||||
def __getattr__(self, term):
|
||||
return self.items[term]
|
||||
|
||||
def to_opf1(self, parent=None):
|
||||
elem = element(parent, 'metadata')
|
||||
dcmeta = element(elem, 'dc-metadata', nsmap=self.OPF1_NSMAP)
|
||||
xmeta = element(elem, 'x-metadata')
|
||||
for term in self.items:
|
||||
for item in self.items[term]:
|
||||
item.to_opf1(dcmeta, xmeta)
|
||||
if 'ms-chaptertour' not in self.items:
|
||||
chaptertour = self.Item('ms-chaptertour', 'chaptertour')
|
||||
chaptertour.to_opf1(dcmeta, xmeta)
|
||||
return elem
|
||||
|
||||
def to_opf2(self, parent=None):
|
||||
elem = element(parent, OPF('metadata'), nsmap=self.OPF2_NSMAP)
|
||||
for term in self.items:
|
||||
for item in self.items[term]:
|
||||
item.to_opf2(elem)
|
||||
return elem
|
||||
|
||||
|
||||
class Manifest(object):
|
||||
class Item(object):
|
||||
def __init__(self, id, href, media_type, fallback=None, loader=str):
|
||||
self.id = id
|
||||
self.href = self.path = urlnormalize(href)
|
||||
self.media_type = media_type
|
||||
self.fallback = fallback
|
||||
self.spine_position = None
|
||||
self.linear = True
|
||||
self._loader = loader
|
||||
self._data = None
|
||||
|
||||
def __repr__(self):
|
||||
return 'Item(id=%r, href=%r, media_type=%r)' \
|
||||
% (self.id, self.href, self.media_type)
|
||||
|
||||
def data():
|
||||
def fget(self):
|
||||
if self._data:
|
||||
return self._data
|
||||
data = self._loader(self.href)
|
||||
if self.media_type == XHTML_MIME:
|
||||
data = etree.fromstring(data, parser=XML_PARSER)
|
||||
if namespace(data.tag) != XHTML_NS:
|
||||
data.attrib['xmlns'] = XHTML_NS
|
||||
data = etree.tostring(data)
|
||||
data = etree.fromstring(data, parser=XML_PARSER)
|
||||
elif self.media_type.startswith('application/') \
|
||||
and self.media_type.endswith('+xml'):
|
||||
data = etree.fromstring(data, parser=XML_PARSER)
|
||||
return data
|
||||
def fset(self, value):
|
||||
self._data = value
|
||||
def fdel(self):
|
||||
self._data = None
|
||||
return property(fget, fset, fdel)
|
||||
data = data()
|
||||
|
||||
def __cmp__(self, other):
|
||||
result = cmp(self.spine_position, other.spine_position)
|
||||
if result != 0:
|
||||
return result
|
||||
return cmp(self.id, other.id)
|
||||
|
||||
def __init__(self, oeb):
|
||||
self.oeb = oeb
|
||||
self.items = {}
|
||||
self.hrefs = {}
|
||||
|
||||
def add(self, id, href, media_type, fallback=None):
|
||||
item = self.Item(
|
||||
id, href, media_type, fallback, self.oeb.container.read)
|
||||
self.items[item.id] = item
|
||||
self.hrefs[item.href] = item
|
||||
return item
|
||||
|
||||
def remove(self, id):
|
||||
href = self.items[id].href
|
||||
del self.items[id]
|
||||
del self.hrefs[href]
|
||||
|
||||
def __iter__(self):
|
||||
for id in self.items:
|
||||
yield id
|
||||
|
||||
def __getitem__(self, id):
|
||||
return self.items[id]
|
||||
|
||||
def values(self):
|
||||
for item in self.items.values():
|
||||
yield item
|
||||
|
||||
def items(self):
|
||||
for id, item in self.refs.items():
|
||||
yield id, items
|
||||
|
||||
def __contains__(self, key):
|
||||
return key in self.items
|
||||
|
||||
def to_opf1(self, parent=None):
|
||||
elem = element(parent, 'manifest')
|
||||
for item in self.items.values():
|
||||
media_type = item.media_type
|
||||
if media_type == XHTML_MIME:
|
||||
media_type = OEB_DOC_MIME
|
||||
elif media_type == CSS_MIME:
|
||||
media_type = OEB_CSS_MIME
|
||||
attrib = {'id': item.id, 'href': item.href,
|
||||
'media-type': media_type}
|
||||
if item.fallback:
|
||||
attrib['fallback'] = item.fallback
|
||||
element(elem, 'item', attrib=attrib)
|
||||
return elem
|
||||
|
||||
def to_opf2(self, parent=None):
|
||||
elem = element(parent, OPF('manifest'))
|
||||
for item in self.items.values():
|
||||
attrib = {'id': item.id, 'href': item.href,
|
||||
'media-type': item.media_type}
|
||||
if item.fallback:
|
||||
attrib['fallback'] = item.fallback
|
||||
element(elem, OPF('item'), attrib=attrib)
|
||||
return elem
|
||||
|
||||
|
||||
class Spine(object):
|
||||
def __init__(self, oeb):
|
||||
self.oeb = oeb
|
||||
self.items = []
|
||||
|
||||
def add(self, item, linear):
|
||||
if isinstance(linear, StringTypes):
|
||||
linear = linear.lower()
|
||||
if linear is None or linear in ('yes', 'true'):
|
||||
linear = True
|
||||
elif linear in ('no', 'false'):
|
||||
linear = False
|
||||
item.linear = linear
|
||||
item.spine_position = len(self.items)
|
||||
self.items.append(item)
|
||||
return item
|
||||
|
||||
def __iter__(self):
|
||||
for item in self.items:
|
||||
yield item
|
||||
|
||||
def __getitem__(self, index):
|
||||
return self.items[index]
|
||||
|
||||
def __len__(self):
|
||||
return len(self.items)
|
||||
|
||||
def __contains__(self, item):
|
||||
return (item in self.items)
|
||||
|
||||
def to_opf1(self, parent=None):
|
||||
elem = element(parent, 'spine')
|
||||
for item in self.items:
|
||||
if item.linear:
|
||||
element(elem, 'itemref', attrib={'idref': item.id})
|
||||
return elem
|
||||
|
||||
def to_opf2(self, parent=None):
|
||||
elem = element(parent, OPF('spine'))
|
||||
for item in self.items:
|
||||
attrib = {'idref': item.id}
|
||||
if not item.linear:
|
||||
attrib['linear'] = 'no'
|
||||
element(elem, OPF('itemref'), attrib=attrib)
|
||||
return elem
|
||||
|
||||
|
||||
class Guide(object):
|
||||
class Reference(object):
|
||||
def __init__(self, type, title, href):
|
||||
self.type = type
|
||||
self.title = title
|
||||
self.href = urlnormalize(href)
|
||||
|
||||
def __repr__(self):
|
||||
return 'Reference(type=%r, title=%r, href=%r)' \
|
||||
% (self.type, self.title, self.href)
|
||||
|
||||
def __init__(self, oeb):
|
||||
self.oeb = oeb
|
||||
self.refs = {}
|
||||
|
||||
def add(self, type, title, href):
|
||||
ref = self.Reference(type, title, href)
|
||||
self.refs[type] = ref
|
||||
return ref
|
||||
|
||||
def by_type(self, type):
|
||||
return self.ref_types[type]
|
||||
|
||||
def iterkeys(self):
|
||||
for type in self.refs:
|
||||
yield type
|
||||
__iter__ = iterkeys
|
||||
|
||||
def values(self):
|
||||
for ref in self.refs.values():
|
||||
yield ref
|
||||
|
||||
def items(self):
|
||||
for type, ref in self.refs.items():
|
||||
yield type, ref
|
||||
|
||||
def __getitem__(self, index):
|
||||
return self.refs[index]
|
||||
|
||||
def __contains__(self, key):
|
||||
return key in self.refs
|
||||
|
||||
def to_opf1(self, parent=None):
|
||||
elem = element(parent, 'guide')
|
||||
for ref in self.refs.values():
|
||||
attrib = {'type': ref.type, 'href': ref.href}
|
||||
if ref.title:
|
||||
attrib['title'] = ref.title
|
||||
element(elem, 'reference', attrib=attrib)
|
||||
return elem
|
||||
|
||||
def to_opf2(self, parent=None):
|
||||
elem = element(parent, OPF('guide'))
|
||||
for ref in self.refs.values():
|
||||
attrib = {'type': ref.type, 'href': ref.href}
|
||||
if ref.title:
|
||||
attrib['title'] = ref.title
|
||||
element(elem, OPF('reference'), attrib=attrib)
|
||||
return elem
|
||||
|
||||
|
||||
class TOC(object):
    """A node in a (possibly nested) table of contents.

    Each instance is simultaneously an entry and the container of its
    child entries (``self.nodes``).  The root of a tree is usually
    created with no title or href.
    """

    def __init__(self, title=None, href=None, klass=None, id=None):
        self.title = title
        # Only normalize when an href is actually supplied.
        self.href = urlnormalize(href) if href else href
        self.klass = klass
        self.id = id
        self.nodes = []

    def add(self, title, href, klass=None, id=None):
        """Append a new child entry and return it."""
        node = TOC(title, href, klass, id)
        self.nodes.append(node)
        return node

    def __iter__(self):
        for node in self.nodes:
            yield node

    def __getitem__(self, index):
        return self.nodes[index]

    def depth(self, level=0):
        """Depth of the tree, measured along each level's first child only."""
        if self.nodes:
            return self.nodes[0].depth(level+1)
        return level

    def to_opf1(self, tour):
        """Serialize this subtree as flat OPF 1.x <site> elements."""
        for node in self.nodes:
            element(tour, 'site', attrib={
                'title': node.title, 'href': node.href})
            node.to_opf1(tour)
        return tour

    def to_ncx(self, parent, playorder=None, depth=1):
        """Serialize this subtree as nested NCX navPoint elements.

        *playorder* is a one-element list shared down the recursion so
        the play order counter is global across the whole tree.
        """
        if not playorder: playorder = [0]
        for node in self.nodes:
            playorder[0] += 1
            point = etree.SubElement(parent,
                NCX('navPoint'), attrib={'playOrder': str(playorder[0])})
            # Bug fix: the guards previously tested self.klass/self.id
            # (the parent node) while writing node.klass/node.id (the
            # child), so child attributes were dropped -- or written as
            # None -- depending on the parent's state.
            if node.klass:
                point.attrib['class'] = node.klass
            if node.id:
                point.attrib['id'] = node.id
            label = etree.SubElement(point, NCX('navLabel'))
            etree.SubElement(label, NCX('text')).text = node.title
            # Top-level entries drop their fragment; deeper ones keep it.
            href = node.href if depth > 1 else urldefrag(node.href)[0]
            child = etree.SubElement(point,
                NCX('content'), attrib={'src': href})
            node.to_ncx(point, playorder, depth+1)
        return parent
|
||||
|
||||
|
||||
class OEBBook(object):
    """In-memory model of an OEB publication, loaded from an OPF package.

    OPF 1.x packages are transparently upgraded to OPF 2.0, after which
    the metadata, manifest, spine, guide and TOC members are populated.
    """

    def __init__(self, opfpath, container=None, logger=FauxLogger()):
        # NOTE(review): the FauxLogger() default is created once at class
        # definition time and shared between calls -- harmless only if the
        # logger is stateless; confirm.
        if not container:
            # Bare filesystem path: serve sibling files from its directory.
            container = DirContainer(os.path.dirname(opfpath))
            opfpath = os.path.basename(opfpath)
        self.container = container
        self.logger = logger
        opf = self._read_opf(opfpath)
        self._all_from_opf(opf)

    def _convert_opf1(self, opf):
        """Return an OPF 2.0 tree equivalent to the OPF 1.x tree *opf*."""
        nroot = etree.Element(OPF('package'),
            nsmap={None: OPF2_NS}, version="2.0", **dict(opf.attrib))
        metadata = etree.SubElement(nroot, OPF('metadata'),
            nsmap={'opf': OPF2_NS, 'dc': DC11_NS,
                   'xsi': XSI_NS, 'dcterms': DCTERMS_NS})
        # Probe the known Dublin Core prefixes, newest first, keeping the
        # first flavour that matches any elements.
        for prefix in ('d11', 'd10', 'd09'):
            elements = xpath(opf, 'metadata/dc-metadata/%s:*' % prefix)
            if elements: break
        for element in elements:
            if not element.text: continue
            tag = barename(element.tag).lower()
            element.tag = '{%s}%s' % (DC11_NS, tag)
            # Move OPF1 refinement attributes into the OPF2 namespace.
            # NOTE(review): attributes are deleted while iterating
            # element.attrib -- verify lxml tolerates in-place deletion.
            for name in element.attrib:
                if name in ('role', 'file-as', 'scheme'):
                    nsname = '{%s}%s' % (OPF2_NS, name)
                    element.attrib[nsname] = element.attrib[name]
                    del element.attrib[name]
            metadata.append(element)
        for element in opf.xpath('metadata/x-metadata/meta'):
            metadata.append(element)
        # Canonicalize manifest media types.
        for item in opf.xpath('manifest/item'):
            media_type = item.attrib['media-type'].lower()
            if media_type in OEB_DOCS:
                media_type = XHTML_MIME
            elif media_type in OEB_STYLES:
                media_type = CSS_MIME
            item.attrib['media-type'] = media_type
        for tag in ('manifest', 'spine', 'tours', 'guide'):
            for element in opf.xpath(tag):
                nroot.append(element)
        # Serialize and re-parse so the result is built by the canonical
        # parser configuration.
        return etree.fromstring(etree.tostring(nroot), parser=XML_PARSER)

    def _read_opf(self, opfpath):
        """Read the OPF from the container, upgrading 1.x packages."""
        opf = self.container.read_xml(opfpath)
        version = float(opf.get('version', 1.0))
        if version < 2.0:
            opf = self._convert_opf1(opf)
        return opf

    def _metadata_from_opf(self, opf):
        """Populate self.metadata and resolve the package's unique id."""
        uid = opf.attrib['unique-identifier']
        self.metadata = metadata = Metadata(self)
        for elem in xpath(opf, '/o2:package/o2:metadata/*'):
            if elem.text or elem.attrib:
                metadata.add(elem.tag, elem.text, elem.attrib)
        for item in metadata.identifier:
            if item.id == uid:
                self.uid = item
                break
        else:
            # Declared unique-identifier missing: fall back to the first
            # identifier rather than failing.
            self.logger.log_warn(u'Unique-identifier %r not found.' % uid)
            self.uid = metadata.identifier[0]

    def _manifest_from_opf(self, opf):
        """Populate self.manifest, skipping items absent from the container."""
        self.manifest = manifest = Manifest(self)
        for elem in xpath(opf, '/o2:package/o2:manifest/o2:item'):
            href = elem.get('href')
            if not self.container.exists(href):
                self.logger.log_warn(u'Manifest item %r not found.' % href)
                continue
            manifest.add(elem.get('id'), href, elem.get('media-type'),
                elem.get('fallback'))

    def _spine_from_opf(self, opf):
        """Populate self.spine; unlisted XHTML items are appended non-linear."""
        self.spine = spine = Spine(self)
        for elem in xpath(opf, '/o2:package/o2:spine/o2:itemref'):
            idref = elem.get('idref')
            if idref not in self.manifest:
                self.logger.log_warn(u'Spine item %r not found.' % idref)
                continue
            item = self.manifest[idref]
            spine.add(item, elem.get('linear'))
        extras = []
        for item in self.manifest.values():
            if item.media_type == XHTML_MIME \
               and item not in spine:
                extras.append(item)
        extras.sort()
        for item in extras:
            spine.add(item, False)

    def _guide_from_opf(self, opf):
        """Populate self.guide, dropping references to unknown items."""
        self.guide = guide = Guide(self)
        for elem in xpath(opf, '/o2:package/o2:guide/o2:reference'):
            href = elem.get('href')
            path, frag = urldefrag(href)
            if path not in self.manifest.hrefs:
                self.logger.log_warn(u'Guide reference %r not found' % href)
                continue
            guide.add(elem.get('type'), elem.get('title'), href)

    def _toc_from_navpoint(self, toc, navpoint):
        """Recursively convert NCX navPoint children into TOC nodes."""
        children = xpath(navpoint, 'ncx:navPoint')
        for child in children:
            title = ''.join(xpath(child, 'ncx:navLabel/ncx:text/text()'))
            href = xpath(child, 'ncx:content/@src')[0]
            id = child.get('id')
            klass = child.get('class')
            node = toc.add(title, href, id=id, klass=klass)
            self._toc_from_navpoint(node, child)

    def _toc_from_ncx(self, opf):
        """Build the TOC from an NCX document; return True on success."""
        result = xpath(opf, '/o2:package/o2:spine/@toc')
        if not result:
            # No spine @toc attribute: accept a lone NCX manifest item.
            expr = '/o2:package/o2:manifest/o2:item[@media-type="%s"]/@id'
            result = xpath(opf, expr % NCX_MIME)
            if len(result) != 1:
                return False
        id = result[0]
        ncx = self.manifest[id].data
        # The NCX is consumed here; it is not kept as a manifest item.
        self.manifest.remove(id)
        title = xpath(ncx, 'ncx:docTitle/ncx:text/text()')[0]
        self.toc = toc = TOC(title)
        navmaps = xpath(ncx, 'ncx:navMap')
        for navmap in navmaps:
            self._toc_from_navpoint(toc, navmap)
        return True

    def _toc_from_tour(self, opf):
        """Build the TOC from the first OPF <tour>; return True on success."""
        result = xpath(opf, '/o2:package/o2:tours/o2:tour')
        if not result:
            return False
        tour = result[0]
        self.toc = toc = TOC(tour.get('title'))
        sites = xpath(tour, 'o2:site')
        for site in sites:
            toc.add(site.get('title'), site.get('href'))
        return True

    def _toc_from_html(self, opf):
        """Build the TOC from the guide's 'toc' HTML page, if present."""
        if 'toc' not in self.guide:
            return False
        self.toc = toc = TOC()
        itempath, frag = urldefrag(self.guide['toc'].href)
        item = self.manifest.hrefs[itempath]
        html = item.data
        if frag:
            # Narrow to the fragment target, then widen back out until a
            # subtree that actually contains links is found.
            elems = xpath(html, './/*[@id="%s"]' % frag)
            if not elems:
                elems = xpath(html, './/*[@name="%s"]' % frag)
            elem = elems[0] if elems else html
            while elem != html and not xpath(elem, './/h:a[@href]'):
                elem = elem.getparent()
            html = elem
        # Gather anchor titles per normalized href, preserving first-seen
        # order; repeated hrefs have their titles concatenated.
        titles = defaultdict(list)
        order = []
        for anchor in xpath(html, './/h:a[@href]'):
            href = anchor.attrib['href']
            path, frag = urldefrag(href)
            if not path:
                # Fragment-only link: it targets the TOC page itself.
                href = '#'.join((itempath, frag))
            title = ' '.join(xpath(anchor, './/text()'))
            href = urlnormalize(href)
            if href not in titles:
                order.append(href)
            titles[href].append(title)
        for href in order:
            toc.add(' '.join(titles[href]), href)
        return True

    def _toc_from_spine(self, opf):
        """Last-resort TOC: one entry per linear spine item."""
        self.toc = toc = TOC()
        titles = []
        headers = []
        for item in self.spine:
            if not item.linear: continue
            html = item.data
            title = xpath(html, '/h:html/h:head/h:title/text()')
            if title: titles.append(title[0])
            headers.append('(unlabled)')
            for tag in ('h1', 'h2', 'h3', 'h4', 'h5', 'strong'):
                expr = '/h:html/h:body//h:%s[position()=1]/text()' % (tag,)
                header = xpath(html, expr)
                if header:
                    headers[-1] = header[0]
                    break
        # Prefer <title> text, but fall back to the first heading/strong
        # text when titles are not unique across the spine.
        use = titles
        if len(titles) > len(set(titles)):
            use = headers
        for title, item in izip(use, self.spine):
            if not item.linear: continue
            toc.add(title, item.href)
        return True

    def _toc_from_opf(self, opf):
        """Build self.toc from the best available source, in priority order:
        NCX, then OPF tour, then guide HTML, then the spine itself."""
        if self._toc_from_ncx(opf): return
        if self._toc_from_tour(opf): return
        if self._toc_from_html(opf): return
        self._toc_from_spine(opf)

    def _all_from_opf(self, opf):
        """Populate every member from the (OPF 2.0) package tree."""
        self._metadata_from_opf(opf)
        self._manifest_from_opf(opf)
        self._spine_from_opf(opf)
        self._guide_from_opf(opf)
        self._toc_from_opf(opf)

    def to_opf1(self):
        """Serialize the book as an OPF 1.x package document."""
        package = etree.Element('package',
            attrib={'unique-identifier': self.uid.id})
        metadata = self.metadata.to_opf1(package)
        manifest = self.manifest.to_opf1(package)
        spine = self.spine.to_opf1(package)
        tours = element(package, 'tours')
        tour = element(tours, 'tour',
            attrib={'id': 'chaptertour', 'title': 'Chapter Tour'})
        self.toc.to_opf1(tour)
        guide = self.guide.to_opf1(package)
        return {OPF_MIME: ('content.opf', package)}

    def _generate_ncx_item(self):
        """Pick an id and href for the generated NCX that collide with
        nothing already in the manifest."""
        id = 'ncx'
        index = 0
        while id in self.manifest:
            id = 'ncx' + str(index)
            index = index + 1
        href = 'toc'
        index = 0
        while (href + '.ncx') in self.manifest.hrefs:
            href = 'toc' + str(index)
            # Bug fix: index was never incremented in this loop, so a
            # second collision ('toc0.ncx' also taken) spun forever.
            index = index + 1
        href += '.ncx'
        return (id, href)

    def _to_ncx(self):
        """Build an NCX document for the current TOC and metadata."""
        ncx = etree.Element(NCX('ncx'), attrib={'version': '2005-1'},
            nsmap={None: NCX_NS})
        head = etree.SubElement(ncx, NCX('head'))
        etree.SubElement(head, NCX('meta'),
            attrib={'name': 'dtb:uid', 'content': unicode(self.uid)})
        etree.SubElement(head, NCX('meta'),
            attrib={'name': 'dtb:depth', 'content': str(self.toc.depth())})
        etree.SubElement(head, NCX('meta'),
            attrib={'name': 'dtb:totalPageCount', 'content': '0'})
        etree.SubElement(head, NCX('meta'),
            attrib={'name': 'dtb:maxPageNumber', 'content': '0'})
        title = etree.SubElement(ncx, NCX('docTitle'))
        text = etree.SubElement(title, NCX('text'))
        text.text = unicode(self.metadata.title[0])
        navmap = etree.SubElement(ncx, NCX('navMap'))
        self.toc.to_ncx(navmap)
        return ncx

    def to_opf2(self):
        """Serialize the book as OPF 2.0 plus a freshly generated NCX."""
        package = etree.Element(OPF('package'),
            attrib={'version': '2.0', 'unique-identifier': self.uid.id},
            nsmap={None: OPF2_NS})
        metadata = self.metadata.to_opf2(package)
        manifest = self.manifest.to_opf2(package)
        id, href = self._generate_ncx_item()
        etree.SubElement(manifest, OPF('item'),
            attrib={'id': id, 'href': href, 'media-type': NCX_MIME})
        spine = self.spine.to_opf2(package)
        spine.attrib['toc'] = id
        guide = self.guide.to_opf2(package)
        ncx = self._to_ncx()
        return {OPF_MIME: ('content.opf', package),
                NCX_MIME: (href, ncx)}
|
||||
|
||||
|
||||
def main(argv=sys.argv):
    """Command-line driver: parse each OPF named on the command line and
    dump both its OPF 1.x and OPF 2.0 round-trip serializations to stdout.

    Returns 0 (process exit status).
    """
    for arg in argv[1:]:
        oeb = OEBBook(arg)
        for name, doc in oeb.to_opf1().values():
            print etree.tostring(doc, pretty_print=True)
        for name, doc in oeb.to_opf2().values():
            print etree.tostring(doc, pretty_print=True)
    return 0
|
||||
|
||||
# Script entry point: exit status is whatever main() returns.
if __name__ == '__main__':
    sys.exit(main())
|
||||
@ -10,10 +10,12 @@ __copyright__ = '2008, Kovid Goyal <kovid at kovidgoyal.net> ' \
|
||||
import sys, struct, cStringIO, os
|
||||
import functools
|
||||
import re
|
||||
from urlparse import urldefrag
|
||||
from lxml import etree
|
||||
from calibre.ebooks.lit import LitError
|
||||
from calibre.ebooks.lit.maps import OPF_MAP, HTML_MAP
|
||||
import calibre.ebooks.lit.mssha1 as mssha1
|
||||
from calibre.ebooks.lit.oeb import urlnormalize
|
||||
from calibre.ebooks import DRMError
|
||||
from calibre import plugins
|
||||
lzx, lxzerror = plugins['lzx']
|
||||
@ -110,7 +112,7 @@ class UnBinary(object):
|
||||
AMPERSAND_RE = re.compile(
|
||||
r'&(?!(?:#[0-9]+|#x[0-9a-fA-F]+|[a-zA-Z_:][a-zA-Z0-9.-_:]+);)')
|
||||
OPEN_ANGLE_RE = re.compile(r'<<(?![!]--)')
|
||||
CLOSE_ANGLE_RE = re.compile(r'(?<!--)>>')
|
||||
CLOSE_ANGLE_RE = re.compile(r'(?<!--)>>(?=>>|[^>])')
|
||||
DOUBLE_ANGLE_RE = re.compile(r'([<>])\1')
|
||||
|
||||
def __init__(self, bin, path, manifest={}, map=HTML_MAP):
|
||||
@ -322,12 +324,12 @@ class UnBinary(object):
|
||||
href += c
|
||||
count -= 1
|
||||
if count == 0:
|
||||
doc, m, frag = href[1:].partition('#')
|
||||
doc, frag = urldefrag(href[1:])
|
||||
path = self.item_path(doc)
|
||||
if m and frag:
|
||||
path += m + frag
|
||||
self.buf.write((u'"%s"' % path).encode(
|
||||
'ascii', 'xmlcharrefreplace'))
|
||||
if frag:
|
||||
path = '#'.join((path, frag))
|
||||
path = urlnormalize(path)
|
||||
self.buf.write((u'"%s"' % path).encode('utf-8'))
|
||||
state = 'get attr'
|
||||
return index
|
||||
|
||||
@ -385,7 +387,7 @@ def preserve(function):
|
||||
class LitReader(object):
|
||||
PIECE_SIZE = 16
|
||||
XML_PARSER = etree.XMLParser(
|
||||
remove_blank_text=True, resolve_entities=False)
|
||||
recover=True, resolve_entities=False)
|
||||
|
||||
def magic():
|
||||
@preserve
|
||||
@ -781,7 +783,7 @@ class LitReader(object):
|
||||
try:
|
||||
result.append(
|
||||
lzx.decompress(content[base:size], window_bytes))
|
||||
except lzx.LzxError:
|
||||
except lzx.LZXError:
|
||||
self._warn("LZX decompression error; skipping chunk")
|
||||
bytes_remaining -= window_bytes
|
||||
base = size
|
||||
@ -791,7 +793,7 @@ class LitReader(object):
|
||||
lzx.reset()
|
||||
try:
|
||||
result.append(lzx.decompress(content[base:], bytes_remaining))
|
||||
except lzx.LzxError:
|
||||
except lzx.LZXError:
|
||||
self._warn("LZX decompression error; skipping chunk")
|
||||
bytes_remaining = 0
|
||||
if bytes_remaining > 0:
|
||||
|
||||
444
src/calibre/ebooks/lit/stylizer.py
Normal file
444
src/calibre/ebooks/lit/stylizer.py
Normal file
@ -0,0 +1,444 @@
|
||||
# -*- encoding: utf-8 -*-
|
||||
|
||||
'''
|
||||
CSS property propagation class.
|
||||
'''
|
||||
from __future__ import with_statement
|
||||
|
||||
__license__ = 'GPL v3'
|
||||
__copyright__ = '2008, Marshall T. Vandegrift <llasram@gmail.com>'
|
||||
|
||||
import sys
|
||||
import os
|
||||
import locale
|
||||
import codecs
|
||||
import itertools
|
||||
import types
|
||||
import re
|
||||
import copy
|
||||
import cssutils
|
||||
from cssutils.css import CSSStyleRule, CSSPageRule, CSSStyleDeclaration, \
|
||||
CSSValueList, cssproperties
|
||||
from lxml import etree
|
||||
from calibre.ebooks.lit.oeb import XHTML_NS, CSS_MIME, OEB_STYLES
|
||||
from calibre.ebooks.lit.oeb import barename, urlnormalize
|
||||
from calibre.resources import html_css
|
||||
|
||||
HTML_CSS_STYLESHEET = cssutils.parseString(html_css)
|
||||
XHTML_CSS_NAMESPACE = "@namespace url(http://www.w3.org/1999/xhtml);\n"
|
||||
|
||||
INHERITED = set(['azimuth', 'border-collapse', 'border-spacing',
|
||||
'caption-side', 'color', 'cursor', 'direction', 'elevation',
|
||||
'empty-cells', 'font-family', 'font-size', 'font-style',
|
||||
'font-variant', 'font-weight', 'letter-spacing',
|
||||
'line-height', 'list-style-image', 'list-style-position',
|
||||
'list-style-type', 'orphans', 'page-break-inside',
|
||||
'pitch-range', 'pitch', 'quotes', 'richness', 'speak-header',
|
||||
'speak-numeral', 'speak-punctuation', 'speak', 'speech-rate',
|
||||
'stress', 'text-align', 'text-indent', 'text-transform',
|
||||
'visibility', 'voice-family', 'volume', 'white-space',
|
||||
'widows', 'word-spacing'])
|
||||
|
||||
DEFAULTS = {'azimuth': 'center', 'background-attachment': 'scroll',
|
||||
'background-color': 'transparent', 'background-image': 'none',
|
||||
'background-position': '0% 0%', 'background-repeat': 'repeat',
|
||||
'border-bottom-color': ':color', 'border-bottom-style': 'none',
|
||||
'border-bottom-width': 'medium', 'border-collapse': 'separate',
|
||||
'border-left-color': ':color', 'border-left-style': 'none',
|
||||
'border-left-width': 'medium', 'border-right-color': ':color',
|
||||
'border-right-style': 'none', 'border-right-width': 'medium',
|
||||
'border-spacing': 0, 'border-top-color': ':color',
|
||||
'border-top-style': 'none', 'border-top-width': 'medium', 'bottom':
|
||||
'auto', 'caption-side': 'top', 'clear': 'none', 'clip': 'auto',
|
||||
'color': 'black', 'content': 'normal', 'counter-increment': 'none',
|
||||
'counter-reset': 'none', 'cue-after': 'none', 'cue-before': 'none',
|
||||
'cursor': 'auto', 'direction': 'ltr', 'display': 'inline',
|
||||
'elevation': 'level', 'empty-cells': 'show', 'float': 'none',
|
||||
'font-family': 'serif', 'font-size': 'medium', 'font-style':
|
||||
'normal', 'font-variant': 'normal', 'font-weight': 'normal',
|
||||
'height': 'auto', 'left': 'auto', 'letter-spacing': 'normal',
|
||||
'line-height': 'normal', 'list-style-image': 'none',
|
||||
'list-style-position': 'outside', 'list-style-type': 'disc',
|
||||
'margin-bottom': 0, 'margin-left': 0, 'margin-right': 0,
|
||||
'margin-top': 0, 'max-height': 'none', 'max-width': 'none',
|
||||
'min-height': 0, 'min-width': 0, 'orphans': '2',
|
||||
'outline-color': 'invert', 'outline-style': 'none',
|
||||
'outline-width': 'medium', 'overflow': 'visible', 'padding-bottom':
|
||||
0, 'padding-left': 0, 'padding-right': 0, 'padding-top': 0,
|
||||
'page-break-after': 'auto', 'page-break-before': 'auto',
|
||||
'page-break-inside': 'auto', 'pause-after': 0, 'pause-before':
|
||||
0, 'pitch': 'medium', 'pitch-range': '50', 'play-during': 'auto',
|
||||
'position': 'static', 'quotes': u"'“' '”' '‘' '’'", 'richness':
|
||||
'50', 'right': 'auto', 'speak': 'normal', 'speak-header': 'once',
|
||||
'speak-numeral': 'continuous', 'speak-punctuation': 'none',
|
||||
'speech-rate': 'medium', 'stress': '50', 'table-layout': 'auto',
|
||||
'text-align': 'left', 'text-decoration': 'none', 'text-indent':
|
||||
0, 'text-transform': 'none', 'top': 'auto', 'unicode-bidi':
|
||||
'normal', 'vertical-align': 'baseline', 'visibility': 'visible',
|
||||
'voice-family': 'default', 'volume': 'medium', 'white-space':
|
||||
'normal', 'widows': '2', 'width': 'auto', 'word-spacing': 'normal',
|
||||
'z-index': 'auto'}
|
||||
|
||||
FONT_SIZE_NAMES = set(['xx-small', 'x-small', 'small', 'medium', 'large',
|
||||
'x-large', 'xx-large'])
|
||||
|
||||
FONT_SIZE_LIST = [('xx-small', 1, 6.),
|
||||
('x-small', None, 7.),
|
||||
('small', 2, 8.),
|
||||
('medium', 3, 9.),
|
||||
('large', 4, 11.),
|
||||
('x-large', 5, 13.),
|
||||
('xx-large', 6, 15.),
|
||||
(None, 7, 17.)]
|
||||
|
||||
# Derived lookup tables built from FONT_SIZE_LIST: CSS font-size keyword
# -> point size, and legacy HTML <font size=N> number -> point size.
FONT_SIZE_BY_NAME = {}
FONT_SIZE_BY_NUM = {}
for name, num, size in FONT_SIZE_LIST:
    FONT_SIZE_BY_NAME[name] = size
    FONT_SIZE_BY_NUM[num] = size
||||
|
||||
XPNSMAP = {'h': XHTML_NS,}
|
||||
def xpath(elem, expr):
    """Evaluate XPath *expr* on *elem* with 'h' bound to the XHTML namespace."""
    nodes = elem.xpath(expr, namespaces=XPNSMAP)
    return nodes
|
||||
|
||||
|
||||
class Page(object):
    """Output page description: width/height in pixels and render DPI."""

    def __init__(self, width, height, dpi):
        # Coerce all three dimensions to float so later arithmetic is
        # uniform regardless of the caller's input types.
        for attr, value in (('width', width), ('height', height),
                            ('dpi', dpi)):
            setattr(self, attr, float(value))
|
||||
|
||||
class Profiles(object):
    """Known device page profiles."""
    # Sony PRS-500: 584x754 pixel screen at ~168.451 DPI.
    PRS500 = Page(584, 754, 168.451)
    # The PRS-505 shares the PRS-500 screen geometry.
    PRS505 = PRS500
|
||||
|
||||
|
||||
class Stylizer(object):
    """Flatten the CSS cascade for one XHTML document.

    Collects inline <style> blocks and linked stylesheets from the
    document head (layered on top of the built-in HTML stylesheet),
    flattens every rule, and hands out per-element computed Style
    objects via :meth:`style`.
    """

    # Class-level cache of parsed external stylesheets keyed by
    # normalized path; shared by all Stylizer instances.
    STYLESHEETS = {}

    def __init__(self, tree, path, oeb, page=Profiles.PRS505):
        self.page = page
        base = os.path.dirname(path)
        basename = os.path.basename(path)
        cssname = os.path.splitext(basename)[0] + '.css'
        stylesheets = [HTML_CSS_STYLESHEET]
        head = xpath(tree, '/h:html/h:head')[0]
        parser = cssutils.CSSParser()
        parser.setFetcher(lambda path: ('utf-8', oeb.container.read(path)))
        for elem in head:
            tag = barename(elem.tag)
            if tag == 'style':
                # Inline stylesheet embedded in the document head.
                text = ''.join(elem.text)
                stylesheet = parser.parseString(text, href=cssname)
                stylesheets.append(stylesheet)
            elif tag == 'link' \
                 and elem.get('rel', 'stylesheet') == 'stylesheet' \
                 and elem.get('type', CSS_MIME) in OEB_STYLES:
                # External stylesheet: parse once and cache by path.
                href = urlnormalize(elem.attrib['href'])
                path = os.path.join(base, href)
                path = os.path.normpath(path).replace('\\', '/')
                if path in self.STYLESHEETS:
                    stylesheet = self.STYLESHEETS[path]
                else:
                    data = XHTML_CSS_NAMESPACE
                    data += oeb.manifest.hrefs[path].data
                    stylesheet = parser.parseString(data, href=path)
                    self.STYLESHEETS[path] = stylesheet
                stylesheets.append(stylesheet)
        # Flatten every rule; `index` is a global rule counter appended to
        # each selector's specificity as the final cascade tie-breaker.
        rules = []
        index = 0
        self.stylesheets = set()
        for stylesheet in stylesheets:
            href = stylesheet.href
            self.stylesheets.add(href)
            for rule in stylesheet.cssRules:
                rules.extend(self.flatten_rule(rule, href, index))
                index = index + 1
        rules.sort()
        self.rules = rules
        self._styles = {}

    def flatten_rule(self, rule, href, index):
        """Expand one CSS rule into (specificity, selector, style, text,
        href) tuples, one per selector."""
        results = []
        if isinstance(rule, CSSStyleRule):
            style = self.flatten_style(rule.style)
            for selector in rule.selectorList:
                specificity = selector.specificity + (index,)
                text = selector.selectorText
                selector = list(selector.seq)
                results.append((specificity, selector, style, text, href))
        elif isinstance(rule, CSSPageRule):
            # @page rules apply unconditionally, at minimum specificity.
            style = self.flatten_style(rule.style)
            results.append(((0, 0, 0, 0), [], style, '@page', href))
        return results

    def flatten_style(self, cssstyle):
        """Expand shorthands and return a plain {property: value} dict."""
        style = {}
        for prop in cssstyle:
            name = prop.name
            if name in ('margin', 'padding'):
                style.update(self._normalize_edge(prop.cssValue, name))
            elif name == 'font':
                style.update(self._normalize_font(prop.cssValue))
            else:
                style[name] = prop.value
        # Resolve font-size keywords into point sizes immediately.
        if 'font-size' in style:
            size = style['font-size']
            if size == 'normal': size = 'medium'
            if size in FONT_SIZE_NAMES:
                style['font-size'] = "%dpt" % FONT_SIZE_BY_NAME[size]
        return style

    def _normalize_edge(self, cssvalue, name):
        """Expand a margin/padding shorthand into its four edge values,
        following the CSS 1/2/3/4-value rules."""
        style = {}
        if isinstance(cssvalue, CSSValueList):
            primitives = [v.cssText for v in cssvalue]
        else:
            primitives = [cssvalue.cssText]
        if len(primitives) == 1:
            value, = primitives
            values = [value, value, value, value]
        elif len(primitives) == 2:
            vert, horiz = primitives
            values = [vert, horiz, vert, horiz]
        elif len(primitives) == 3:
            top, horiz, bottom = primitives
            values = [top, horiz, bottom, horiz]
        else:
            values = primitives[:4]
        edges = ('top', 'right', 'bottom', 'left')
        for edge, value in itertools.izip(edges, values):
            style["%s-%s" % (name, edge)] = value
        return style

    def _normalize_font(self, cssvalue):
        """Expand the 'font' shorthand into its component properties."""
        composition = ('font-style', 'font-variant', 'font-weight',
                       'font-size', 'line-height', 'font-family')
        style = {}
        if cssvalue.cssText == 'inherit':
            for key in composition:
                style[key] = 'inherit'
        else:
            primitives = [v.cssText for v in cssvalue]
            # Bug fix: this list was previously reversed through the
            # misspelled name 'primitites', raising NameError whenever a
            # 'font' shorthand was encountered.
            primitives.reverse()
            value = primitives.pop()
            for key in composition:
                if cssproperties.cssvalues[key](value):
                    style[key] = value
                    if not primitives: break
                    value = primitives.pop()
            for key in composition:
                if key not in style:
                    style[key] = DEFAULTS[key]
        return style

    def style(self, element):
        """Return the cached Style for *element*, creating it on demand."""
        # Bug fix: previously a bare `except: pass`, which could mask
        # unrelated errors; only a cache miss should fall through.
        try:
            return self._styles[element]
        except KeyError:
            pass
        return Style(element, self)

    def stylesheet(self, name, font_scale=None):
        """Re-serialize the flattened rules originating from stylesheet
        *name* as CSS text, optionally scaling pt font sizes."""
        rules = []
        for _, _, style, selector, href in self.rules:
            if href != name: continue
            if font_scale and 'font-size' in style and \
               style['font-size'].endswith('pt'):
                # Copy before mutating: `style` is shared between rules.
                style = copy.copy(style)
                size = float(style['font-size'][:-2])
                style['font-size'] = "%.2fpt" % (size * font_scale)
            style = ';\n    '.join(': '.join(item) for item in style.items())
            rules.append('%s {\n    %s;\n}' % (selector, style))
        return '\n'.join(rules)
|
||||
|
||||
class Style(object):
    """Computed style for a single element, resolved against the cascade
    flattened by a Stylizer (inheritance, defaults, unit conversion)."""

    def __init__(self, element, stylizer):
        self._element = element
        self._page = stylizer.page
        self._stylizer = stylizer
        self._style = self._assemble_style(element, stylizer)
        # Register ourselves so children can find their parent's style.
        stylizer._styles[element] = self

    def _assemble_style(self, element, stylizer):
        """Merge all matching flattened rules (already in cascade order)
        and finally the element's own style="" attribute."""
        result = {}
        rules = stylizer.rules
        for _, selector, style, _, _ in rules:
            if self._selects_element(element, selector):
                result.update(style)
        try:
            style = CSSStyleDeclaration(element.attrib['style'])
            result.update(stylizer.flatten_style(style))
        except KeyError:
            pass
        return result

    def _selects_element(self, element, selector):
        """Evaluate a cssutils selector sequence against *element* by
        walking the selector items right-to-left."""
        def _selects_element(element, items, index):
            # index == -1: every item matched.
            if index == -1:
                return True
            item = items[index]
            if item.type == 'universal':
                pass
            elif item.type == 'type-selector':
                name1 = ("{%s}%s" % item.value).lower()
                name2 = element.tag.lower()
                if name1 != name2:
                    return False
            elif item.type == 'id':
                name1 = item.value[1:]
                name2 = element.get('id', '')
                if name1 != name2:
                    return False
            elif item.type == 'class':
                name = item.value[1:].lower()
                classes = element.get('class', '').lower().split()
                if name not in classes:
                    return False
            elif item.type == 'child':
                # '>' combinator: continue matching against the parent.
                parent = element.getparent()
                if parent is None:
                    return False
                element = parent
            elif item.type == 'descendant':
                # ' ' combinator: try every ancestor.
                element = element.getparent()
                while element is not None:
                    if _selects_element(element, items, index - 1):
                        return True
                    element = element.getparent()
                return False
            elif item.type == 'pseudo-class':
                # Only :first-child is supported.
                if item.value == ':first-child':
                    e = element.getprevious()
                    if e is not None:
                        return False
                else:
                    return False
            elif item.type == 'pseudo-element':
                # Pseudo-elements never match a real element.
                return False
            else:
                return False
            return _selects_element(element, items, index - 1)
        return _selects_element(element, selector, len(selector) - 1)

    def _has_parent(self):
        # True only when the parent exists AND has already been styled.
        parent = self._element.getparent()
        return (parent is not None) \
            and (parent in self._stylizer._styles)

    def __getitem__(self, name):
        # Prefer a dedicated property (e.g. fontSize, width) when one
        # exists for the DOM name; otherwise resolve and convert units.
        domname = cssproperties._toDOMname(name)
        if hasattr(self, domname):
            return getattr(self, domname)
        return self._unit_convert(self._get(name))

    def _get(self, name):
        """Raw property lookup with CSS inheritance and defaults."""
        result = None
        if name in self._style:
            result = self._style[name]
        if (result == 'inherit'
            or (result is None and name in INHERITED
                and self._has_parent())):
            styles = self._stylizer._styles
            result = styles[self._element.getparent()]._get(name)
        if result is None:
            result = DEFAULTS[name]
        return result

    def _unit_convert(self, value, base=None, font=None):
        """Convert a CSS length string to points; % resolves against
        *base* (default: this element's width), em against *font*."""
        if isinstance(value, (int, long, float)):
            return value
        try:
            if float(value) == 0:
                return 0.0
        except:
            pass
        result = value
        m = re.search(
            r"^(-*[0-9]*\.?[0-9]*)\s*(%|em|px|mm|cm|in|pt|pc)$", value)
        if m is not None and m.group(1):
            value = float(m.group(1))
            unit = m.group(2)
            if unit == '%':
                base = base or self.width
                result = (value/100.0) * base
            elif unit == 'px':
                # CSS pixels mapped through the page profile's DPI.
                result = value * 72.0 / self._page.dpi
            elif unit == 'in':
                result = value * 72.0
            elif unit == 'pt':
                result = value
            elif unit == 'em':
                font = font or self.fontSize
                result = value * font
            elif unit == 'pc':
                result = value * 12.0
            elif unit == 'mm':
                # NOTE(review): 1mm = 72/25.4 ~= 2.835pt; the 0.04 factor
                # looks inconsistent with the other pt conversions above
                # -- confirm the intended output unit.
                result = value * 0.04
            elif unit == 'cm':
                # NOTE(review): see the mm note; 1cm ~= 28.35pt.
                result = value * 0.40
        return result

    @property
    def fontSize(self):
        """Computed font size in points, resolving keywords and sizes
        relative to the parent element's computed size."""
        def normalize_fontsize(value, base=None):
            # Resolve a single font-size value against parent size *base*.
            result = None
            factor = None
            if value == 'inherit':
                value = 'medium'
            if value in FONT_SIZE_NAMES:
                result = FONT_SIZE_BY_NAME[value]
            elif value == 'smaller':
                factor = 1.0/1.2
                for _, _, size in FONT_SIZE_LIST:
                    if base <= size: break
                    factor = None
                    result = size
            elif value == 'larger':
                factor = 1.2
                for _, _, size in reversed(FONT_SIZE_LIST):
                    if base >= size: break
                    factor = None
                    result = size
            else:
                result = self._unit_convert(value, base=base, font=base)
                if result < 0:
                    # Negative sizes are invalid; treat as 'smaller'.
                    result = normalize_fontsize("smaller", base)
            if factor:
                result = factor * base
            return result
        result = None
        if self._has_parent():
            styles = self._stylizer._styles
            base = styles[self._element.getparent()].fontSize
        else:
            base = normalize_fontsize(DEFAULTS['font-size'])
        if 'font-size' in self._style:
            size = self._style['font-size']
            result = normalize_fontsize(size, base)
        else:
            result = base
        # NOTE(review): property objects are data descriptors, so this
        # instance-dict entry can never shadow the property; the intended
        # memoization appears to be a no-op -- confirm.
        self.__dict__['fontSize'] = result
        return result

    @property
    def width(self):
        """Computed width in points, resolved against the parent's width
        (or the page width at the root)."""
        result = None
        base = None
        if self._has_parent():
            styles = self._stylizer._styles
            base = styles[self._element.getparent()].width
        else:
            base = self._page.width
        if 'width' in self._style:
            width = self._style['width']
            if width == 'auto':
                result = base
            else:
                result = self._unit_convert(width, base=base)
        else:
            result = base
        # NOTE(review): same ineffective-memoization concern as fontSize.
        self.__dict__['width'] = result
        return result

    def __str__(self):
        items = self._style.items()
        return '; '.join("%s: %s" % (key, val) for key, val in items)
|
||||
753
src/calibre/ebooks/lit/writer.py
Normal file
753
src/calibre/ebooks/lit/writer.py
Normal file
@ -0,0 +1,753 @@
|
||||
'''
|
||||
Basic support for writing LIT files.
|
||||
'''
|
||||
from __future__ import with_statement
|
||||
|
||||
__license__ = 'GPL v3'
|
||||
__copyright__ = '2008, Marshall T. Vandegrift <llasram@gmail.com>'
|
||||
|
||||
import sys
|
||||
import os
|
||||
from cStringIO import StringIO
|
||||
from struct import pack
|
||||
from itertools import izip, count, chain
|
||||
import time
|
||||
import random
|
||||
import re
|
||||
import copy
|
||||
import uuid
|
||||
import functools
|
||||
import logging
|
||||
from urlparse import urldefrag
|
||||
from urllib import unquote as urlunquote
|
||||
from lxml import etree
|
||||
from calibre.ebooks.lit.reader import DirectoryEntry
|
||||
import calibre.ebooks.lit.maps as maps
|
||||
from calibre.ebooks.lit.oeb import OEB_DOCS, OEB_STYLES, OEB_CSS_MIME, \
|
||||
CSS_MIME, OPF_MIME, XML_NS, XML
|
||||
from calibre.ebooks.lit.oeb import namespace, barename, urlnormalize, xpath
|
||||
from calibre.ebooks.lit.oeb import FauxLogger, OEBBook
|
||||
from calibre.ebooks.lit.stylizer import Stylizer
|
||||
from calibre.ebooks.lit.lzx import Compressor
|
||||
import calibre
|
||||
from calibre import LoggingInterface
|
||||
from calibre import plugins
|
||||
msdes, msdeserror = plugins['msdes']
|
||||
import calibre.ebooks.lit.mssha1 as mssha1
|
||||
from calibre.customize.ui import run_plugins_on_postprocess
|
||||
|
||||
__all__ = ['LitWriter']
|
||||
|
||||
LIT_IMAGES = set(['image/png', 'image/jpeg', 'image/gif'])
|
||||
LIT_MIMES = OEB_DOCS | OEB_STYLES | LIT_IMAGES
|
||||
|
||||
MS_COVER_TYPE = 'other.ms-coverimage-standard'
|
||||
ALL_MS_COVER_TYPES = [
|
||||
(MS_COVER_TYPE, 'Standard cover image'),
|
||||
('other.ms-thumbimage-standard', 'Standard thumbnail image'),
|
||||
('other.ms-coverimage', 'PocketPC cover image'),
|
||||
('other.ms-thumbimage', 'PocketPC thumbnail image'),
|
||||
]
|
||||
|
||||
def invert_tag_map(tag_map):
|
||||
tags, dattrs, tattrs = tag_map
|
||||
tags = dict((tags[i], i) for i in xrange(len(tags)))
|
||||
dattrs = dict((v, k) for k, v in dattrs.items())
|
||||
tattrs = [dict((v, k) for k, v in (map or {}).items()) for map in tattrs]
|
||||
for map in tattrs:
|
||||
if map: map.update(dattrs)
|
||||
tattrs[0] = dattrs
|
||||
return tags, tattrs
|
||||
|
||||
OPF_MAP = invert_tag_map(maps.OPF_MAP)
|
||||
HTML_MAP = invert_tag_map(maps.HTML_MAP)
|
||||
|
||||
LIT_MAGIC = 'ITOLITLS'
|
||||
|
||||
LITFILE_GUID = "{0A9007C1-4076-11D3-8789-0000F8105754}"
|
||||
PIECE3_GUID = "{0A9007C3-4076-11D3-8789-0000F8105754}"
|
||||
PIECE4_GUID = "{0A9007C4-4076-11D3-8789-0000F8105754}"
|
||||
DESENCRYPT_GUID = "{67F6E4A2-60BF-11D3-8540-00C04F58C3CF}"
|
||||
LZXCOMPRESS_GUID = "{0A9007C6-4076-11D3-8789-0000F8105754}"
|
||||
|
||||
def packguid(guid):
|
||||
values = guid[1:9], guid[10:14], guid[15:19], \
|
||||
guid[20:22], guid[22:24], guid[25:27], guid[27:29], \
|
||||
guid[29:31], guid[31:33], guid[33:35], guid[35:37]
|
||||
values = [int(value, 16) for value in values]
|
||||
return pack("<LHHBBBBBBBB", *values)
|
||||
|
||||
FLAG_OPENING = (1 << 0)
|
||||
FLAG_CLOSING = (1 << 1)
|
||||
FLAG_BLOCK = (1 << 2)
|
||||
FLAG_HEAD = (1 << 3)
|
||||
FLAG_ATOM = (1 << 4)
|
||||
FLAG_CUSTOM = (1 << 15)
|
||||
ATTR_NUMBER = 0xffff
|
||||
|
||||
PIECE_SIZE = 16
|
||||
PRIMARY_SIZE = 40
|
||||
SECONDARY_SIZE = 232
|
||||
DCHUNK_SIZE = 0x2000
|
||||
CCHUNK_SIZE = 0x0200
|
||||
ULL_NEG1 = 0xffffffffffffffff
|
||||
ROOT_OFFSET = 1284508585713721976
|
||||
ROOT_SIZE = 4165955342166943123
|
||||
|
||||
BLOCK_CAOL = \
|
||||
"\x43\x41\x4f\x4c\x02\x00\x00\x00" \
|
||||
"\x50\x00\x00\x00\x37\x13\x03\x00" \
|
||||
"\x00\x00\x00\x00\x00\x20\x00\x00" \
|
||||
"\x00\x02\x00\x00\x00\x00\x10\x00" \
|
||||
"\x00\x00\x02\x00\x00\x00\x00\x00" \
|
||||
"\x00\x00\x00\x00\x00\x00\x00\x00"
|
||||
BLOCK_ITSF = \
|
||||
"\x49\x54\x53\x46\x04\x00\x00\x00" \
|
||||
"\x20\x00\x00\x00\x01\x00\x00\x00"
|
||||
|
||||
MSDES_CONTROL = \
|
||||
"\x03\x00\x00\x00\x29\x17\x00\x00" \
|
||||
"\x01\x00\x00\x00\xa5\xa5\x00\x00"
|
||||
LZXC_CONTROL = \
|
||||
"\x07\x00\x00\x00\x4c\x5a\x58\x43" \
|
||||
"\x03\x00\x00\x00\x04\x00\x00\x00" \
|
||||
"\x04\x00\x00\x00\x02\x00\x00\x00" \
|
||||
"\x00\x00\x00\x00\x00\x00\x00\x00"
|
||||
|
||||
COLLAPSE = re.compile(r'[ \t\r\n\v]+')
|
||||
|
||||
def prefixname(name, nsrmap):
|
||||
prefix = nsrmap[namespace(name)]
|
||||
if not prefix:
|
||||
return barename(name)
|
||||
return ':'.join((prefix, barename(name)))
|
||||
|
||||
def decint(value):
|
||||
bytes = []
|
||||
while True:
|
||||
b = value & 0x7f
|
||||
value >>= 7
|
||||
if bytes:
|
||||
b |= 0x80
|
||||
bytes.append(chr(b))
|
||||
if value == 0:
|
||||
break
|
||||
return ''.join(reversed(bytes))
|
||||
|
||||
def randbytes(n):
|
||||
return ''.join(chr(random.randint(0, 255)) for x in xrange(n))
|
||||
|
||||
def warn(x):
|
||||
print x
|
||||
|
||||
class ReBinary(object):
|
||||
NSRMAP = {'': None, XML_NS: 'xml'}
|
||||
|
||||
def __init__(self, root, path, oeb, map=HTML_MAP, logger=FauxLogger()):
|
||||
self.path = path
|
||||
self.logger = logger
|
||||
self.dir = os.path.dirname(path)
|
||||
self.manifest = oeb.manifest
|
||||
self.tags, self.tattrs = map
|
||||
self.buf = StringIO()
|
||||
self.anchors = []
|
||||
self.page_breaks = []
|
||||
self.is_html = is_html = map is HTML_MAP
|
||||
self.stylizer = Stylizer(root, path, oeb) if is_html else None
|
||||
self.tree_to_binary(root)
|
||||
self.content = self.buf.getvalue()
|
||||
self.ahc = self.build_ahc() if is_html else None
|
||||
self.aht = self.build_aht() if is_html else None
|
||||
|
||||
def write(self, *values):
|
||||
for value in values:
|
||||
if isinstance(value, (int, long)):
|
||||
value = unichr(value)
|
||||
self.buf.write(value.encode('utf-8'))
|
||||
|
||||
def is_block(self, style):
|
||||
return style['display'] not in ('inline', 'inline-block')
|
||||
|
||||
def tree_to_binary(self, elem, nsrmap=NSRMAP, parents=[],
|
||||
inhead=False, preserve=False):
|
||||
if not isinstance(elem.tag, basestring):
|
||||
self.write(etree.tostring(elem))
|
||||
return
|
||||
nsrmap = copy.copy(nsrmap)
|
||||
attrib = dict(elem.attrib)
|
||||
style = self.stylizer.style(elem) if self.stylizer else None
|
||||
for key, value in elem.nsmap.items():
|
||||
if value not in nsrmap or nsrmap[value] != key:
|
||||
xmlns = ('xmlns:' + key) if key else 'xmlns'
|
||||
attrib[xmlns] = value
|
||||
nsrmap[value] = key
|
||||
tag = prefixname(elem.tag, nsrmap)
|
||||
tag_offset = self.buf.tell()
|
||||
if tag == 'head':
|
||||
inhead = True
|
||||
flags = FLAG_OPENING
|
||||
if not elem.text and len(elem) == 0:
|
||||
flags |= FLAG_CLOSING
|
||||
if inhead:
|
||||
flags |= FLAG_HEAD
|
||||
if style and self.is_block(style):
|
||||
flags |= FLAG_BLOCK
|
||||
self.write(0, flags)
|
||||
tattrs = self.tattrs[0]
|
||||
if tag in self.tags:
|
||||
index = self.tags[tag]
|
||||
self.write(index)
|
||||
if self.tattrs[index]:
|
||||
tattrs = self.tattrs[index]
|
||||
else:
|
||||
self.write(FLAG_CUSTOM, len(tag)+1, tag)
|
||||
last_break = self.page_breaks[-1][0] if self.page_breaks else None
|
||||
if style and last_break != tag_offset \
|
||||
and style['page-break-before'] not in ('avoid', 'auto'):
|
||||
self.page_breaks.append((tag_offset, list(parents)))
|
||||
for attr, value in attrib.items():
|
||||
attr = prefixname(attr, nsrmap)
|
||||
if attr in ('href', 'src'):
|
||||
value = urlnormalize(value)
|
||||
path, frag = urldefrag(value)
|
||||
prefix = unichr(3)
|
||||
if path in self.manifest.hrefs:
|
||||
prefix = unichr(2)
|
||||
value = self.manifest.hrefs[path].id
|
||||
if frag:
|
||||
value = '#'.join((value, frag))
|
||||
value = prefix + value
|
||||
elif attr in ('id', 'name'):
|
||||
self.anchors.append((value, tag_offset))
|
||||
elif attr.startswith('ms--'):
|
||||
attr = '%' + attr[4:]
|
||||
elif tag == 'link' and attr == 'type' and value in OEB_STYLES:
|
||||
value = OEB_CSS_MIME
|
||||
if attr in tattrs:
|
||||
self.write(tattrs[attr])
|
||||
else:
|
||||
self.write(FLAG_CUSTOM, len(attr)+1, attr)
|
||||
try:
|
||||
self.write(ATTR_NUMBER, int(value)+1)
|
||||
except ValueError:
|
||||
self.write(len(value)+1, value)
|
||||
self.write(0)
|
||||
old_preserve = preserve
|
||||
if style:
|
||||
preserve = (style['white-space'] in ('pre', 'pre-wrap'))
|
||||
xml_space = elem.get(XML('space'))
|
||||
if xml_space == 'preserve':
|
||||
preserve = True
|
||||
elif xml_space == 'normal':
|
||||
preserve = False
|
||||
if elem.text:
|
||||
if preserve:
|
||||
self.write(elem.text)
|
||||
elif len(elem) == 0 or not elem.text.isspace():
|
||||
self.write(COLLAPSE.sub(' ', elem.text))
|
||||
# else: de nada
|
||||
parents.append(tag_offset)
|
||||
child = cstyle = nstyle = None
|
||||
for next in chain(elem, [None]):
|
||||
if self.stylizer:
|
||||
nstyle = None if next is None else self.stylizer.style(next)
|
||||
if child is not None:
|
||||
if not preserve \
|
||||
and (inhead or not nstyle
|
||||
or self.is_block(cstyle)
|
||||
or self.is_block(nstyle)) \
|
||||
and child.tail and child.tail.isspace():
|
||||
child.tail = None
|
||||
self.tree_to_binary(child, nsrmap, parents, inhead, preserve)
|
||||
child, cstyle = next, nstyle
|
||||
parents.pop()
|
||||
preserve = old_preserve
|
||||
if not flags & FLAG_CLOSING:
|
||||
self.write(0, (flags & ~FLAG_OPENING) | FLAG_CLOSING, 0)
|
||||
if elem.tail and tag != 'html':
|
||||
tail = elem.tail
|
||||
if not preserve:
|
||||
tail = COLLAPSE.sub(' ', tail)
|
||||
self.write(tail)
|
||||
if style and style['page-break-after'] not in ('avoid', 'auto'):
|
||||
self.page_breaks.append((self.buf.tell(), list(parents)))
|
||||
|
||||
def build_ahc(self):
|
||||
if len(self.anchors) > 6:
|
||||
self.logger.log_warn("More than six anchors in file %r. " \
|
||||
"Some links may not work properly." % self.path)
|
||||
data = StringIO()
|
||||
data.write(unichr(len(self.anchors)).encode('utf-8'))
|
||||
for anchor, offset in self.anchors:
|
||||
data.write(unichr(len(anchor)).encode('utf-8'))
|
||||
data.write(anchor)
|
||||
data.write(pack('<I', offset))
|
||||
return data.getvalue()
|
||||
|
||||
def build_aht(self):
|
||||
return pack('<I', 0)
|
||||
|
||||
|
||||
def preserve(function):
|
||||
def wrapper(self, *args, **kwargs):
|
||||
opos = self._stream.tell()
|
||||
try:
|
||||
return function(self, *args, **kwargs)
|
||||
finally:
|
||||
self._stream.seek(opos)
|
||||
functools.update_wrapper(wrapper, function)
|
||||
return wrapper
|
||||
|
||||
class LitWriter(object):
|
||||
def __init__(self, oeb, logger=FauxLogger()):
|
||||
self._oeb = oeb
|
||||
self._logger = logger
|
||||
self._litize_oeb()
|
||||
|
||||
def _litize_oeb(self):
|
||||
oeb = self._oeb
|
||||
oeb.metadata.add('calibre-oeb2lit-version', calibre.__version__)
|
||||
cover = None
|
||||
if oeb.metadata.cover:
|
||||
id = str(oeb.metadata.cover[0])
|
||||
cover = oeb.manifest[id]
|
||||
elif MS_COVER_TYPE in oeb.guide:
|
||||
href = oeb.guide[MS_COVER_TYPE].href
|
||||
cover = oeb.manifest.hrefs[href]
|
||||
elif 'cover' in oeb.guide:
|
||||
href = oeb.guide['cover'].href
|
||||
cover = oeb.manifest.hrefs[href]
|
||||
else:
|
||||
html = oeb.spine[0].data
|
||||
imgs = xpath(html, '//img[position()=1]')
|
||||
href = imgs[0].get('src') if imgs else None
|
||||
cover = oeb.manifest.hrefs[href] if href else None
|
||||
if cover:
|
||||
if not oeb.metadata.cover:
|
||||
oeb.metadata.add('cover', cover.id)
|
||||
for type, title in ALL_MS_COVER_TYPES:
|
||||
if type not in oeb.guide:
|
||||
oeb.guide.add(type, title, cover.href)
|
||||
else:
|
||||
self._logger.log_warn('No suitable cover image found.')
|
||||
|
||||
def dump(self, stream):
|
||||
self._stream = stream
|
||||
self._sections = [StringIO() for i in xrange(4)]
|
||||
self._directory = []
|
||||
self._meta = None
|
||||
self._dump()
|
||||
|
||||
def _write(self, *data):
|
||||
for datum in data:
|
||||
self._stream.write(datum)
|
||||
|
||||
@preserve
|
||||
def _writeat(self, pos, *data):
|
||||
self._stream.seek(pos)
|
||||
self._write(*data)
|
||||
|
||||
def _tell(self):
|
||||
return self._stream.tell()
|
||||
|
||||
def _dump(self):
|
||||
# Build content sections
|
||||
self._build_sections()
|
||||
|
||||
# Build directory chunks
|
||||
dcounts, dchunks, ichunk = self._build_dchunks()
|
||||
|
||||
# Write headers
|
||||
self._write(LIT_MAGIC)
|
||||
self._write(pack('<IIII',
|
||||
1, PRIMARY_SIZE, 5, SECONDARY_SIZE))
|
||||
self._write(packguid(LITFILE_GUID))
|
||||
offset = self._tell()
|
||||
pieces = list(xrange(offset, offset + (PIECE_SIZE * 5), PIECE_SIZE))
|
||||
self._write((5 * PIECE_SIZE) * '\0')
|
||||
aoli1 = len(dchunks) if ichunk else ULL_NEG1
|
||||
last = len(dchunks) - 1
|
||||
ddepth = 2 if ichunk else 1
|
||||
self._write(pack('<IIQQQQIIIIQIIQQQQIIIIQIIIIQ',
|
||||
2, 0x98, aoli1, 0, last, 0, DCHUNK_SIZE, 2, 0, ddepth, 0,
|
||||
len(self._directory), 0, ULL_NEG1, 0, 0, 0, CCHUNK_SIZE, 2,
|
||||
0, 1, 0, len(dcounts), 0, 0x100000, 0x20000, 0))
|
||||
self._write(BLOCK_CAOL)
|
||||
self._write(BLOCK_ITSF)
|
||||
conoff_offset = self._tell()
|
||||
timestamp = int(time.time())
|
||||
self._write(pack('<QII', 0, timestamp, 0x409))
|
||||
|
||||
# Piece #0
|
||||
piece0_offset = self._tell()
|
||||
self._write(pack('<II', 0x1fe, 0))
|
||||
filesz_offset = self._tell()
|
||||
self._write(pack('<QQ', 0, 0))
|
||||
self._writeat(pieces[0], pack('<QQ',
|
||||
piece0_offset, self._tell() - piece0_offset))
|
||||
|
||||
# Piece #1: Directory chunks
|
||||
piece1_offset = self._tell()
|
||||
number = len(dchunks) + ((ichunk and 1) or 0)
|
||||
self._write('IFCM', pack('<IIIQQ',
|
||||
1, DCHUNK_SIZE, 0x100000, ULL_NEG1, number))
|
||||
for dchunk in dchunks:
|
||||
self._write(dchunk)
|
||||
if ichunk:
|
||||
self._write(ichunk)
|
||||
self._writeat(pieces[1], pack('<QQ',
|
||||
piece1_offset, self._tell() - piece1_offset))
|
||||
|
||||
# Piece #2: Count chunks
|
||||
piece2_offset = self._tell()
|
||||
self._write('IFCM', pack('<IIIQQ',
|
||||
1, CCHUNK_SIZE, 0x20000, ULL_NEG1, 1))
|
||||
cchunk = StringIO()
|
||||
last = 0
|
||||
for i, dcount in izip(count(), dcounts):
|
||||
cchunk.write(decint(last))
|
||||
cchunk.write(decint(dcount))
|
||||
cchunk.write(decint(i))
|
||||
last = dcount
|
||||
cchunk = cchunk.getvalue()
|
||||
rem = CCHUNK_SIZE - (len(cchunk) + 50)
|
||||
self._write('AOLL', pack('<IQQQQQ',
|
||||
rem, 0, ULL_NEG1, ULL_NEG1, 0, 1))
|
||||
filler = '\0' * rem
|
||||
self._write(cchunk, filler, pack('<H', len(dcounts)))
|
||||
self._writeat(pieces[2], pack('<QQ',
|
||||
piece2_offset, self._tell() - piece2_offset))
|
||||
|
||||
# Piece #3: GUID3
|
||||
piece3_offset = self._tell()
|
||||
self._write(packguid(PIECE3_GUID))
|
||||
self._writeat(pieces[3], pack('<QQ',
|
||||
piece3_offset, self._tell() - piece3_offset))
|
||||
|
||||
# Piece #4: GUID4
|
||||
piece4_offset = self._tell()
|
||||
self._write(packguid(PIECE4_GUID))
|
||||
self._writeat(pieces[4], pack('<QQ',
|
||||
piece4_offset, self._tell() - piece4_offset))
|
||||
|
||||
# The actual section content
|
||||
content_offset = self._tell()
|
||||
self._writeat(conoff_offset, pack('<Q', content_offset))
|
||||
self._write(self._sections[0].getvalue())
|
||||
self._writeat(filesz_offset, pack('<Q', self._tell()))
|
||||
|
||||
def _add_file(self, name, data, secnum=0):
|
||||
if len(data) > 0:
|
||||
section = self._sections[secnum]
|
||||
offset = section.tell()
|
||||
section.write(data)
|
||||
else:
|
||||
offset = 0
|
||||
self._directory.append(
|
||||
DirectoryEntry(name, secnum, offset, len(data)))
|
||||
|
||||
def _add_folder(self, name, offset=0, size=0):
|
||||
if not name.endswith('/'):
|
||||
name += '/'
|
||||
self._directory.append(
|
||||
DirectoryEntry(name, 0, offset, size))
|
||||
|
||||
def _djoin(self, *names):
|
||||
return '/'.join(names)
|
||||
|
||||
def _build_sections(self):
|
||||
self._add_folder('/', ROOT_OFFSET, ROOT_SIZE)
|
||||
self._build_data()
|
||||
self._build_manifest()
|
||||
self._build_page_breaks()
|
||||
self._build_meta()
|
||||
self._build_drm_storage()
|
||||
self._build_version()
|
||||
self._build_namelist()
|
||||
self._build_storage()
|
||||
self._build_transforms()
|
||||
|
||||
def _build_data(self):
|
||||
self._add_folder('/data')
|
||||
for item in self._oeb.manifest.values():
|
||||
if item.media_type not in LIT_MIMES:
|
||||
self._logger.log_warn("File %r of unknown media-type %r " \
|
||||
"excluded from output." % (item.href, item.media_type))
|
||||
continue
|
||||
name = '/data/' + item.id
|
||||
data = item.data
|
||||
secnum = 0
|
||||
if not isinstance(data, basestring):
|
||||
self._add_folder(name)
|
||||
rebin = ReBinary(data, item.href, self._oeb, map=HTML_MAP,
|
||||
logger=self._logger)
|
||||
self._add_file(name + '/ahc', rebin.ahc, 0)
|
||||
self._add_file(name + '/aht', rebin.aht, 0)
|
||||
item.page_breaks = rebin.page_breaks
|
||||
data = rebin.content
|
||||
name = name + '/content'
|
||||
secnum = 1
|
||||
self._add_file(name, data, secnum)
|
||||
item.size = len(data)
|
||||
|
||||
def _build_manifest(self):
|
||||
states = ['linear', 'nonlinear', 'css', 'images']
|
||||
manifest = dict((state, []) for state in states)
|
||||
for item in self._oeb.manifest.values():
|
||||
if item.spine_position is not None:
|
||||
key = 'linear' if item.linear else 'nonlinear'
|
||||
manifest[key].append(item)
|
||||
elif item.media_type == CSS_MIME:
|
||||
manifest['css'].append(item)
|
||||
elif item.media_type in LIT_IMAGES:
|
||||
manifest['images'].append(item)
|
||||
data = StringIO()
|
||||
data.write(pack('<Bc', 1, '\\'))
|
||||
offset = 0
|
||||
for state in states:
|
||||
items = manifest[state]
|
||||
items.sort()
|
||||
data.write(pack('<I', len(items)))
|
||||
for item in items:
|
||||
id, media_type = item.id, item.media_type
|
||||
href = urlunquote(item.href)
|
||||
item.offset = offset \
|
||||
if state in ('linear', 'nonlinear') else 0
|
||||
data.write(pack('<I', item.offset))
|
||||
entry = [unichr(len(id)), unicode(id),
|
||||
unichr(len(href)), unicode(href),
|
||||
unichr(len(media_type)), unicode(media_type)]
|
||||
for value in entry:
|
||||
data.write(value.encode('utf-8'))
|
||||
data.write('\0')
|
||||
offset += item.size
|
||||
self._add_file('/manifest', data.getvalue())
|
||||
|
||||
def _build_page_breaks(self):
|
||||
pb1 = StringIO()
|
||||
pb2 = StringIO()
|
||||
pb3 = StringIO()
|
||||
pb3cur = 0
|
||||
bits = 0
|
||||
for item in self._oeb.spine:
|
||||
page_breaks = copy.copy(item.page_breaks)
|
||||
if not item.linear:
|
||||
page_breaks.insert(0, (0, []))
|
||||
for pbreak, parents in page_breaks:
|
||||
pb3cur = (pb3cur << 2) | 1
|
||||
if len(parents) > 1:
|
||||
pb3cur |= 0x2
|
||||
bits += 2
|
||||
if bits >= 8:
|
||||
pb3.write(pack('<B', pb3cur))
|
||||
pb3cur = 0
|
||||
bits = 0
|
||||
pbreak += item.offset
|
||||
pb1.write(pack('<II', pbreak, pb2.tell()))
|
||||
pb2.write(pack('<I', len(parents)))
|
||||
for parent in parents:
|
||||
pb2.write(pack('<I', parent))
|
||||
if bits != 0:
|
||||
pb3cur <<= (8 - bits)
|
||||
pb3.write(pack('<B', pb3cur))
|
||||
self._add_file('/pb1', pb1.getvalue(), 0)
|
||||
self._add_file('/pb2', pb2.getvalue(), 0)
|
||||
self._add_file('/pb3', pb3.getvalue(), 0)
|
||||
|
||||
def _build_meta(self):
|
||||
_, meta = self._oeb.to_opf1()[OPF_MIME]
|
||||
meta.attrib['ms--minimum_level'] = '0'
|
||||
meta.attrib['ms--attr5'] = '1'
|
||||
meta.attrib['ms--guid'] = '{%s}' % str(uuid.uuid4()).upper()
|
||||
rebin = ReBinary(meta, 'content.opf', self._oeb, map=OPF_MAP,
|
||||
logger=self._logger)
|
||||
meta = rebin.content
|
||||
self._meta = meta
|
||||
self._add_file('/meta', meta)
|
||||
|
||||
def _build_drm_storage(self):
|
||||
drmsource = u'Free as in freedom\0'.encode('utf-16-le')
|
||||
self._add_file('/DRMStorage/DRMSource', drmsource)
|
||||
tempkey = self._calculate_deskey([self._meta, drmsource])
|
||||
msdes.deskey(tempkey, msdes.EN0)
|
||||
self._add_file('/DRMStorage/DRMSealed', msdes.des("\0" * 16))
|
||||
self._bookkey = '\0' * 8
|
||||
self._add_file('/DRMStorage/ValidationStream', 'MSReader', 3)
|
||||
|
||||
def _build_version(self):
|
||||
self._add_file('/Version', pack('<HH', 8, 1))
|
||||
|
||||
def _build_namelist(self):
|
||||
data = StringIO()
|
||||
data.write(pack('<HH', 0x3c, len(self._sections)))
|
||||
names = ['Uncompressed', 'MSCompressed', 'EbEncryptDS',
|
||||
'EbEncryptOnlyDS']
|
||||
for name in names:
|
||||
data.write(pack('<H', len(name)))
|
||||
data.write(name.encode('utf-16-le'))
|
||||
data.write('\0\0')
|
||||
self._add_file('::DataSpace/NameList', data.getvalue())
|
||||
|
||||
def _build_storage(self):
|
||||
mapping = [(1, 'MSCompressed', (LZXCOMPRESS_GUID,)),
|
||||
(2, 'EbEncryptDS', (LZXCOMPRESS_GUID, DESENCRYPT_GUID)),
|
||||
(3, 'EbEncryptOnlyDS', (DESENCRYPT_GUID,)),]
|
||||
for secnum, name, transforms in mapping:
|
||||
root = '::DataSpace/Storage/' + name
|
||||
data = self._sections[secnum].getvalue()
|
||||
cdata, sdata, tdata, rdata = '', '', '', ''
|
||||
for guid in transforms:
|
||||
tdata = packguid(guid) + tdata
|
||||
sdata = sdata + pack('<Q', len(data))
|
||||
if guid == DESENCRYPT_GUID:
|
||||
cdata = MSDES_CONTROL + cdata
|
||||
if not data: continue
|
||||
msdes.deskey(self._bookkey, msdes.EN0)
|
||||
pad = 8 - (len(data) & 0x7)
|
||||
if pad != 8:
|
||||
data = data + ('\0' * pad)
|
||||
data = msdes.des(data)
|
||||
elif guid == LZXCOMPRESS_GUID:
|
||||
cdata = LZXC_CONTROL + cdata
|
||||
if not data: continue
|
||||
unlen = len(data)
|
||||
lzx = Compressor(17)
|
||||
data, rtable = lzx.compress(data, flush=True)
|
||||
rdata = StringIO()
|
||||
rdata.write(pack('<IIIIQQQQ',
|
||||
3, len(rtable), 8, 0x28, unlen, len(data), 0x8000, 0))
|
||||
for uncomp, comp in rtable[:-1]:
|
||||
rdata.write(pack('<Q', comp))
|
||||
rdata = rdata.getvalue()
|
||||
self._add_file(root + '/Content', data)
|
||||
self._add_file(root + '/ControlData', cdata)
|
||||
self._add_file(root + '/SpanInfo', sdata)
|
||||
self._add_file(root + '/Transform/List', tdata)
|
||||
troot = root + '/Transform'
|
||||
for guid in transforms:
|
||||
dname = self._djoin(troot, guid, 'InstanceData')
|
||||
self._add_folder(dname)
|
||||
if guid == LZXCOMPRESS_GUID:
|
||||
dname += '/ResetTable'
|
||||
self._add_file(dname, rdata)
|
||||
|
||||
def _build_transforms(self):
|
||||
for guid in (LZXCOMPRESS_GUID, DESENCRYPT_GUID):
|
||||
self._add_folder('::Transform/'+ guid)
|
||||
|
||||
def _calculate_deskey(self, hashdata):
|
||||
prepad = 2
|
||||
hash = mssha1.new()
|
||||
for data in hashdata:
|
||||
if prepad > 0:
|
||||
data = ("\000" * prepad) + data
|
||||
prepad = 0
|
||||
postpad = 64 - (len(data) % 64)
|
||||
if postpad < 64:
|
||||
data = data + ("\000" * postpad)
|
||||
hash.update(data)
|
||||
digest = hash.digest()
|
||||
key = [0] * 8
|
||||
for i in xrange(0, len(digest)):
|
||||
key[i % 8] ^= ord(digest[i])
|
||||
return ''.join(chr(x) for x in key)
|
||||
|
||||
def _build_dchunks(self):
|
||||
ddata = []
|
||||
directory = list(self._directory)
|
||||
directory.sort(cmp=lambda x, y: \
|
||||
cmp(x.name.lower(), y.name.lower()))
|
||||
qrn = 1 + (1 << 2)
|
||||
dchunk = StringIO()
|
||||
dcount = 0
|
||||
quickref = []
|
||||
name = directory[0].name
|
||||
for entry in directory:
|
||||
next = ''.join([decint(len(entry.name)), entry.name,
|
||||
decint(entry.section), decint(entry.offset),
|
||||
decint(entry.size)])
|
||||
usedlen = dchunk.tell() + len(next) + (len(quickref) * 2) + 52
|
||||
if usedlen >= DCHUNK_SIZE:
|
||||
ddata.append((dchunk.getvalue(), quickref, dcount, name))
|
||||
dchunk = StringIO()
|
||||
dcount = 0
|
||||
quickref = []
|
||||
name = entry.name
|
||||
if (dcount % qrn) == 0:
|
||||
quickref.append(dchunk.tell())
|
||||
dchunk.write(next)
|
||||
dcount = dcount + 1
|
||||
ddata.append((dchunk.getvalue(), quickref, dcount, name))
|
||||
cidmax = len(ddata) - 1
|
||||
rdcount = 0
|
||||
dchunks = []
|
||||
dcounts = []
|
||||
ichunk = None
|
||||
if len(ddata) > 1:
|
||||
ichunk = StringIO()
|
||||
for cid, (content, quickref, dcount, name) in izip(count(), ddata):
|
||||
dchunk = StringIO()
|
||||
prev = cid - 1 if cid > 0 else ULL_NEG1
|
||||
next = cid + 1 if cid < cidmax else ULL_NEG1
|
||||
rem = DCHUNK_SIZE - (len(content) + 50)
|
||||
pad = rem - (len(quickref) * 2)
|
||||
dchunk.write('AOLL')
|
||||
dchunk.write(pack('<IQQQQQ', rem, cid, prev, next, rdcount, 1))
|
||||
dchunk.write(content)
|
||||
dchunk.write('\0' * pad)
|
||||
for ref in reversed(quickref):
|
||||
dchunk.write(pack('<H', ref))
|
||||
dchunk.write(pack('<H', dcount))
|
||||
rdcount = rdcount + dcount
|
||||
dchunks.append(dchunk.getvalue())
|
||||
dcounts.append(dcount)
|
||||
if ichunk:
|
||||
ichunk.write(decint(len(name)))
|
||||
ichunk.write(name)
|
||||
ichunk.write(decint(cid))
|
||||
if ichunk:
|
||||
rem = DCHUNK_SIZE - (ichunk.tell() + 16)
|
||||
pad = rem - 2
|
||||
ichunk = ''.join(['AOLI', pack('<IQ', rem, len(dchunks)),
|
||||
ichunk.getvalue(), ('\0' * pad), pack('<H', len(dchunks))])
|
||||
return dcounts, dchunks, ichunk
|
||||
|
||||
|
||||
def option_parser():
|
||||
from calibre.utils.config import OptionParser
|
||||
parser = OptionParser(usage=_('%prog [options] OPFFILE'))
|
||||
parser.add_option(
|
||||
'-o', '--output', default=None,
|
||||
help=_('Output file. Default is derived from input filename.'))
|
||||
parser.add_option(
|
||||
'--verbose', default=False, action='store_true',
|
||||
help=_('Useful for debugging.'))
|
||||
return parser
|
||||
|
||||
def oeb2lit(opts, opfpath):
|
||||
logger = LoggingInterface(logging.getLogger('oeb2lit'))
|
||||
logger.setup_cli_handler(opts.verbose)
|
||||
litpath = opts.output
|
||||
if litpath is None:
|
||||
litpath = os.path.basename(opfpath)
|
||||
litpath = os.path.splitext(litpath)[0] + '.lit'
|
||||
litpath = os.path.abspath(litpath)
|
||||
lit = LitWriter(OEBBook(opfpath, logger=logger), logger=logger)
|
||||
with open(litpath, 'wb') as f:
|
||||
lit.dump(f)
|
||||
run_plugins_on_postprocess(litpath, 'lit')
|
||||
logger.log_info(_('Output written to ')+litpath)
|
||||
|
||||
|
||||
def main(argv=sys.argv):
|
||||
parser = option_parser()
|
||||
opts, args = parser.parse_args(argv[1:])
|
||||
if len(args) != 1:
|
||||
parser.print_help()
|
||||
return 1
|
||||
opfpath = args[0]
|
||||
oeb2lit(opts, opfpath)
|
||||
return 0
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main())
|
||||
@ -108,6 +108,8 @@ def option_parser(usage, gui_mode=False):
|
||||
help=_('Add a header to all the pages with title and author.'))
|
||||
laf.add_option('--headerformat', default="%t by %a", dest='headerformat', type='string',
|
||||
help=_('Set the format of the header. %a is replaced by the author and %t by the title. Default is %default'))
|
||||
laf.add_option('--header-separation', default=0, type='int',
|
||||
help=_('Add extra spacing below the header. Default is %default px.'))
|
||||
laf.add_option('--override-css', default=None, dest='_override_css', type='string',
|
||||
help=_('Override the CSS. Can be either a path to a CSS stylesheet or a string. If it is a string it is interpreted as CSS.'))
|
||||
laf.add_option('--use-spine', default=False, dest='use_spine', action='store_true',
|
||||
@ -260,10 +262,11 @@ def Book(options, logger, font_delta=0, header=None,
|
||||
hb.append(header)
|
||||
hdr.PutObj(hb)
|
||||
ps['headheight'] = profile.header_height
|
||||
ps['headsep'] = options.header_separation
|
||||
ps['header'] = hdr
|
||||
ps['topmargin'] = 0
|
||||
ps['textheight'] = profile.screen_height - (options.bottom_margin + ps['topmargin']) \
|
||||
- ps['headheight'] - profile.fudge
|
||||
- ps['headheight'] - ps['headsep'] - profile.fudge
|
||||
|
||||
fontsize = int(10*profile.font_size+font_delta*20)
|
||||
baselineskip = fontsize + 20
|
||||
|
||||
@ -18,6 +18,8 @@ from calibre.ebooks.lrf.epub.convert_from import process_file as epub2lrf
|
||||
from calibre.ebooks.lrf.mobi.convert_from import process_file as mobi2lrf
|
||||
from calibre.ebooks.lrf.fb2.convert_from import process_file as fb22lrf
|
||||
|
||||
from calibre.customize.ui import run_plugins_on_postprocess, run_plugins_on_preprocess
|
||||
|
||||
def largest_file(files):
|
||||
maxsize, file = 0, None
|
||||
for f in files:
|
||||
@ -108,6 +110,7 @@ def odt2lrf(path, options, logger):
|
||||
|
||||
def process_file(path, options, logger=None):
|
||||
path = os.path.abspath(os.path.expanduser(path))
|
||||
path = run_plugins_on_preprocess(path)
|
||||
tdir = None
|
||||
if logger is None:
|
||||
level = logging.DEBUG if options.verbose else logging.INFO
|
||||
@ -160,6 +163,7 @@ def process_file(path, options, logger=None):
|
||||
if not convertor:
|
||||
raise UnknownFormatError(_('Converting from %s to LRF is not supported.')%ext)
|
||||
convertor(path, options, logger)
|
||||
|
||||
finally:
|
||||
os.chdir(cwd)
|
||||
if tdir and os.path.exists(tdir):
|
||||
|
||||
@ -10,6 +10,14 @@ Based on ideas from comiclrf created by FangornUK.
|
||||
import os, sys, shutil, traceback, textwrap
|
||||
from uuid import uuid4
|
||||
|
||||
try:
|
||||
from reportlab.pdfgen import canvas
|
||||
_reportlab = True
|
||||
except:
|
||||
_reportlab = False
|
||||
|
||||
|
||||
|
||||
from calibre import extract, terminal_controller, __appname__, __version__
|
||||
from calibre.utils.config import Config, StringConfig
|
||||
from calibre.ptempfile import PersistentTemporaryDirectory
|
||||
@ -19,6 +27,7 @@ from calibre.ebooks.lrf.pylrs.pylrs import Book, BookSetting, ImageStream, Image
|
||||
from calibre.ebooks.metadata import MetaInformation
|
||||
from calibre.ebooks.metadata.opf import OPFCreator
|
||||
from calibre.ebooks.epub.from_html import config as html2epub_config, convert as html2epub
|
||||
from calibre.customize.ui import run_plugins_on_preprocess
|
||||
try:
|
||||
from calibre.utils.PythonMagickWand import \
|
||||
NewMagickWand, NewPixelWand, \
|
||||
@ -41,6 +50,8 @@ except:
|
||||
PROFILES = {
|
||||
# Name : (width, height) in pixels
|
||||
'prs500':(584, 754),
|
||||
# The SONY's LRF renderer (on the PRS500) only uses the first 800x600 block of the image
|
||||
'prs500-landscape': (784, 1200-92)
|
||||
}
|
||||
|
||||
def extract_comic(path_to_comic_file):
|
||||
@ -276,7 +287,7 @@ def process_pages(pages, opts, update):
|
||||
failures += failures_
|
||||
return ans, failures, tdir
|
||||
|
||||
def config(defaults=None):
|
||||
def config(defaults=None,output_format='lrf'):
|
||||
desc = _('Options to control the conversion of comics (CBR, CBZ) files into ebooks')
|
||||
if defaults is None:
|
||||
c = Config('comic', desc)
|
||||
@ -313,10 +324,13 @@ def config(defaults=None):
|
||||
help=_('Be verbose, useful for debugging. Can be specified multiple times for greater verbosity.'))
|
||||
c.add_opt('no_progress_bar', ['--no-progress-bar'], default=False,
|
||||
help=_("Don't show progress bar."))
|
||||
if output_format == 'pdf':
|
||||
c.add_opt('no_process',['--no_process'], default=False,
|
||||
help=_("Apply no processing to the image"))
|
||||
return c
|
||||
|
||||
def option_parser():
|
||||
c = config()
|
||||
def option_parser(output_format='lrf'):
|
||||
c = config(output_format=output_format)
|
||||
return c.option_parser(usage=_('''\
|
||||
%prog [options] comic.cb[z|r]
|
||||
|
||||
@ -379,38 +393,60 @@ def create_lrf(pages, profile, opts, thumbnail=None):
|
||||
book.renderLrf(open(opts.output, 'wb'))
|
||||
print _('Output written to'), opts.output
|
||||
|
||||
|
||||
def create_pdf(pages, profile, opts, thumbnail=None):
|
||||
width, height = PROFILES[profile]
|
||||
|
||||
if not _reportlab:
|
||||
raise RuntimeError('Failed to load reportlab')
|
||||
|
||||
pdf = canvas.Canvas(filename=opts.output, pagesize=(width,height+15))
|
||||
|
||||
for page in pages:
|
||||
pdf.drawImage(page, x=0,y=0,width=width, height=height)
|
||||
pdf.showPage()
|
||||
|
||||
# Write the document to disk
|
||||
pdf.save()
|
||||
|
||||
|
||||
def do_convert(path_to_file, opts, notification=lambda m, p: p, output_format='lrf'):
|
||||
path_to_file = run_plugins_on_preprocess(path_to_file)
|
||||
source = path_to_file
|
||||
|
||||
if not opts.title:
|
||||
opts.title = os.path.splitext(os.path.basename(source))[0]
|
||||
if not opts.output:
|
||||
opts.output = os.path.abspath(os.path.splitext(os.path.basename(source))[0]+'.'+output_format)
|
||||
tdir = extract_comic(source)
|
||||
pages = find_pages(tdir, sort_on_mtime=opts.no_sort, verbose=opts.verbose)
|
||||
thumbnail = None
|
||||
if not pages:
|
||||
raise ValueError('Could not find any pages in the comic: %s'%source)
|
||||
pages, failures, tdir2 = process_pages(pages, opts, notification)
|
||||
if not pages:
|
||||
raise ValueError('Could not find any valid pages in the comic: %s'%source)
|
||||
if failures:
|
||||
print 'Could not process the following pages (run with --verbose to see why):'
|
||||
for f in failures:
|
||||
print '\t', f
|
||||
thumbnail = os.path.join(tdir2, 'thumbnail.png')
|
||||
if not os.access(thumbnail, os.R_OK):
|
||||
thumbnail = None
|
||||
|
||||
if not opts.no_process:
|
||||
pages, failures, tdir2 = process_pages(pages, opts, notification)
|
||||
if not pages:
|
||||
raise ValueError('Could not find any valid pages in the comic: %s'%source)
|
||||
if failures:
|
||||
print 'Could not process the following pages (run with --verbose to see why):'
|
||||
for f in failures:
|
||||
print '\t', f
|
||||
thumbnail = os.path.join(tdir2, 'thumbnail.png')
|
||||
if not os.access(thumbnail, os.R_OK):
|
||||
thumbnail = None
|
||||
if output_format == 'lrf':
|
||||
create_lrf(pages, opts.profile, opts, thumbnail=thumbnail)
|
||||
else:
|
||||
if output_format == 'epub':
|
||||
create_epub(pages, opts.profile, opts, thumbnail=thumbnail)
|
||||
if output_format == 'pdf':
|
||||
create_pdf(pages, opts.profile, opts, thumbnail=thumbnail)
|
||||
shutil.rmtree(tdir)
|
||||
shutil.rmtree(tdir2)
|
||||
if not opts.no_process:
|
||||
shutil.rmtree(tdir2)
|
||||
|
||||
|
||||
def main(args=sys.argv, notification=None, output_format='lrf'):
|
||||
parser = option_parser()
|
||||
parser = option_parser(output_format=output_format)
|
||||
opts, args = parser.parse_args(args)
|
||||
if len(args) < 2:
|
||||
parser.print_help()
|
||||
@ -424,7 +460,6 @@ def main(args=sys.argv, notification=None, output_format='lrf'):
|
||||
|
||||
source = os.path.abspath(args[1])
|
||||
do_convert(source, opts, notification, output_format=output_format)
|
||||
|
||||
return 0
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
@ -2,14 +2,14 @@ __license__ = 'GPL v3'
|
||||
__copyright__ = '2008, Kovid Goyal <kovid at kovidgoyal.net>'
|
||||
|
||||
import os, sys, shutil, logging
|
||||
from tempfile import mkdtemp
|
||||
from calibre.ebooks.lrf import option_parser as lrf_option_parser
|
||||
from calibre.ebooks import ConversionError, DRMError
|
||||
from calibre.ebooks.lrf.html.convert_from import process_file as html_process_file
|
||||
from calibre.ebooks.metadata.opf import OPF
|
||||
from calibre.ebooks.metadata.epub import OCFDirReader
|
||||
from calibre.utils.zipfile import ZipFile
|
||||
from calibre import __appname__, setup_cli_handlers
|
||||
from calibre import setup_cli_handlers
|
||||
from calibre.ptempfile import PersistentTemporaryDirectory
|
||||
|
||||
|
||||
def option_parser():
|
||||
@ -22,17 +22,16 @@ _('''Usage: %prog [options] mybook.epub
|
||||
|
||||
def generate_html(pathtoepub, logger):
|
||||
if not os.access(pathtoepub, os.R_OK):
|
||||
raise ConversionError, 'Cannot read from ' + pathtoepub
|
||||
tdir = mkdtemp(prefix=__appname__+'_')
|
||||
os.rmdir(tdir)
|
||||
raise ConversionError('Cannot read from ' + pathtoepub)
|
||||
tdir = PersistentTemporaryDirectory('_epub2lrf')
|
||||
#os.rmdir(tdir)
|
||||
try:
|
||||
ZipFile(pathtoepub).extractall(tdir)
|
||||
if os.path.exists(os.path.join(tdir, 'META-INF', 'encryption.xml')):
|
||||
raise DRMError(os.path.basename(pathtoepub))
|
||||
except:
|
||||
if os.path.exists(tdir) and os.path.isdir(tdir):
|
||||
shutil.rmtree(tdir)
|
||||
raise ConversionError, '.epub extraction failed'
|
||||
if os.path.exists(os.path.join(tdir, 'META-INF', 'encryption.xml')):
|
||||
raise DRMError(os.path.basename(pathtoepub))
|
||||
|
||||
return tdir
|
||||
|
||||
def process_file(path, options, logger=None):
|
||||
|
||||
@ -12,24 +12,61 @@ except ImportError:
|
||||
'''
|
||||
Default fonts used in the PRS500
|
||||
'''
|
||||
from calibre.ebooks.lrf.fonts.prs500 import tt0003m_, tt0011m_, tt0419m_
|
||||
|
||||
SYSTEM_FONT_PATH = '/usr/share/fonts/truetype/ttf-liberation/'
|
||||
|
||||
FONT_MAP = {
|
||||
'Swis721 BT Roman' : tt0003m_,
|
||||
'Dutch801 Rm BT Roman' : tt0011m_,
|
||||
'Courier10 BT Roman' : tt0419m_,
|
||||
'Swis721 BT Roman' : 'tt0003m_',
|
||||
'Dutch801 Rm BT Roman' : 'tt0011m_',
|
||||
'Courier10 BT Roman' : 'tt0419m_',
|
||||
}
|
||||
|
||||
LIBERATION_FONT_MAP = {
|
||||
'Swis721 BT Roman' : 'LiberationSans_Regular',
|
||||
'Dutch801 Rm BT Roman' : 'LiberationSerif_Regular',
|
||||
'Courier10 BT Roman' : 'LiberationMono_Regular',
|
||||
}
|
||||
|
||||
SYSTEM_FONT_MAP = {}
|
||||
for key, val in LIBERATION_FONT_MAP.items():
|
||||
SYSTEM_FONT_MAP[key] = SYSTEM_FONT_PATH + val.replace('_', '-') + '.ttf'
|
||||
|
||||
FONT_FILE_MAP = {}
|
||||
|
||||
def get_font_path(name):
|
||||
if FONT_FILE_MAP.has_key(name) and os.access(FONT_FILE_MAP[name].name, os.R_OK):
|
||||
return FONT_FILE_MAP[name].name
|
||||
p = PersistentTemporaryFile('.ttf', 'font_')
|
||||
p.write(FONT_MAP[name].font_data)
|
||||
p.close()
|
||||
FONT_FILE_MAP[name] = p
|
||||
return p.name
|
||||
|
||||
# translate font into file name
|
||||
fname = FONT_MAP[name]
|
||||
|
||||
# first, check configuration in /etc/
|
||||
etc_file = os.path.join(os.path.sep, 'etc', 'calibre', 'fonts', fname + '.ttf')
|
||||
if os.access(etc_file, os.R_OK):
|
||||
return etc_file
|
||||
|
||||
# then, try calibre shipped ones
|
||||
try:
|
||||
try:
|
||||
font_mod = __import__('calibre.ebooks.lrf.fonts.prs500', {}, {},
|
||||
[fname], -1)
|
||||
except ImportError:
|
||||
font_mod = __import__('calibre.ebooks.lrf.fonts.liberation', {}, {},
|
||||
[LIBERATION_FONT_MAP[name]], -1)
|
||||
p = PersistentTemporaryFile('.ttf', 'font_')
|
||||
p.write(getattr(font_mod, fname).font_data)
|
||||
p.close()
|
||||
FONT_FILE_MAP[name] = p
|
||||
return p.name
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
# finally, try system default ones
|
||||
if SYSTEM_FONT_MAP.has_key(name) and os.access(SYSTEM_FONT_MAP[name], os.R_OK):
|
||||
return SYSTEM_FONT_MAP[name]
|
||||
|
||||
# not found
|
||||
raise SystemError, 'font %s (in file %s) not installed' % (name, fname)
|
||||
|
||||
def get_font(name, size, encoding='unic'):
|
||||
'''
|
||||
|
||||
@ -12,6 +12,7 @@ from urllib import unquote
|
||||
from urlparse import urlparse
|
||||
from math import ceil, floor
|
||||
from functools import partial
|
||||
from calibre.customize.ui import run_plugins_on_postprocess
|
||||
|
||||
try:
|
||||
from PIL import Image as PILImage
|
||||
@ -32,7 +33,6 @@ from calibre.ebooks.lrf.html.table import Table
|
||||
from calibre import filename_to_utf8, setup_cli_handlers, __appname__, \
|
||||
fit_image, LoggingInterface, preferred_encoding
|
||||
from calibre.ptempfile import PersistentTemporaryFile
|
||||
from calibre.ebooks.metadata.opf import OPFReader
|
||||
from calibre.devices.interface import Device
|
||||
from calibre.ebooks.lrf.html.color_map import lrs_color
|
||||
from calibre.ebooks.chardet import xml_to_unicode
|
||||
@ -106,6 +106,8 @@ class HTMLConverter(object, LoggingInterface):
|
||||
(re.compile(r'(<style.*?</style>)', re.IGNORECASE|re.DOTALL),
|
||||
strip_style_comments),
|
||||
|
||||
# Remove self closing script tags as they also mess up BeautifulSoup
|
||||
(re.compile(r'(?i)<script[^<>]+?/>'), lambda match: ''),
|
||||
|
||||
]
|
||||
# Fix Baen markup
|
||||
@ -243,7 +245,6 @@ class HTMLConverter(object, LoggingInterface):
|
||||
|
||||
self.override_css = {}
|
||||
self.override_pcss = {}
|
||||
self.table_render_job_server = None
|
||||
|
||||
if self._override_css is not None:
|
||||
if os.access(self._override_css, os.R_OK):
|
||||
@ -264,41 +265,37 @@ class HTMLConverter(object, LoggingInterface):
|
||||
paths = [os.path.abspath(path) for path in paths]
|
||||
paths = [path.decode(sys.getfilesystemencoding()) if not isinstance(path, unicode) else path for path in paths]
|
||||
|
||||
try:
|
||||
while len(paths) > 0 and self.link_level <= self.link_levels:
|
||||
for path in paths:
|
||||
if path in self.processed_files:
|
||||
continue
|
||||
try:
|
||||
self.add_file(path)
|
||||
except KeyboardInterrupt:
|
||||
while len(paths) > 0 and self.link_level <= self.link_levels:
|
||||
for path in paths:
|
||||
if path in self.processed_files:
|
||||
continue
|
||||
try:
|
||||
self.add_file(path)
|
||||
except KeyboardInterrupt:
|
||||
raise
|
||||
except:
|
||||
if self.link_level == 0: # Die on errors in the first level
|
||||
raise
|
||||
except:
|
||||
if self.link_level == 0: # Die on errors in the first level
|
||||
raise
|
||||
for link in self.links:
|
||||
if link['path'] == path:
|
||||
self.links.remove(link)
|
||||
break
|
||||
self.log_warn('Could not process '+path)
|
||||
if self.verbose:
|
||||
self.log_exception(' ')
|
||||
self.links = self.process_links()
|
||||
self.link_level += 1
|
||||
paths = [link['path'] for link in self.links]
|
||||
|
||||
if self.current_page is not None and self.current_page.has_text():
|
||||
self.book.append(self.current_page)
|
||||
|
||||
for text, tb in self.extra_toc_entries:
|
||||
self.book.addTocEntry(text, tb)
|
||||
|
||||
if self.base_font_size > 0:
|
||||
self.log_info('\tRationalizing font sizes...')
|
||||
self.book.rationalize_font_sizes(self.base_font_size)
|
||||
finally:
|
||||
if self.table_render_job_server is not None:
|
||||
self.table_render_job_server.killall()
|
||||
for link in self.links:
|
||||
if link['path'] == path:
|
||||
self.links.remove(link)
|
||||
break
|
||||
self.log_warn('Could not process '+path)
|
||||
if self.verbose:
|
||||
self.log_exception(' ')
|
||||
self.links = self.process_links()
|
||||
self.link_level += 1
|
||||
paths = [link['path'] for link in self.links]
|
||||
|
||||
if self.current_page is not None and self.current_page.has_text():
|
||||
self.book.append(self.current_page)
|
||||
|
||||
for text, tb in self.extra_toc_entries:
|
||||
self.book.addTocEntry(text, tb)
|
||||
|
||||
if self.base_font_size > 0:
|
||||
self.log_info('\tRationalizing font sizes...')
|
||||
self.book.rationalize_font_sizes(self.base_font_size)
|
||||
|
||||
def is_baen(self, soup):
|
||||
return bool(soup.find('meta', attrs={'name':'Publisher',
|
||||
@ -334,7 +331,8 @@ class HTMLConverter(object, LoggingInterface):
|
||||
soup = BeautifulSoup(raw,
|
||||
convertEntities=BeautifulSoup.XHTML_ENTITIES,
|
||||
markupMassage=nmassage)
|
||||
|
||||
else:
|
||||
raise
|
||||
if not self.baen and self.is_baen(soup):
|
||||
self.baen = True
|
||||
self.log_info(_('\tBaen file detected. Re-parsing...'))
|
||||
@ -520,6 +518,8 @@ class HTMLConverter(object, LoggingInterface):
|
||||
self.book.append(self.current_page)
|
||||
self.current_page = None
|
||||
|
||||
if top not in top.parent.contents: # May have been removed for a cover image
|
||||
top = top.parent.contents[0]
|
||||
if not top.has_text() and top.parent.contents.index(top) == len(top.parent.contents)-1:
|
||||
# Empty block at the bottom of a page
|
||||
opage = top.parent
|
||||
@ -809,7 +809,7 @@ class HTMLConverter(object, LoggingInterface):
|
||||
|
||||
def append_text(src):
|
||||
fp, key, variant = self.font_properties(css)
|
||||
for x, y in [(u'\xa0', ' '), (u'\ufb00', 'ff'), (u'\ufb01', 'fi'), (u'\ufb02', 'fl'), (u'\ufb03', 'ffi'), (u'\ufb04', 'ffl')]:
|
||||
for x, y in [(u'\xad', ''), (u'\xa0', ' '), (u'\ufb00', 'ff'), (u'\ufb01', 'fi'), (u'\ufb02', 'fl'), (u'\ufb03', 'ffi'), (u'\ufb04', 'ffl')]:
|
||||
src = src.replace(x, y)
|
||||
|
||||
valigner = lambda x: x
|
||||
@ -1028,6 +1028,8 @@ class HTMLConverter(object, LoggingInterface):
|
||||
self.current_para = Paragraph()
|
||||
else:
|
||||
self.end_page()
|
||||
if len(self.current_page.contents) == 1 and not self.current_page.has_text():
|
||||
self.current_page.contents[0:1] = []
|
||||
self.current_page.append(Canvas(width=pwidth,
|
||||
height=height))
|
||||
left = int(floor((pwidth - width)/2.))
|
||||
@ -1725,15 +1727,11 @@ class HTMLConverter(object, LoggingInterface):
|
||||
self.process_children(tag, tag_css, tag_pseudo_css)
|
||||
elif tagname == 'table' and not self.ignore_tables and not self.in_table:
|
||||
if self.render_tables_as_images:
|
||||
if self.table_render_job_server is None:
|
||||
from calibre.parallel import Server
|
||||
self.table_render_job_server = Server(number_of_workers=1)
|
||||
print 'Rendering table...'
|
||||
from calibre.ebooks.lrf.html.table_as_image import render_table
|
||||
pheight = int(self.current_page.pageStyle.attrs['textheight'])
|
||||
pwidth = int(self.current_page.pageStyle.attrs['textwidth'])
|
||||
images = render_table(self.table_render_job_server,
|
||||
self.soup, tag, tag_css,
|
||||
images = render_table(self.soup, tag, tag_css,
|
||||
os.path.dirname(self.target_prefix),
|
||||
pwidth, pheight, self.profile.dpi,
|
||||
self.text_size_multiplier_for_rendered_tables)
|
||||
@ -1846,7 +1844,7 @@ def process_file(path, options, logger=None):
|
||||
scaled else im
|
||||
cf = PersistentTemporaryFile(prefix=__appname__+"_", suffix=".jpg")
|
||||
cf.close()
|
||||
cim.save(cf.name)
|
||||
cim.convert('RGB').save(cf.name)
|
||||
options.cover = cf.name
|
||||
|
||||
tim = im.resize((int(0.75*th), th), PILImage.ANTIALIAS).convert('RGB')
|
||||
@ -1899,6 +1897,8 @@ def process_file(path, options, logger=None):
|
||||
fpb = re.compile(options.force_page_break, re.IGNORECASE) if options.force_page_break else \
|
||||
re.compile('$')
|
||||
cq = options.chapter_attr.split(',')
|
||||
if len(cq) < 3:
|
||||
raise ValueError('The --chapter-attr setting must have 2 commas.')
|
||||
options.chapter_attr = [re.compile(cq[0], re.IGNORECASE), cq[1],
|
||||
re.compile(cq[2], re.IGNORECASE)]
|
||||
options.force_page_break = fpb
|
||||
@ -1916,7 +1916,7 @@ def process_file(path, options, logger=None):
|
||||
options.anchor_ids = True
|
||||
files = options.spine if (options.use_spine and hasattr(options, 'spine')) else [path]
|
||||
conv = HTMLConverter(book, fonts, options, logger, files)
|
||||
if options.use_spine and hasattr(options, 'toc'):
|
||||
if options.use_spine and hasattr(options, 'toc') and options.toc is not None:
|
||||
conv.create_toc(options.toc)
|
||||
oname = options.output
|
||||
if not oname:
|
||||
@ -1925,7 +1925,8 @@ def process_file(path, options, logger=None):
|
||||
oname = os.path.join(os.getcwd(), name)
|
||||
oname = os.path.abspath(os.path.expanduser(oname))
|
||||
conv.writeto(oname, lrs=options.lrs)
|
||||
logger.info('Output written to %s', oname)
|
||||
run_plugins_on_postprocess(oname, 'lrf')
|
||||
conv.log_info('Output written to %s', oname)
|
||||
conv.cleanup()
|
||||
return oname
|
||||
|
||||
@ -1944,7 +1945,8 @@ def try_opf(path, options, logger):
|
||||
return
|
||||
|
||||
dirpath = os.path.dirname(os.path.abspath(opf))
|
||||
opf = OPFReader(open(opf, 'rb'), dirpath)
|
||||
from calibre.ebooks.metadata.opf2 import OPF as OPF2
|
||||
opf = OPF2(open(opf, 'rb'), dirpath)
|
||||
try:
|
||||
title = opf.title
|
||||
if title and not getattr(options, 'title', None):
|
||||
@ -1958,10 +1960,6 @@ def try_opf(path, options, logger):
|
||||
publisher = opf.publisher
|
||||
if publisher:
|
||||
options.publisher = publisher
|
||||
if not getattr(options, 'category', None):
|
||||
category = opf.category
|
||||
if category:
|
||||
options.category = category
|
||||
if not getattr(options, 'cover', None) or options.use_metadata_cover:
|
||||
orig_cover = getattr(options, 'cover', None)
|
||||
options.cover = None
|
||||
@ -1975,17 +1973,7 @@ def try_opf(path, options, logger):
|
||||
PILImage.open(cover)
|
||||
options.cover = cover
|
||||
except:
|
||||
for prefix in opf.possible_cover_prefixes():
|
||||
if options.cover:
|
||||
break
|
||||
for suffix in ['.jpg', '.jpeg', '.gif', '.png', '.bmp']:
|
||||
cpath = os.path.join(os.path.dirname(path), prefix+suffix)
|
||||
try:
|
||||
PILImage.open(cpath)
|
||||
options.cover = cpath
|
||||
break
|
||||
except:
|
||||
continue
|
||||
pass
|
||||
if not getattr(options, 'cover', None) and orig_cover is not None:
|
||||
options.cover = orig_cover
|
||||
if getattr(opf, 'spine', False):
|
||||
|
||||
@ -6,14 +6,11 @@ __docformat__ = 'restructuredtext en'
|
||||
'''
|
||||
Render HTML tables as images.
|
||||
'''
|
||||
import os, tempfile, atexit, shutil, time
|
||||
from PyQt4.Qt import QUrl, QApplication, QSize, \
|
||||
import os, tempfile, atexit, shutil
|
||||
from PyQt4.Qt import QUrl, QApplication, QSize, QEventLoop, \
|
||||
SIGNAL, QPainter, QImage, QObject, Qt
|
||||
from PyQt4.QtWebKit import QWebPage
|
||||
|
||||
from calibre.parallel import ParallelJob
|
||||
|
||||
__app = None
|
||||
|
||||
class HTMLTableRenderer(QObject):
|
||||
|
||||
@ -27,13 +24,15 @@ class HTMLTableRenderer(QObject):
|
||||
self.app = None
|
||||
self.width, self.height, self.dpi = width, height, dpi
|
||||
self.base_dir = base_dir
|
||||
self.images = []
|
||||
self.tdir = tempfile.mkdtemp(prefix='calibre_render_table')
|
||||
self.loop = QEventLoop()
|
||||
self.page = QWebPage()
|
||||
self.connect(self.page, SIGNAL('loadFinished(bool)'), self.render_html)
|
||||
self.page.mainFrame().setTextSizeMultiplier(factor)
|
||||
self.page.mainFrame().setHtml(html,
|
||||
QUrl('file:'+os.path.abspath(self.base_dir)))
|
||||
self.images = []
|
||||
self.tdir = tempfile.mkdtemp(prefix='calibre_render_table')
|
||||
|
||||
|
||||
def render_html(self, ok):
|
||||
try:
|
||||
@ -63,7 +62,7 @@ class HTMLTableRenderer(QObject):
|
||||
finally:
|
||||
QApplication.quit()
|
||||
|
||||
def render_table(server, soup, table, css, base_dir, width, height, dpi, factor=1.0):
|
||||
def render_table(soup, table, css, base_dir, width, height, dpi, factor=1.0):
|
||||
head = ''
|
||||
for e in soup.findAll(['link', 'style']):
|
||||
head += unicode(e)+'\n\n'
|
||||
@ -83,24 +82,13 @@ def render_table(server, soup, table, css, base_dir, width, height, dpi, factor=
|
||||
</body>
|
||||
</html>
|
||||
'''%(head, width-10, style, unicode(table))
|
||||
job = ParallelJob('render_table', lambda j : j, None,
|
||||
args=[html, base_dir, width, height, dpi, factor])
|
||||
server.add_job(job)
|
||||
while not job.has_run:
|
||||
time.sleep(2)
|
||||
|
||||
if job.exception is not None:
|
||||
print 'Failed to render table'
|
||||
print job.exception
|
||||
print job.traceback
|
||||
images, tdir = job.result
|
||||
images, tdir = do_render(html, base_dir, width, height, dpi, factor)
|
||||
atexit.register(shutil.rmtree, tdir)
|
||||
return images
|
||||
|
||||
def do_render(html, base_dir, width, height, dpi, factor):
|
||||
app = QApplication.instance()
|
||||
if app is None:
|
||||
app = QApplication([])
|
||||
if QApplication.instance() is None:
|
||||
QApplication([])
|
||||
tr = HTMLTableRenderer(html, base_dir, width, height, dpi, factor)
|
||||
app.exec_()
|
||||
tr.loop.exec_()
|
||||
return tr.images, tr.tdir
|
||||
@ -77,7 +77,7 @@ class LRFDocument(LRFMetaFile):
|
||||
for obj in self.image_map.values() + self.font_map.values():
|
||||
open(obj.file, 'wb').write(obj.stream)
|
||||
|
||||
def to_xml(self):
|
||||
def to_xml(self, write_files=True):
|
||||
bookinfo = u'<BookInformation>\n<Info version="1.1">\n<BookInfo>\n'
|
||||
bookinfo += u'<Title reading="%s">%s</Title>\n'%(self.metadata.title_reading, self.metadata.title)
|
||||
bookinfo += u'<Author reading="%s">%s</Author>\n'%(self.metadata.author_reading, self.metadata.author)
|
||||
@ -89,9 +89,10 @@ class LRFDocument(LRFMetaFile):
|
||||
bookinfo += u'<FreeText reading="">%s</FreeText>\n</BookInfo>\n<DocInfo>\n'%(self.metadata.free_text,)
|
||||
th = self.doc_info.thumbnail
|
||||
if th:
|
||||
prefix = sanitize_file_name(self.metadata.title)
|
||||
prefix = sanitize_file_name(self.metadata.title, as_unicode=True)
|
||||
bookinfo += u'<CThumbnail file="%s" />\n'%(prefix+'_thumbnail.'+self.doc_info.thumbnail_extension,)
|
||||
open(prefix+'_thumbnail.'+self.doc_info.thumbnail_extension, 'wb').write(th)
|
||||
if write_files:
|
||||
open(prefix+'_thumbnail.'+self.doc_info.thumbnail_extension, 'wb').write(th)
|
||||
bookinfo += u'<Language reading="">%s</Language>\n'%(self.doc_info.language,)
|
||||
bookinfo += u'<Creator reading="">%s</Creator>\n'%(self.doc_info.creator,)
|
||||
bookinfo += u'<Producer reading="">%s</Producer>\n'%(self.doc_info.producer,)
|
||||
@ -127,12 +128,16 @@ class LRFDocument(LRFMetaFile):
|
||||
objects += unicode(obj)
|
||||
styles += '</Style>\n'
|
||||
objects += '</Objects>\n'
|
||||
self.write_files()
|
||||
if write_files:
|
||||
self.write_files()
|
||||
return '<BBeBXylog version="1.0">\n' + bookinfo + pages + styles + objects + '</BBeBXylog>'
|
||||
|
||||
def option_parser():
|
||||
parser = OptionParser(usage=_('%prog book.lrf\nConvert an LRF file into an LRS (XML UTF-8 encoded) file'))
|
||||
parser.add_option('--output', '-o', default=None, help=_('Output LRS file'), dest='out')
|
||||
parser.add_option('--dont-output-resources', default=True, action='store_false',
|
||||
help=_('Do not save embedded image and font files to disk'),
|
||||
dest='output_resources')
|
||||
parser.add_option('--verbose', default=False, action='store_true', dest='verbose')
|
||||
return parser
|
||||
|
||||
@ -154,7 +159,7 @@ def main(args=sys.argv, logger=None):
|
||||
d = LRFDocument(open(args[1], 'rb'))
|
||||
d.parse()
|
||||
logger.info(_('Creating XML...'))
|
||||
o.write(d.to_xml())
|
||||
o.write(d.to_xml(write_files=opts.output_resources))
|
||||
logger.info(_('LRS written to ')+opts.out)
|
||||
return 0
|
||||
|
||||
|
||||
@ -603,12 +603,18 @@ Show/edit the metadata in an LRF file.\n\n'''),
|
||||
parser.add_option("--get-thumbnail", action="store_true", \
|
||||
dest="get_thumbnail", default=False, \
|
||||
help=_("Extract thumbnail from LRF file"))
|
||||
parser.add_option('--publisher', default=None, help=_('Set the publisher'))
|
||||
parser.add_option('--classification', default=None, help=_('Set the book classification'))
|
||||
parser.add_option('--creator', default=None, help=_('Set the book creator'))
|
||||
parser.add_option('--producer', default=None, help=_('Set the book producer'))
|
||||
parser.add_option('--get-cover', action='store_true', default=False,
|
||||
help=_('Extract cover from LRF file. Note that the LRF format has no defined cover, so we use some heuristics to guess the cover.'))
|
||||
parser.add_option('--bookid', action='store', type='string', default=None,
|
||||
dest='book_id', help=_('Set book ID'))
|
||||
parser.add_option("-p", "--page", action="store", type="string", \
|
||||
dest="page", help=_("Don't know what this is for"))
|
||||
# The SumPage element specifies the number of "View"s (visible pages for the BookSetting element conditions) of the content.
|
||||
# Basically, the total pages per the page size, font size, etc. when the LRF is first created. Since this will change as the book is reflowed, it is probably not worth using.
|
||||
#parser.add_option("-p", "--page", action="store", type="string", \
|
||||
# dest="page", help=_("Don't know what this is for"))
|
||||
|
||||
return parser
|
||||
|
||||
@ -624,6 +630,8 @@ def set_metadata(stream, mi):
|
||||
lrf.free_text = mi.comments
|
||||
if mi.author_sort:
|
||||
lrf.author_reading = mi.author_sort
|
||||
if mi.publisher:
|
||||
lrf.publisher = mi.publisher
|
||||
|
||||
|
||||
def main(args=sys.argv):
|
||||
@ -644,10 +652,16 @@ def main(args=sys.argv):
|
||||
lrf.author_reading = options.author_reading
|
||||
if options.author:
|
||||
lrf.author = options.author
|
||||
if options.publisher:
|
||||
lrf.publisher = options.publisher
|
||||
if options.classification:
|
||||
lrf.classification = options.classification
|
||||
if options.category:
|
||||
lrf.category = options.category
|
||||
if options.page:
|
||||
lrf.page = options.page
|
||||
if options.creator:
|
||||
lrf.creator = options.creator
|
||||
if options.producer:
|
||||
lrf.producer = options.producer
|
||||
if options.thumbnail:
|
||||
path = os.path.expanduser(os.path.expandvars(options.thumbnail))
|
||||
f = open(path, "rb")
|
||||
|
||||
@ -2338,6 +2338,9 @@ class Canvas(LrsObject, LrsContainer, LrsAttributes):
|
||||
container.addLrfObject(c.objId)
|
||||
lrfWriter.append(c)
|
||||
|
||||
def has_text(self):
|
||||
return bool(self.contents)
|
||||
|
||||
|
||||
|
||||
class PutObj(LrsContainer):
|
||||
|
||||
@ -1,37 +1,6 @@
|
||||
__license__ = 'GPL v3'
|
||||
__copyright__ = '2008, Kovid Goyal <kovid at kovidgoyal.net>'
|
||||
|
||||
from calibre.ebooks.lrf.web.profiles.nytimes import NYTimes
|
||||
from calibre.ebooks.lrf.web.profiles.bbc import BBC
|
||||
from calibre.ebooks.lrf.web.profiles.newsweek import Newsweek
|
||||
from calibre.ebooks.lrf.web.profiles.economist import Economist
|
||||
from calibre.ebooks.lrf.web.profiles.newyorkreview import NewYorkReviewOfBooks
|
||||
from calibre.ebooks.lrf.web.profiles.spiegelde import SpiegelOnline
|
||||
from calibre.ebooks.lrf.web.profiles.zeitde import ZeitNachrichten
|
||||
from calibre.ebooks.lrf.web.profiles.faznet import FazNet
|
||||
from calibre.ebooks.lrf.web.profiles.wsj import WallStreetJournal
|
||||
from calibre.ebooks.lrf.web.profiles.barrons import Barrons
|
||||
from calibre.ebooks.lrf.web.profiles.portfolio import Portfolio
|
||||
from calibre.ebooks.lrf.web.profiles.cnn import CNN
|
||||
from calibre.ebooks.lrf.web.profiles.chr_mon import ChristianScienceMonitor
|
||||
from calibre.ebooks.lrf.web.profiles.jpost import JerusalemPost
|
||||
from calibre.ebooks.lrf.web.profiles.reuters import Reuters
|
||||
from calibre.ebooks.lrf.web.profiles.atlantic import Atlantic
|
||||
from calibre.ebooks.lrf.web.profiles.ap import AssociatedPress
|
||||
from calibre.ebooks.lrf.web.profiles.newyorker import NewYorker
|
||||
from calibre.ebooks.lrf.web.profiles.jutarnji import Jutarnji
|
||||
from calibre.ebooks.lrf.web.profiles.usatoday import USAToday
|
||||
from calibre.ebooks.lrf.web.profiles.upi import UnitedPressInternational
|
||||
from calibre.ebooks.lrf.web.profiles.wash_post import WashingtonPost
|
||||
from calibre.ebooks.lrf.web.profiles.nasa import NASA
|
||||
|
||||
|
||||
builtin_profiles = [Atlantic, AssociatedPress, Barrons, BBC,
|
||||
ChristianScienceMonitor, CNN, Economist, FazNet,
|
||||
JerusalemPost, Jutarnji, NASA, Newsweek, NewYorker,
|
||||
NewYorkReviewOfBooks, NYTimes, UnitedPressInternational, USAToday,
|
||||
Portfolio, Reuters, SpiegelOnline, WallStreetJournal,
|
||||
WashingtonPost, ZeitNachrichten,
|
||||
]
|
||||
|
||||
available_profiles = [i.__module__.rpartition('.')[2] for i in builtin_profiles]
|
||||
builtin_profiles = []
|
||||
available_profiles = [i.__module__.rpartition('.')[2] for i in builtin_profiles]
|
||||
|
||||
@ -61,7 +61,7 @@ class NASA(DefaultProfile):
|
||||
(re.compile(r'<!-- Top Header starts -->.*?<!---->', re.IGNORECASE | re.DOTALL), lambda match : '<New Stuff>'),
|
||||
|
||||
## This removes the "download image" of various sizes from the Image of the day.
|
||||
(re.compile(r'<div id="download_image_box_print">.*?<div id="caption_region_print">', re.IGNORECASE | re.DOTALL), lambda match : '<New Stuff>'),
|
||||
(re.compile(r'(?is)<div id="download_image_box_print">.*?<div id="caption_region_print">'), lambda match : '<New Stuff>'),
|
||||
|
||||
|
||||
]
|
||||
|
||||
@ -103,6 +103,8 @@ class OCFDirReader(OCFReader):
|
||||
return open(os.path.join(self.root, path), *args, **kwargs)
|
||||
|
||||
class CoverRenderer(QObject):
|
||||
WIDTH = 1280
|
||||
HEIGHT = 1024
|
||||
|
||||
def __init__(self, url, size, loop):
|
||||
QObject.__init__(self)
|
||||
@ -111,6 +113,9 @@ class CoverRenderer(QObject):
|
||||
pal = self.page.palette()
|
||||
pal.setBrush(QPalette.Background, Qt.white)
|
||||
self.page.setPalette(pal)
|
||||
self.page.setViewportSize(QSize(600, 800))
|
||||
self.page.mainFrame().setScrollBarPolicy(Qt.Vertical, Qt.ScrollBarAlwaysOff)
|
||||
self.page.mainFrame().setScrollBarPolicy(Qt.Horizontal, Qt.ScrollBarAlwaysOff)
|
||||
QObject.connect(self.page, SIGNAL('loadFinished(bool)'), self.render_html)
|
||||
self.image_data = None
|
||||
self.rendered = False
|
||||
@ -122,7 +127,7 @@ class CoverRenderer(QObject):
|
||||
if not ok:
|
||||
return
|
||||
size = self.page.mainFrame().contentsSize()
|
||||
width, height = fit_image(size.width(), size.height(), 1280, 1024)[1:]
|
||||
width, height = fit_image(size.width(), size.height(), self.WIDTH, self.HEIGHT)[1:]
|
||||
self.page.setViewportSize(QSize(width, height))
|
||||
image = QImage(self.page.viewportSize(), QImage.Format_ARGB32)
|
||||
image.setDotsPerMeterX(96*(100/2.54))
|
||||
|
||||
@ -10,13 +10,14 @@ Try to read metadata from an HTML file.
|
||||
import re
|
||||
|
||||
from calibre.ebooks.metadata import MetaInformation
|
||||
from calibre.ebooks.chardet import xml_to_unicode
|
||||
|
||||
def get_metadata(stream):
|
||||
src = stream.read()
|
||||
src = xml_to_unicode(stream.read())[0]
|
||||
|
||||
# Title
|
||||
title = None
|
||||
pat = re.compile(r'<!--.*?TITLE=(?P<q>[\'"])(.+)(?P=q).*?-->', re.DOTALL)
|
||||
pat = re.compile(r'<!--.*?TITLE=(?P<q>[\'"])(.+?)(?P=q).*?-->', re.DOTALL)
|
||||
match = pat.search(src)
|
||||
if match:
|
||||
title = match.group(2)
|
||||
@ -28,7 +29,7 @@ def get_metadata(stream):
|
||||
|
||||
# Author
|
||||
author = None
|
||||
pat = re.compile(r'<!--.*?AUTHOR=(?P<q>[\'"])(.+)(?P=q).*?-->', re.DOTALL)
|
||||
pat = re.compile(r'<!--.*?AUTHOR=(?P<q>[\'"])(.+?)(?P=q).*?-->', re.DOTALL)
|
||||
match = pat.search(src)
|
||||
if match:
|
||||
author = match.group(2).replace(',', ';')
|
||||
@ -36,7 +37,7 @@ def get_metadata(stream):
|
||||
mi = MetaInformation(title, [author] if author else None)
|
||||
|
||||
# Publisher
|
||||
pat = re.compile(r'<!--.*?PUBLISHER=(?P<q>[\'"])(.+)(?P=q).*?-->', re.DOTALL)
|
||||
pat = re.compile(r'<!--.*?PUBLISHER=(?P<q>[\'"])(.+?)(?P=q).*?-->', re.DOTALL)
|
||||
match = pat.search(src)
|
||||
if match:
|
||||
mi.publisher = match.group(2)
|
||||
|
||||
89
src/calibre/ebooks/metadata/lrx.py
Normal file
89
src/calibre/ebooks/metadata/lrx.py
Normal file
@ -0,0 +1,89 @@
|
||||
#!/usr/bin/env python
|
||||
__license__ = 'GPL v3'
|
||||
__copyright__ = '2008, Kovid Goyal kovid@kovidgoyal.net'
|
||||
__docformat__ = 'restructuredtext en'
|
||||
|
||||
'''
|
||||
Read metadata from LRX files
|
||||
'''
|
||||
|
||||
import sys, struct
|
||||
from zlib import decompress
|
||||
from lxml import etree
|
||||
|
||||
from calibre.ebooks.metadata import MetaInformation, string_to_authors
|
||||
|
||||
def _read(f, at, amount):
|
||||
f.seek(at)
|
||||
return f.read(amount)
|
||||
|
||||
def word_be(buf):
|
||||
return struct.unpack('>L', buf)[0]
|
||||
|
||||
def word_le(buf):
|
||||
return struct.unpack('<L', buf)[0]
|
||||
|
||||
def short_le(buf):
|
||||
return struct.unpack('<H', buf)[0]
|
||||
|
||||
def short_be(buf):
|
||||
return struct.unpack('>H', buf)[0]
|
||||
|
||||
|
||||
def get_metadata(f):
|
||||
read = lambda at, amount: _read(f, at, amount)
|
||||
f.seek(0)
|
||||
buf = f.read(12)
|
||||
if buf[4:] == 'ftypLRX2':
|
||||
offset = 0
|
||||
while True:
|
||||
offset += word_be(buf[:4])
|
||||
try:
|
||||
buf = read(offset, 8)
|
||||
except:
|
||||
raise ValueError('Not a valid LRX file')
|
||||
if buf[4:] == 'bbeb':
|
||||
break
|
||||
offset += 8
|
||||
buf = read(offset, 16)
|
||||
if buf[:8].decode('utf-16-le') != 'LRF\x00':
|
||||
raise ValueError('Not a valid LRX file')
|
||||
lrf_version = word_le(buf[8:12])
|
||||
offset += 0x4c
|
||||
compressed_size = short_le(read(offset, 2))
|
||||
offset += 2
|
||||
if lrf_version >= 800:
|
||||
offset += 6
|
||||
compressed_size -= 4
|
||||
uncompressed_size = word_le(read(offset, 4))
|
||||
info = decompress(f.read(compressed_size))
|
||||
if len(info) != uncompressed_size:
|
||||
raise ValueError('LRX file has malformed metadata section')
|
||||
root = etree.fromstring(info)
|
||||
bi = root.find('BookInfo')
|
||||
title = bi.find('Title')
|
||||
title_sort = title.get('reading', None)
|
||||
title = title.text
|
||||
author = bi.find('Author')
|
||||
author_sort = author.get('reading', None)
|
||||
mi = MetaInformation(title, string_to_authors(author.text))
|
||||
mi.title_sort, mi.author_sort = title_sort, author_sort
|
||||
author = author.text
|
||||
publisher = bi.find('Publisher')
|
||||
mi.publisher = getattr(publisher, 'text', None)
|
||||
mi.tags = [x.text for x in bi.findall('Category')]
|
||||
mi.language = root.find('DocInfo').find('Language').text
|
||||
return mi
|
||||
|
||||
elif buf[4:8] == 'LRX':
|
||||
raise ValueError('Librie LRX format not supported')
|
||||
else:
|
||||
raise ValueError('Not a LRX file')
|
||||
|
||||
|
||||
def main(args=sys.argv):
|
||||
print get_metadata(open(args[1], 'rb'))
|
||||
return 0
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main())
|
||||
@ -5,36 +5,17 @@ __copyright__ = '2008, Kovid Goyal <kovid at kovidgoyal.net>'
|
||||
import os, re, collections
|
||||
|
||||
from calibre.utils.config import prefs
|
||||
from calibre.ebooks.metadata.rtf import get_metadata as rtf_metadata
|
||||
from calibre.ebooks.metadata.fb2 import get_metadata as fb2_metadata
|
||||
from calibre.ebooks.lrf.meta import get_metadata as lrf_metadata
|
||||
from calibre.ebooks.metadata.pdf import get_metadata as pdf_metadata
|
||||
from calibre.ebooks.metadata.lit import get_metadata as lit_metadata
|
||||
from calibre.ebooks.metadata.imp import get_metadata as imp_metadata
|
||||
from calibre.ebooks.metadata.rb import get_metadata as rb_metadata
|
||||
from calibre.ebooks.metadata.epub import get_metadata as epub_metadata
|
||||
from calibre.ebooks.metadata.html import get_metadata as html_metadata
|
||||
from calibre.ebooks.mobi.reader import get_metadata as mobi_metadata
|
||||
from calibre.ebooks.metadata.odt import get_metadata as odt_metadata
|
||||
|
||||
from calibre.ebooks.metadata.opf2 import OPF
|
||||
from calibre.ebooks.metadata.rtf import set_metadata as set_rtf_metadata
|
||||
from calibre.ebooks.lrf.meta import set_metadata as set_lrf_metadata
|
||||
from calibre.ebooks.metadata.epub import set_metadata as set_epub_metadata
|
||||
from calibre.ebooks.metadata.pdf import set_metadata as set_pdf_metadata
|
||||
try:
|
||||
from calibre.libunrar import extract_member as rar_extract_first
|
||||
except OSError:
|
||||
rar_extract_first = None
|
||||
|
||||
from calibre.libunzip import extract_member as zip_extract_first
|
||||
|
||||
from calibre.customize.ui import get_file_type_metadata, set_file_type_metadata
|
||||
from calibre.ebooks.metadata import MetaInformation
|
||||
from calibre.ptempfile import TemporaryDirectory
|
||||
|
||||
_METADATA_PRIORITIES = [
|
||||
'html', 'htm', 'xhtml', 'xhtm',
|
||||
'rtf', 'fb2', 'pdf', 'prc', 'odt',
|
||||
'epub', 'lit', 'lrf', 'mobi', 'rb', 'imp'
|
||||
'epub', 'lit', 'lrx', 'lrf', 'mobi',
|
||||
'rb', 'imp'
|
||||
]
|
||||
|
||||
# The priorities for loading metadata from different file types
|
||||
@ -87,11 +68,7 @@ def get_metadata(stream, stream_type='lrf', use_libprs_metadata=False):
|
||||
|
||||
mi = MetaInformation(None, None)
|
||||
if prefs['read_file_metadata']:
|
||||
try:
|
||||
func = eval(stream_type + '_metadata')
|
||||
mi = func(stream)
|
||||
except NameError:
|
||||
pass
|
||||
mi = get_file_type_metadata(stream, stream_type)
|
||||
|
||||
name = os.path.basename(getattr(stream, 'name', ''))
|
||||
base = metadata_from_filename(name)
|
||||
@ -103,37 +80,14 @@ def get_metadata(stream, stream_type='lrf', use_libprs_metadata=False):
|
||||
if opf is not None:
|
||||
base.smart_update(opf)
|
||||
|
||||
if stream_type in ('cbr', 'cbz'):
|
||||
try:
|
||||
cdata = get_comic_cover(stream, stream_type)
|
||||
if cdata is not None:
|
||||
base.cover_data = cdata
|
||||
except:
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
pass
|
||||
|
||||
return base
|
||||
|
||||
def get_comic_cover(stream, type):
|
||||
extract_first = zip_extract_first if type.lower() == 'cbz' else rar_extract_first
|
||||
ret = extract_first(stream)
|
||||
if ret is not None:
|
||||
path, data = ret
|
||||
ext = os.path.splitext(path)[1][1:]
|
||||
return (ext.lower(), data)
|
||||
|
||||
def set_metadata(stream, mi, stream_type='lrf'):
|
||||
if stream_type: stream_type = stream_type.lower()
|
||||
if stream_type == 'lrf':
|
||||
set_lrf_metadata(stream, mi)
|
||||
elif stream_type == 'epub':
|
||||
set_epub_metadata(stream, mi)
|
||||
elif stream_type == 'rtf':
|
||||
set_rtf_metadata(stream, mi)
|
||||
#elif stream_type == 'pdf':
|
||||
# set_pdf_metadata(stream, mi)
|
||||
|
||||
if stream_type:
|
||||
stream_type = stream_type.lower()
|
||||
set_file_type_metadata(stream, mi, stream_type)
|
||||
|
||||
|
||||
def metadata_from_filename(name, pat=None):
|
||||
name = os.path.splitext(name)[0]
|
||||
mi = MetaInformation(None, None)
|
||||
|
||||
@ -70,7 +70,7 @@ class Manifest(ResourceCollection):
|
||||
@staticmethod
|
||||
def from_opf_manifest_element(manifest, dir):
|
||||
m = Manifest()
|
||||
for item in manifest.findAll('item'):
|
||||
for item in manifest.findAll(re.compile('item')):
|
||||
try:
|
||||
m.append(ManifestItem.from_opf_manifest_item(item, dir))
|
||||
id = item.get('id', '')
|
||||
@ -130,7 +130,7 @@ class Spine(ResourceCollection):
|
||||
@staticmethod
|
||||
def from_opf_spine_element(spine, manifest):
|
||||
s = Spine(manifest)
|
||||
for itemref in spine.findAll('itemref'):
|
||||
for itemref in spine.findAll(re.compile('itemref')):
|
||||
if itemref.has_key('idref'):
|
||||
r = Spine.Item(s.manifest.id_for_path,
|
||||
s.manifest.path_for_id(itemref['idref']), is_path=True)
|
||||
@ -216,7 +216,7 @@ class standard_field(object):
|
||||
def __get__(self, obj, typ=None):
|
||||
return getattr(obj, 'get_'+self.name)()
|
||||
|
||||
|
||||
|
||||
class OPF(MetaInformation):
|
||||
|
||||
MIMETYPE = 'application/oebps-package+xml'
|
||||
@ -242,14 +242,27 @@ class OPF(MetaInformation):
|
||||
def __init__(self):
|
||||
raise NotImplementedError('Abstract base class')
|
||||
|
||||
@apply
|
||||
def package():
|
||||
def fget(self):
|
||||
return self.soup.find(re.compile('package'))
|
||||
return property(fget=fget)
|
||||
|
||||
@apply
|
||||
def metadata():
|
||||
def fget(self):
|
||||
return self.package.find(re.compile('metadata'))
|
||||
return property(fget=fget)
|
||||
|
||||
|
||||
def get_title(self):
|
||||
title = self.soup.package.metadata.find('dc:title')
|
||||
title = self.metadata.find('dc:title')
|
||||
if title and title.string:
|
||||
return self.ENTITY_PATTERN.sub(entity_to_unicode, title.string).strip()
|
||||
return self.default_title.strip()
|
||||
|
||||
def get_authors(self):
|
||||
creators = self.soup.package.metadata.findAll('dc:creator')
|
||||
creators = self.metadata.findAll('dc:creator')
|
||||
for elem in creators:
|
||||
role = elem.get('role')
|
||||
if not role:
|
||||
@ -266,7 +279,7 @@ class OPF(MetaInformation):
|
||||
return []
|
||||
|
||||
def get_author_sort(self):
|
||||
creators = self.soup.package.metadata.findAll('dc:creator')
|
||||
creators = self.metadata.findAll('dc:creator')
|
||||
for elem in creators:
|
||||
role = elem.get('role')
|
||||
if not role:
|
||||
@ -277,7 +290,7 @@ class OPF(MetaInformation):
|
||||
return None
|
||||
|
||||
def get_title_sort(self):
|
||||
title = self.soup.package.find('dc:title')
|
||||
title = self.package.find('dc:title')
|
||||
if title:
|
||||
if title.has_key('file-as'):
|
||||
return title['file-as'].strip()
|
||||
@ -290,7 +303,7 @@ class OPF(MetaInformation):
|
||||
return None
|
||||
|
||||
def get_uid(self):
|
||||
package = self.soup.find('package')
|
||||
package = self.package
|
||||
if package.has_key('unique-identifier'):
|
||||
return package['unique-identifier']
|
||||
|
||||
@ -307,7 +320,7 @@ class OPF(MetaInformation):
|
||||
return None
|
||||
|
||||
def get_isbn(self):
|
||||
for item in self.soup.package.metadata.findAll('dc:identifier'):
|
||||
for item in self.metadata.findAll('dc:identifier'):
|
||||
scheme = item.get('scheme')
|
||||
if not scheme:
|
||||
scheme = item.get('opf:scheme')
|
||||
@ -316,13 +329,13 @@ class OPF(MetaInformation):
|
||||
return None
|
||||
|
||||
def get_language(self):
|
||||
item = self.soup.package.metadata.find('dc:language')
|
||||
item = self.metadata.find('dc:language')
|
||||
if not item:
|
||||
return _('Unknown')
|
||||
return ''.join(item.findAll(text=True)).strip()
|
||||
|
||||
def get_application_id(self):
|
||||
for item in self.soup.package.metadata.findAll('dc:identifier'):
|
||||
for item in self.metadata.findAll('dc:identifier'):
|
||||
scheme = item.get('scheme', None)
|
||||
if scheme is None:
|
||||
scheme = item.get('opf:scheme', None)
|
||||
@ -342,7 +355,7 @@ class OPF(MetaInformation):
|
||||
|
||||
def possible_cover_prefixes(self):
|
||||
isbn, ans = [], []
|
||||
for item in self.soup.package.metadata.findAll('dc:identifier'):
|
||||
for item in self.metadata.findAll('dc:identifier'):
|
||||
scheme = item.get('scheme')
|
||||
if not scheme:
|
||||
scheme = item.get('opf:scheme')
|
||||
@ -352,13 +365,13 @@ class OPF(MetaInformation):
|
||||
return ans
|
||||
|
||||
def get_series(self):
|
||||
s = self.soup.package.metadata.find('series')
|
||||
s = self.metadata.find('series')
|
||||
if s is not None:
|
||||
return str(s.string).strip()
|
||||
return None
|
||||
|
||||
def get_series_index(self):
|
||||
s = self.soup.package.metadata.find('series-index')
|
||||
s = self.metadata.find('series-index')
|
||||
if s and s.string:
|
||||
try:
|
||||
return int(str(s.string).strip())
|
||||
@ -367,7 +380,7 @@ class OPF(MetaInformation):
|
||||
return None
|
||||
|
||||
def get_rating(self):
|
||||
s = self.soup.package.metadata.find('rating')
|
||||
s = self.metadata.find('rating')
|
||||
if s and s.string:
|
||||
try:
|
||||
return int(str(s.string).strip())
|
||||
@ -400,17 +413,17 @@ class OPFReader(OPF):
|
||||
if manage:
|
||||
stream.close()
|
||||
self.manifest = Manifest()
|
||||
m = self.soup.find('manifest')
|
||||
m = self.soup.find(re.compile('manifest'))
|
||||
if m is not None:
|
||||
self.manifest = Manifest.from_opf_manifest_element(m, dir)
|
||||
self.spine = None
|
||||
spine = self.soup.find('spine')
|
||||
spine = self.soup.find(re.compile('spine'))
|
||||
if spine is not None:
|
||||
self.spine = Spine.from_opf_spine_element(spine, self.manifest)
|
||||
|
||||
self.toc = TOC(base_path=dir)
|
||||
self.toc.read_from_opf(self)
|
||||
guide = self.soup.find('guide')
|
||||
guide = self.soup.find(re.compile('guide'))
|
||||
if guide is not None:
|
||||
self.guide = Guide.from_opf_guide(guide, dir)
|
||||
self.base_dir = dir
|
||||
|
||||
@ -30,7 +30,6 @@ class Resource(object):
|
||||
:member:`path`
|
||||
:member:`mime_type`
|
||||
:method:`href`
|
||||
|
||||
'''
|
||||
|
||||
def __init__(self, href_or_path, basedir=os.getcwd(), is_path=True):
|
||||
@ -419,7 +418,8 @@ class OPF(object):
|
||||
tags_path = XPath('descendant::*[re:match(name(), "subject", "i")]')
|
||||
isbn_path = XPath('descendant::*[re:match(name(), "identifier", "i") and '+
|
||||
'(re:match(@scheme, "isbn", "i") or re:match(@opf:scheme, "isbn", "i"))]')
|
||||
application_id_path= XPath('descendant::*[re:match(name(), "identifier", "i") and '+
|
||||
identifier_path = XPath('descendant::*[re:match(name(), "identifier", "i")]')
|
||||
application_id_path = XPath('descendant::*[re:match(name(), "identifier", "i") and '+
|
||||
'(re:match(@opf:scheme, "calibre|libprs500", "i") or re:match(@scheme, "calibre|libprs500", "i"))]')
|
||||
manifest_path = XPath('descendant::*[re:match(name(), "manifest", "i")]/*[re:match(name(), "item", "i")]')
|
||||
manifest_ppath = XPath('descendant::*[re:match(name(), "manifest", "i")]')
|
||||
@ -720,6 +720,27 @@ class OPF(object):
|
||||
return property(fget=fget, fset=fset)
|
||||
|
||||
|
||||
def guess_cover(self):
|
||||
'''
|
||||
Try to guess a cover. Needed for some old/badly formed OPF files.
|
||||
'''
|
||||
if self.base_dir and os.path.exists(self.base_dir):
|
||||
for item in self.identifier_path(self.metadata):
|
||||
scheme = None
|
||||
for key in item.attrib.keys():
|
||||
if key.endswith('scheme'):
|
||||
scheme = item.get(key)
|
||||
break
|
||||
if scheme is None:
|
||||
continue
|
||||
if item.text:
|
||||
prefix = item.text.replace('-', '')
|
||||
for suffix in ['.jpg', '.jpeg', '.gif', '.png', '.bmp']:
|
||||
cpath = os.access(os.path.join(self.base_dir, prefix+suffix), os.R_OK)
|
||||
if os.access(os.path.join(self.base_dir, prefix+suffix), os.R_OK):
|
||||
return cpath
|
||||
|
||||
|
||||
@apply
|
||||
def cover():
|
||||
|
||||
@ -729,6 +750,10 @@ class OPF(object):
|
||||
for item in self.guide:
|
||||
if item.type.lower() == t:
|
||||
return item.path
|
||||
try:
|
||||
return self.guess_cover()
|
||||
except:
|
||||
pass
|
||||
|
||||
def fset(self, path):
|
||||
if self.guide is not None:
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user