Merge from trunk
commit b3e5d18975
@@ -19,6 +19,59 @@
# new recipes:
#   - title:

- version: 0.8.64
  date: 2012-08-09

  new features:
    - title: "E-book viewer: Allow viewing images in the book in a separate pop-up window by right clicking on the image. Useful if you want to keep some image, like a map to the side while reading the book."

    - title: "Catalogs: Allow generation of catalogs in AZW3 format. Also add more powerful configuration options to exclude books and set prefixes. See http://www.mobileread.com/forums/showthread.php?t=187298 for details."

    - title: "Generate a PDF version of the User Manual"

  bug fixes:
    - title: "News download: Fix broken handling of nesting for HTML 5 tags when parsing with BeautifulSoup"

    - title: "EPUB: Handle files in the EPUB that have semi-colons in their file names. This means in particular using URL escaping when creating the NCX as ADE cannot handle unescaped semi-colons in the NCX."
      tickets: [1033665]

    - title: "Conversion pipeline: Ignore unparseable CSS instead of erroring out on it."
      tickets: [1034074]

    - title: "When setting up a column coloring rule based on the languages column, allow entry of localized language names instead of only ISO codes"

    - title: "Catalogs: Generate cover for mobi/azw3 catalogs"

    - title: "Update the last modified column record of a book, whenever a format is added to the book."

    - title: "E-book viewer: Fix line scrolling stops at breaks option not working in paged mode"
      tickets: [1033430]

    - title: "MOBI Output: Fix ToC at start option having no effect when converting some input documents that have an out-of-spine ToC."
      tickets: [1033656]

    - title: "Catalog Generation: When generating EPUB/MOBI catalogs add more flexible rules for excluding books. Also add rules to customize the prefix characters used."

    - title: "Make setting published date using metadata search/replace more robust."

    - title: "Tag Browser: Flatten the display of sub-groups when sort by is not set to 'name'."
      tickets: [1032746]

    - title: "Fix isbn:false not matching if other identifiers are attached to the book."

  improved recipes:
    - The New Republic
    - ZDNet
    - Metro UK
    - FHM UK

  new recipes:
    - title: eKundelek.pl
      author: Artur Stachecki

    - title: Sueddeutsche Mobil
      author: Andreas Zeiser

- version: 0.8.63
  date: 2012-08-02
@@ -83,7 +83,7 @@ p.author_index {
    font-size:large;
    font-weight:bold;
    text-align:left;
    margin-top:0px;
    margin-top:0.25px;
    margin-bottom:-2px;
    text-indent: 0em;
}
@@ -169,7 +169,14 @@ if iswindows:
                cflags=['/X']
                ),
        Extension('wpd',
                ['calibre/devices/mtp/windows/wpd.cpp'],
                [
                    'calibre/devices/mtp/windows/utils.cpp',
                    'calibre/devices/mtp/windows/device_enumeration.cpp',
                    'calibre/devices/mtp/windows/wpd.cpp',
                ],
                headers=[
                    'calibre/devices/mtp/windows/global.h',
                ],
                libraries=['ole32', 'portabledeviceguids'],
                # needs_ddk=True,
                cflags=['/X']
@@ -4,7 +4,7 @@ __license__ = 'GPL v3'
__copyright__ = '2008, Kovid Goyal <kovid@kovidgoyal.net>'
__docformat__ = 'restructuredtext en'
__appname__ = u'calibre'
numeric_version = (0, 8, 63)
numeric_version = (0, 8, 64)
__version__ = u'.'.join(map(unicode, numeric_version))
__author__ = u"Kovid Goyal <kovid@kovidgoyal.net>"
@@ -86,14 +86,10 @@ class MTP_DEVICE(MTPDeviceBase):

    def __init__(self, *args, **kwargs):
        MTPDeviceBase.__init__(self, *args, **kwargs)
        self.detect = MTPDetect()
        self.dev = None
        self.filesystem_cache = None
        self.lock = RLock()
        self.blacklisted_devices = set()
        for x in vars(self.detect.libmtp):
            if x.startswith('LIBMTP'):
                setattr(self, x, getattr(self.detect.libmtp, x))

    def set_debug_level(self, lvl):
        self.detect.libmtp.set_debug_level(lvl)

@@ -140,6 +136,13 @@ class MTP_DEVICE(MTPDeviceBase):
    def post_yank_cleanup(self):
        self.dev = self.filesystem_cache = None

    @synchronous
    def startup(self):
        self.detect = MTPDetect()
        for x in vars(self.detect.libmtp):
            if x.startswith('LIBMTP'):
                setattr(self, x, getattr(self.detect.libmtp, x))

    @synchronous
    def shutdown(self):
        self.dev = self.filesystem_cache = None

@@ -240,6 +243,7 @@ if __name__ == '__main__':

    from pprint import pprint
    dev = MTP_DEVICE(None)
    dev.startup()
    from calibre.devices.scanner import linux_scanner
    devs = linux_scanner()
    mtp_devs = dev.detect(devs)
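A hypothetical caller-side sketch of the lifecycle this hunk introduces: libmtp state is now built in startup() (on the thread that polls the device) and dropped in shutdown(), rather than once in __init__. The import path and the try/finally wrapper below are assumptions for illustration, not part of the diff.

    from calibre.devices.mtp.unix.driver import MTP_DEVICE  # assumed module path

    dev = MTP_DEVICE(None)
    dev.startup()                 # creates MTPDetect() and copies the LIBMTP_* constants onto the driver
    try:
        print(dev.detect.libmtp)  # the loaded libmtp extension is available only after startup()
    finally:
        dev.shutdown()            # clears self.dev and self.filesystem_cache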
@@ -7,15 +7,17 @@
#include "devices.h"

// Macros and utilities
static PyObject *MTPError = NULL;

#define ENSURE_DEV(rval) \
    if (self->device == NULL) { \
        PyErr_SetString(PyExc_ValueError, "This device has not been initialized."); \
        PyErr_SetString(MTPError, "This device has not been initialized."); \
        return rval; \
    }

#define ENSURE_STORAGE(rval) \
    if (self->device->storage == NULL) { \
        PyErr_SetString(PyExc_RuntimeError, "The device has no storage information."); \
        PyErr_SetString(MTPError, "The device has no storage information."); \
        return rval; \
    }

@@ -31,6 +33,7 @@
#define AC_ReadOnly 0x0001
#define AC_ReadOnly_with_Object_Deletion 0x0002


typedef struct {
    PyObject *obj;
    PyObject *extra;

@@ -183,7 +186,7 @@ libmtp_Device_init(libmtp_Device *self, PyObject *args, PyObject *kwds)
    Py_END_ALLOW_THREADS;

    if (dev == NULL) {
        PyErr_SetString(PyExc_ValueError, "Unable to open raw device.");
        PyErr_SetString(MTPError, "Unable to open raw device.");
        return -1;
    }

@@ -272,7 +275,7 @@ static PyObject*
libmtp_Device_update_storage_info(libmtp_Device *self, PyObject *args, PyObject *kwargs) {
    ENSURE_DEV(NULL);
    if (LIBMTP_Get_Storage(self->device, LIBMTP_STORAGE_SORTBY_NOTSORTED) < 0) {
        PyErr_SetString(PyExc_RuntimeError, "Failed to get storage infor for device.");
        PyErr_SetString(MTPError, "Failed to get storage infor for device.");
        return NULL;
    }
    Py_RETURN_NONE;

@@ -761,6 +764,8 @@ initlibmtp(void) {

    m = Py_InitModule3("libmtp", libmtp_methods, "Interface to libmtp.");
    if (m == NULL) return;
    MTPError = PyErr_NewException("libmtp.MTPError", NULL, NULL);
    if (MTPError == NULL) return;

    LIBMTP_Init();
    LIBMTP_Set_Debug(LIBMTP_DEBUG_NONE);
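With errors now raised through the module-level libmtp.MTPError (registered above via PyErr_NewException) instead of the generic ValueError/RuntimeError, Python callers can single out device failures. A hedged caller-side sketch; the dev object, method name and logging call are placeholders, not code from this commit:

    try:
        dev.update_storage_info()          # a failing libmtp call now raises MTPError
    except libmtp.MTPError as err:
        log.warn('Ignoring MTP device error: %s' % err)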

src/calibre/devices/mtp/windows/__init__.py (new file, 11 lines)
@@ -0,0 +1,11 @@
#!/usr/bin/env python
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:fdm=marker:ai
from __future__ import (unicode_literals, division, absolute_import,
                        print_function)

__license__ = 'GPL v3'
__copyright__ = '2012, Kovid Goyal <kovid at kovidgoyal.net>'
__docformat__ = 'restructuredtext en'

src/calibre/devices/mtp/windows/device_enumeration.cpp (new file, 229 lines)
@@ -0,0 +1,229 @@
/*
 * device_enumeration.cpp
 * Copyright (C) 2012 Kovid Goyal <kovid at kovidgoyal.net>
 *
 * Distributed under terms of the MIT license.
 */

#include "global.h"

namespace wpd {

IPortableDeviceValues *get_client_information() { // {{{
    IPortableDeviceValues *client_information;
    HRESULT hr;

    ENSURE_WPD(NULL);

    Py_BEGIN_ALLOW_THREADS;
    hr = CoCreateInstance(CLSID_PortableDeviceValues, NULL,
            CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&client_information));
    Py_END_ALLOW_THREADS;
    if (FAILED(hr)) { hresult_set_exc("Failed to create IPortableDeviceValues", hr); return NULL; }

    Py_BEGIN_ALLOW_THREADS;
    hr = client_information->SetStringValue(WPD_CLIENT_NAME, client_info.name);
    Py_END_ALLOW_THREADS;
    if (FAILED(hr)) { hresult_set_exc("Failed to set client name", hr); return NULL; }
    Py_BEGIN_ALLOW_THREADS;
    hr = client_information->SetUnsignedIntegerValue(WPD_CLIENT_MAJOR_VERSION, client_info.major_version);
    Py_END_ALLOW_THREADS;
    if (FAILED(hr)) { hresult_set_exc("Failed to set major version", hr); return NULL; }
    Py_BEGIN_ALLOW_THREADS;
    hr = client_information->SetUnsignedIntegerValue(WPD_CLIENT_MINOR_VERSION, client_info.minor_version);
    Py_END_ALLOW_THREADS;
    if (FAILED(hr)) { hresult_set_exc("Failed to set minor version", hr); return NULL; }
    Py_BEGIN_ALLOW_THREADS;
    hr = client_information->SetUnsignedIntegerValue(WPD_CLIENT_REVISION, client_info.revision);
    Py_END_ALLOW_THREADS;
    if (FAILED(hr)) { hresult_set_exc("Failed to set revision", hr); return NULL; }
    // Some device drivers need to impersonate the caller in order to function correctly. Since our application does not
    // need to restrict its identity, specify SECURITY_IMPERSONATION so that we work with all devices.
    Py_BEGIN_ALLOW_THREADS;
    hr = client_information->SetUnsignedIntegerValue(WPD_CLIENT_SECURITY_QUALITY_OF_SERVICE, SECURITY_IMPERSONATION);
    Py_END_ALLOW_THREADS;
    if (FAILED(hr)) { hresult_set_exc("Failed to set quality of service", hr); return NULL; }
    return client_information;
} // }}}

IPortableDevice *open_device(const wchar_t *pnp_id, IPortableDeviceValues *client_information) { // {{{
    IPortableDevice *device = NULL;
    HRESULT hr;

    Py_BEGIN_ALLOW_THREADS;
    hr = CoCreateInstance(CLSID_PortableDevice, NULL, CLSCTX_INPROC_SERVER,
            IID_PPV_ARGS(&device));
    Py_END_ALLOW_THREADS;
    if (FAILED(hr)) hresult_set_exc("Failed to create IPortableDevice", hr);
    else {
        Py_BEGIN_ALLOW_THREADS;
        hr = device->Open(pnp_id, client_information);
        Py_END_ALLOW_THREADS;
        if FAILED(hr) {
            Py_BEGIN_ALLOW_THREADS;
            device->Release();
            Py_END_ALLOW_THREADS;
            device = NULL;
            hresult_set_exc((hr == E_ACCESSDENIED) ? "Read/write access to device is denied": "Failed to open device", hr);
        }
    }

    return device;

} // }}}

PyObject* get_device_information(IPortableDevice *device) { // {{{
    IPortableDeviceContent *content = NULL;
    IPortableDeviceProperties *properties = NULL;
    IPortableDeviceKeyCollection *keys = NULL;
    IPortableDeviceValues *values = NULL;
    IPortableDeviceCapabilities *capabilities = NULL;
    IPortableDevicePropVariantCollection *categories = NULL;
    HRESULT hr;
    DWORD num_of_categories, i;
    LPWSTR temp;
    ULONG ti;
    PyObject *t, *ans = NULL;
    char *type;

    Py_BEGIN_ALLOW_THREADS;
    hr = CoCreateInstance(CLSID_PortableDeviceKeyCollection, NULL,
            CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&keys));
    Py_END_ALLOW_THREADS;
    if (FAILED(hr)) {hresult_set_exc("Failed to create IPortableDeviceKeyCollection", hr); goto end;}

    Py_BEGIN_ALLOW_THREADS;
    hr = keys->Add(WPD_DEVICE_PROTOCOL);
    // Despite the MSDN documentation, this does not exist in PortableDevice.h
    // hr = keys->Add(WPD_DEVICE_TRANSPORT);
    hr = keys->Add(WPD_DEVICE_FRIENDLY_NAME);
    hr = keys->Add(WPD_DEVICE_MANUFACTURER);
    hr = keys->Add(WPD_DEVICE_MODEL);
    hr = keys->Add(WPD_DEVICE_SERIAL_NUMBER);
    hr = keys->Add(WPD_DEVICE_FIRMWARE_VERSION);
    hr = keys->Add(WPD_DEVICE_TYPE);
    Py_END_ALLOW_THREADS;
    if (FAILED(hr)) {hresult_set_exc("Failed to add keys to IPortableDeviceKeyCollection", hr); goto end;}

    Py_BEGIN_ALLOW_THREADS;
    hr = device->Content(&content);
    Py_END_ALLOW_THREADS;
    if (FAILED(hr)) {hresult_set_exc("Failed to get IPortableDeviceContent", hr); goto end; }

    Py_BEGIN_ALLOW_THREADS;
    hr = content->Properties(&properties);
    Py_END_ALLOW_THREADS;
    if (FAILED(hr)) {hresult_set_exc("Failed to get IPortableDeviceProperties", hr); goto end; }

    Py_BEGIN_ALLOW_THREADS;
    hr = properties->GetValues(WPD_DEVICE_OBJECT_ID, keys, &values);
    Py_END_ALLOW_THREADS;
    if(FAILED(hr)) {hresult_set_exc("Failed to get device info", hr); goto end; }

    Py_BEGIN_ALLOW_THREADS;
    hr = device->Capabilities(&capabilities);
    Py_END_ALLOW_THREADS;
    if(FAILED(hr)) {hresult_set_exc("Failed to get device capabilities", hr); goto end; }

    Py_BEGIN_ALLOW_THREADS;
    hr = capabilities->GetFunctionalCategories(&categories);
    Py_END_ALLOW_THREADS;
    if(FAILED(hr)) {hresult_set_exc("Failed to get device functional categories", hr); goto end; }

    Py_BEGIN_ALLOW_THREADS;
    hr = categories->GetCount(&num_of_categories);
    Py_END_ALLOW_THREADS;
    if(FAILED(hr)) {hresult_set_exc("Failed to get device functional categories number", hr); goto end; }

    ans = PyDict_New();
    if (ans == NULL) {PyErr_NoMemory(); goto end;}

    if (SUCCEEDED(values->GetStringValue(WPD_DEVICE_PROTOCOL, &temp))) {
        t = PyUnicode_FromWideChar(temp, wcslen(temp));
        if (t != NULL) {PyDict_SetItemString(ans, "protocol", t); Py_DECREF(t);}
        CoTaskMemFree(temp);
    }

    // if (SUCCEEDED(values->GetUnsignedIntegerValue(WPD_DEVICE_TRANSPORT, &ti))) {
    //     PyDict_SetItemString(ans, "isusb", (ti == WPD_DEVICE_TRANSPORT_USB) ? Py_True : Py_False);
    //     t = PyLong_FromUnsignedLong(ti);
    // }

    if (SUCCEEDED(values->GetUnsignedIntegerValue(WPD_DEVICE_TYPE, &ti))) {
        switch (ti) {
            case WPD_DEVICE_TYPE_CAMERA:
                type = "camera"; break;
            case WPD_DEVICE_TYPE_MEDIA_PLAYER:
                type = "media player"; break;
            case WPD_DEVICE_TYPE_PHONE:
                type = "phone"; break;
            case WPD_DEVICE_TYPE_VIDEO:
                type = "video"; break;
            case WPD_DEVICE_TYPE_PERSONAL_INFORMATION_MANAGER:
                type = "personal information manager"; break;
            case WPD_DEVICE_TYPE_AUDIO_RECORDER:
                type = "audio recorder"; break;
            default:
                type = "unknown";
        }
        t = PyString_FromString(type);
        if (t != NULL) {
            PyDict_SetItemString(ans, "type", t); Py_DECREF(t);
        }
    }

    if (SUCCEEDED(values->GetStringValue(WPD_DEVICE_FRIENDLY_NAME, &temp))) {
        t = PyUnicode_FromWideChar(temp, wcslen(temp));
        if (t != NULL) {PyDict_SetItemString(ans, "friendly_name", t); Py_DECREF(t);}
        CoTaskMemFree(temp);
    }

    if (SUCCEEDED(values->GetStringValue(WPD_DEVICE_MANUFACTURER, &temp))) {
        t = PyUnicode_FromWideChar(temp, wcslen(temp));
        if (t != NULL) {PyDict_SetItemString(ans, "manufacturer_name", t); Py_DECREF(t);}
        CoTaskMemFree(temp);
    }

    if (SUCCEEDED(values->GetStringValue(WPD_DEVICE_MODEL, &temp))) {
        t = PyUnicode_FromWideChar(temp, wcslen(temp));
        if (t != NULL) {PyDict_SetItemString(ans, "model_name", t); Py_DECREF(t);}
        CoTaskMemFree(temp);
    }

    if (SUCCEEDED(values->GetStringValue(WPD_DEVICE_SERIAL_NUMBER, &temp))) {
        t = PyUnicode_FromWideChar(temp, wcslen(temp));
        if (t != NULL) {PyDict_SetItemString(ans, "serial_number", t); Py_DECREF(t);}
        CoTaskMemFree(temp);
    }

    if (SUCCEEDED(values->GetStringValue(WPD_DEVICE_FIRMWARE_VERSION, &temp))) {
        t = PyUnicode_FromWideChar(temp, wcslen(temp));
        if (t != NULL) {PyDict_SetItemString(ans, "device_version", t); Py_DECREF(t);}
        CoTaskMemFree(temp);
    }

    t = Py_False;
    for (i = 0; i < num_of_categories; i++) {
        PROPVARIANT pv;
        PropVariantInit(&pv);
        if (SUCCEEDED(categories->GetAt(i, &pv)) && pv.puuid != NULL) {
            if (IsEqualGUID(WPD_FUNCTIONAL_CATEGORY_STORAGE, *pv.puuid)) {
                t = Py_True;
            }
        }
        PropVariantClear(&pv);
        if (t == Py_True) break;
    }
    PyDict_SetItemString(ans, "has_storage", t);

end:
    if (keys != NULL) keys->Release();
    if (values != NULL) values->Release();
    if (properties != NULL) properties->Release();
    if (content != NULL) content->Release();
    if (capabilities != NULL) capabilities->Release();
    if (categories != NULL) categories->Release();
    return ans;
} // }}}

} // namespace wpd
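For reference, the dictionary assembled by get_device_information() above has the following shape. The keys are taken from the PyDict_SetItemString calls; keys whose WPD property lookup fails are simply omitted, and the values shown here are illustrative placeholders, not output from a real device:

    {
        'protocol':          u'MTP: 1.00',
        'type':              'media player',   # camera/media player/phone/video/... or 'unknown'
        'friendly_name':     u'Example Player',
        'manufacturer_name': u'Example Corp',
        'model_name':        u'Player 3000',
        'serial_number':     u'0123456789',
        'device_version':    u'1.0.0',
        'has_storage':       True,             # True if any functional category is storage
    }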

src/calibre/devices/mtp/windows/global.h (new file, 46 lines)
@@ -0,0 +1,46 @@
/*
 * global.h
 * Copyright (C) 2012 Kovid Goyal <kovid at kovidgoyal.net>
 *
 * Distributed under terms of the MIT license.
 */

#pragma once
#define UNICODE
#include <Windows.h>
#include <Python.h>

#include <Objbase.h>
#include <PortableDeviceApi.h>
#include <PortableDevice.h>

#define ENSURE_WPD(retval) \
    if (portable_device_manager == NULL) { PyErr_SetString(NoWPD, "No WPD service available."); return retval; }

namespace wpd {

// Module exception types
extern PyObject *WPDError, *NoWPD;

// The global device manager
extern IPortableDeviceManager *portable_device_manager;

// Application info
typedef struct {
    wchar_t *name;
    unsigned int major_version;
    unsigned int minor_version;
    unsigned int revision;
} ClientInfo;
extern ClientInfo client_info;

// Utility functions
PyObject *hresult_set_exc(const char *msg, HRESULT hr);
wchar_t *unicode_to_wchar(PyObject *o);

extern IPortableDeviceValues* get_client_information();
extern IPortableDevice* open_device(const wchar_t *pnp_id, IPortableDeviceValues *client_information);
extern PyObject* get_device_information(IPortableDevice *device);

}

src/calibre/devices/mtp/windows/remote.py (new file, 46 lines)
@@ -0,0 +1,46 @@
#!/usr/bin/env python
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:fdm=marker:ai
from __future__ import (unicode_literals, division, absolute_import,
                        print_function)

__license__ = 'GPL v3'
__copyright__ = '2012, Kovid Goyal <kovid at kovidgoyal.net>'
__docformat__ = 'restructuredtext en'

import subprocess, sys, os

def build():
    builder = subprocess.Popen('ssh xp_build ~/build-wpd'.split())
    syncer = subprocess.Popen('ssh getafix ~/test-wpd'.split())
    if builder.wait() != 0:
        raise Exception('Failed to build plugin')
    if syncer.wait() != 0:
        raise Exception('Failed to rsync to getafix')
    subprocess.check_call(
        'scp xp_build:build/calibre/src/calibre/plugins/wpd.pyd /tmp'.split())
    subprocess.check_call(
        'scp /tmp/wpd.pyd getafix:calibre/src/calibre/devices/mtp/windows'.split())
    p = subprocess.Popen(
        'ssh getafix calibre-debug -e calibre/src/calibre/devices/mtp/windows/remote.py'.split())
    p.wait()
    print()


def main():
    import pprint
    sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
    import wpd
    from calibre.constants import plugins
    plugins._plugins['wpd'] = (wpd, '')
    sys.path.pop(0)
    wpd.init('calibre', 1, 0, 0)
    try:
        for pnp_id in wpd.enumerate_devices():
            print (pnp_id)
            pprint.pprint(wpd.device_info(pnp_id))
    finally:
        wpd.uninit()

if __name__ == '__main__':
    main()

src/calibre/devices/mtp/windows/utils.cpp (new file, 45 lines)
@@ -0,0 +1,45 @@
/*
 * utils.cpp
 * Copyright (C) 2012 Kovid Goyal <kovid at kovidgoyal.net>
 *
 * Distributed under terms of the MIT license.
 */

#include "global.h"

using namespace wpd;

PyObject *wpd::hresult_set_exc(const char *msg, HRESULT hr) {
    PyObject *o = NULL, *mess;
    LPWSTR desc = NULL;

    FormatMessageW(FORMAT_MESSAGE_FROM_SYSTEM|FORMAT_MESSAGE_ALLOCATE_BUFFER|FORMAT_MESSAGE_IGNORE_INSERTS,
            NULL, hr, MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT), (LPWSTR)&desc, 0, NULL);
    if (desc == NULL) {
        o = PyUnicode_FromString("No description available.");
    } else {
        o = PyUnicode_FromWideChar(desc, wcslen(desc));
        LocalFree(desc);
    }
    if (o == NULL) return PyErr_NoMemory();
    mess = PyUnicode_FromFormat("%s: hr=%lu facility=%u error_code=%u description: %U", msg, hr, HRESULT_FACILITY(hr), HRESULT_CODE(hr), o);
    Py_XDECREF(o);
    if (mess == NULL) return PyErr_NoMemory();
    PyErr_SetObject(WPDError, mess);
    Py_DECREF(mess);
    return NULL;
}

wchar_t *wpd::unicode_to_wchar(PyObject *o) {
    wchar_t *buf;
    Py_ssize_t len;
    if (!PyUnicode_Check(o)) {PyErr_Format(PyExc_TypeError, "The pnp id must be a unicode object"); return NULL;}
    len = PyUnicode_GET_SIZE(o);
    if (len < 1) {PyErr_Format(PyExc_TypeError, "The pnp id must not be empty."); return NULL;}
    buf = (wchar_t *)calloc(len+2, sizeof(wchar_t));
    if (buf == NULL) { PyErr_NoMemory(); return NULL; }
    len = PyUnicode_AsWideChar((PyUnicodeObject*)o, buf, len);
    if (len == -1) { free(buf); PyErr_Format(PyExc_TypeError, "Invalid pnp id."); return NULL; }
    return buf;
}

@@ -5,34 +5,51 @@
 * Distributed under terms of the MIT license.
 */

#include "global.h"

#define UNICODE
#include <Windows.h>
#include <Python.h>
using namespace wpd;

#include <Objbase.h>
#include <PortableDeviceApi.h>
// Module exception types
PyObject *wpd::WPDError = NULL, *wpd::NoWPD = NULL;

// The global device manager
IPortableDeviceManager *wpd::portable_device_manager = NULL;

// Flag indicating if COM has been initialized
static int _com_initialized = 0;
static PyObject *WPDError;
static IPortableDeviceManager *portable_device_manager = NULL;
// Application Info
wpd::ClientInfo wpd::client_info = {NULL, 0, 0, 0};

extern IPortableDeviceValues* wpd::get_client_information();
extern IPortableDevice* wpd::open_device(const wchar_t *pnp_id, IPortableDeviceValues *client_information);
extern PyObject* wpd::get_device_information(IPortableDevice *device);

// Module startup/shutdown {{{
static PyObject *
wpd_init(PyObject *self, PyObject *args) {
    HRESULT hr;
    PyObject *o;
    if (!PyArg_ParseTuple(args, "OIII", &o, &client_info.major_version, &client_info.minor_version, &client_info.revision)) return NULL;
    client_info.name = unicode_to_wchar(o);
    if (client_info.name == NULL) return NULL;

    if (!_com_initialized) {
        Py_BEGIN_ALLOW_THREADS;
        hr = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED);
        Py_END_ALLOW_THREADS;
        if (SUCCEEDED(hr)) _com_initialized = 1;
        else {PyErr_SetString(WPDError, "Failed to initialize COM"); return NULL;}
    }

    if (portable_device_manager == NULL) {
        Py_BEGIN_ALLOW_THREADS;
        hr = CoCreateInstance(CLSID_PortableDeviceManager, NULL,
                CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&portable_device_manager));
        Py_END_ALLOW_THREADS;

        if (FAILED(hr)) {
            PyErr_SetString(WPDError, (hr == REGDB_E_CLASSNOTREG) ?
            portable_device_manager = NULL;
            PyErr_SetString((hr == REGDB_E_CLASSNOTREG) ? NoWPD : WPDError, (hr == REGDB_E_CLASSNOTREG) ?
                "This computer is not running the Windows Portable Device framework. You may need to install Windows Media Player 11 or newer." :
                "Failed to create the WPD device manager interface");
            return NULL;

@@ -44,28 +61,126 @@ wpd_init(PyObject *self, PyObject *args) {

static PyObject *
wpd_uninit(PyObject *self, PyObject *args) {
    if (_com_initialized) {
        CoUninitialize();
        _com_initialized = 0;
    }

    if (portable_device_manager != NULL) {
        Py_BEGIN_ALLOW_THREADS;
        portable_device_manager->Release();
        Py_END_ALLOW_THREADS;
        portable_device_manager = NULL;
    }

    if (_com_initialized) {
        Py_BEGIN_ALLOW_THREADS;
        CoUninitialize();
        Py_END_ALLOW_THREADS;
        _com_initialized = 0;
    }

    if (client_info.name != NULL) { free(client_info.name); }
    // hresult_set_exc("test", HRESULT_FROM_WIN32(ERROR_ACCESS_DENIED)); return NULL;

    Py_RETURN_NONE;
}
// }}}

// enumerate_devices() {{{
static PyObject *
wpd_enumerate_devices(PyObject *self, PyObject *args) {
    PyObject *refresh = NULL, *ans = NULL, *temp;
    HRESULT hr;
    DWORD num_of_devices, i;
    PWSTR *pnp_device_ids;

    ENSURE_WPD(NULL);

    if (!PyArg_ParseTuple(args, "|O", &refresh)) return NULL;

    if (refresh != NULL && PyObject_IsTrue(refresh)) {
        Py_BEGIN_ALLOW_THREADS;
        hr = portable_device_manager->RefreshDeviceList();
        Py_END_ALLOW_THREADS;
        if (FAILED(hr)) return hresult_set_exc("Failed to refresh the list of portable devices", hr);
    }

    hr = portable_device_manager->GetDevices(NULL, &num_of_devices);
    num_of_devices += 15; // Incase new devices were connected between this call and the next
    if (FAILED(hr)) return hresult_set_exc("Failed to get number of devices on the system", hr);
    pnp_device_ids = (PWSTR*)calloc(num_of_devices, sizeof(PWSTR));
    if (pnp_device_ids == NULL) return PyErr_NoMemory();

    Py_BEGIN_ALLOW_THREADS;
    hr = portable_device_manager->GetDevices(pnp_device_ids, &num_of_devices);
    Py_END_ALLOW_THREADS;

    if (SUCCEEDED(hr)) {
        ans = PyTuple_New(num_of_devices);
        if (ans != NULL) {
            for(i = 0; i < num_of_devices; i++) {
                temp = PyUnicode_FromWideChar(pnp_device_ids[i], wcslen(pnp_device_ids[i]));
                if (temp == NULL) { PyErr_NoMemory(); Py_DECREF(ans); ans = NULL; break;}
                PyTuple_SET_ITEM(ans, i, temp);
            }
        }
    } else {
        hresult_set_exc("Failed to get list of portable devices", hr);
    }

    for (i = 0; i < num_of_devices; i++) {
        Py_BEGIN_ALLOW_THREADS;
        CoTaskMemFree(pnp_device_ids[i]);
        Py_END_ALLOW_THREADS;
        pnp_device_ids[i] = NULL;
    }
    free(pnp_device_ids);
    pnp_device_ids = NULL;

    return Py_BuildValue("N", ans);
} // }}}

// device_info() {{{
static PyObject *
wpd_device_info(PyObject *self, PyObject *args) {
    PyObject *py_pnp_id, *ans = NULL;
    wchar_t *pnp_id;
    IPortableDeviceValues *client_information = NULL;
    IPortableDevice *device = NULL;

    ENSURE_WPD(NULL);

    if (!PyArg_ParseTuple(args, "O", &py_pnp_id)) return NULL;
    pnp_id = unicode_to_wchar(py_pnp_id);
    if (pnp_id == NULL) return NULL;

    client_information = get_client_information();
    if (client_information != NULL) {
        device = open_device(pnp_id, client_information);
        if (device != NULL) {
            ans = get_device_information(device);
        }
    }

    if (pnp_id != NULL) free(pnp_id);
    if (client_information != NULL) client_information->Release();
    if (device != NULL) {device->Close(); device->Release();}
    return ans;
} // }}}

static PyMethodDef wpd_methods[] = {
    {"init", wpd_init, METH_VARARGS,
        "init()\n\n Initializes this module. Call this method *only* in the thread in which you intend to use this module. Also remember to call uninit before the thread exits."
        "init(name, major_version, minor_version, revision)\n\n Initializes this module. Call this method *only* in the thread in which you intend to use this module. Also remember to call uninit before the thread exits."
    },

    {"uninit", wpd_uninit, METH_VARARGS,
        "uninit()\n\n Uninitialize this module. Must be called in the same thread as init(). Do not use any function/objects from this module after uninit has been called."
    },

    {"enumerate_devices", wpd_enumerate_devices, METH_VARARGS,
        "enumerate_devices(refresh=False)\n\n Get the list of device PnP ids for all connected devices recognized by the WPD service. The result is cached, unless refresh=True. Do not call with refresh=True too often as it is resource intensive."
    },

    {"device_info", wpd_device_info, METH_VARARGS,
        "device_info(pnp_id)\n\n Return basic device information for the device identified by pnp_id (which you get from enumerate_devices)."
    },

    {NULL, NULL, 0, NULL}
};

@@ -80,6 +195,8 @@ initwpd(void) {
    WPDError = PyErr_NewException("wpd.WPDError", NULL, NULL);
    if (WPDError == NULL) return;

    NoWPD = PyErr_NewException("wpd.NoWPD", NULL, NULL);
    if (NoWPD == NULL) return;
}
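Tying the method table and the two exception types together, here is a hedged caller-side sketch of how the module is meant to be driven (Python 2, same calling sequence as remote.py above). Which branch catches what is an assumption based on where wpd_init and the other calls set NoWPD versus WPDError:

    import pprint
    import wpd   # the compiled extension built by the setup/extensions.py entry above

    try:
        wpd.init(u'calibre', 1, 0, 0)      # per-thread COM + device manager setup
    except wpd.NoWPD:
        raise SystemExit('WPD framework missing; install Windows Media Player 11 or newer')
    try:
        for pnp_id in wpd.enumerate_devices(True):   # refresh=True re-scans connected devices
            pprint.pprint(wpd.device_info(pnp_id))
    except wpd.WPDError as err:
        print('WPD call failed: %s' % err)
    finally:
        wpd.uninit()                        # must run in the same thread as init()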

@@ -31,7 +31,7 @@ BOOK_EXTENSIONS = ['lrf', 'rar', 'zip', 'rtf', 'lit', 'txt', 'txtz', 'text', 'ht
        'epub', 'fb2', 'djv', 'djvu', 'lrx', 'cbr', 'cbz', 'cbc', 'oebzip',
        'rb', 'imp', 'odt', 'chm', 'tpz', 'azw1', 'pml', 'pmlz', 'mbp', 'tan', 'snb',
        'xps', 'oxps', 'azw4', 'book', 'zbf', 'pobi', 'docx', 'md',
        'textile', 'markdown', 'ibook', 'iba', 'azw3']
        'textile', 'markdown', 'ibook', 'iba', 'azw3', 'ps']

class HTMLRenderer(object):

@@ -1541,6 +1541,8 @@ class TOC(object):
        if title:
            title = re.sub(r'\s+', ' ', title)
            element(label, NCX('text')).text = title
        # Do not unescape this URL as ADE requires it to be escaped to
        # handle semi colons and other special characters in the file names
        element(point, NCX('content'), src=node.href)
        node.to_ncx(point)
    return parent
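A short illustration (not calibre code) of the escaping the comment above refers to: ADE needs reserved characters such as ';' percent-encoded in the NCX src attribute, which is exactly what standard URL quoting produces:

    from urllib import quote   # Python 2 stdlib, matching the code base here

    print(quote('Some Book; Part 1.html'))   # -> Some%20Book%3B%20Part%201.html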

@@ -11,6 +11,7 @@ from collections import defaultdict

from lxml import etree
import cssutils
from cssutils.css import Property

from calibre.ebooks.oeb.base import (XHTML, XHTML_NS, CSS_MIME, OEB_STYLES,
        namespace, barename, XPath)
@@ -276,10 +277,16 @@ class CSSFlattener(object):
                cssdict['font-family'] = node.attrib['face']
                del node.attrib['face']
            if 'color' in node.attrib:
                cssdict['color'] = node.attrib['color']
                try:
                    cssdict['color'] = Property('color', node.attrib['color']).value
                except ValueError:
                    pass
                del node.attrib['color']
            if 'bgcolor' in node.attrib:
                cssdict['background-color'] = node.attrib['bgcolor']
                try:
                    cssdict['background-color'] = Property('background-color', node.attrib['bgcolor']).value
                except ValueError:
                    pass
                del node.attrib['bgcolor']
            if cssdict.get('font-weight', '').lower() == 'medium':
                cssdict['font-weight'] = 'normal' # ADE chokes on font-weight medium

@@ -194,9 +194,17 @@ def render_jacket(mi, output_profile,
            args[key] = escape(val)
            args[key+'_label'] = escape(display_name)
        except:
            # if the val (custom column contents) is None, don't add to args
            pass

    if False:
        print("Custom column values available in jacket template:")
        for key in args.keys():
            if key.startswith('_') and not key.endswith('_label'):
                print(" %s: %s" % ('#' + key[1:], args[key]))

    # Used in the comment describing use of custom columns in templates
    # Don't change this unless you also change it in template.xhtml
    args['_genre_label'] = args.get('_genre_label', '{_genre_label}')
    args['_genre'] = args.get('_genre', '{_genre}')

@@ -14,13 +14,9 @@ from calibre.gui2 import gprefs, question_dialog
from calibre.utils.icu import sort_key

from catalog_epub_mobi_ui import Ui_Form
from PyQt4 import QtGui
from PyQt4.Qt import (Qt, QAbstractItemView, QCheckBox, QComboBox, QDialog,
        QDialogButtonBox, QDoubleSpinBox,
        QHBoxLayout, QIcon, QLabel, QLineEdit,
        QPlainTextEdit, QRadioButton, QSize, QSizePolicy,
        QTableWidget, QTableWidgetItem, QTimer,
        QToolButton, QVBoxLayout, QWidget)
from PyQt4.Qt import (Qt, QAbstractItemView, QCheckBox, QComboBox,
        QDoubleSpinBox, QIcon, QLineEdit, QRadioButton, QSize, QSizePolicy,
        QTableWidget, QTableWidgetItem, QToolButton, QVBoxLayout, QWidget)

class PluginWidget(QWidget,Ui_Form):

@@ -549,9 +545,9 @@ class GenericRulesTable(QTableWidget):
        first = rows[0].row() + 1
        last = rows[-1].row() + 1

        message = '<p>Are you sure you want to delete rule %d?' % first
        message = _('Are you sure you want to delete rule %d?') % first
        if len(rows) > 1:
            message = '<p>Are you sure you want to delete rules %d-%d?' % (first, last)
            message = _('Are you sure you want to delete rules %d-%d?') % (first, last)
        if not question_dialog(self, _('Are you sure?'), message, show_copy_button=False):
            return
        first_sel_row = self.currentRow()
@@ -656,9 +652,9 @@ class GenericRulesTable(QTableWidget):
class ExclusionRules(GenericRulesTable):

    COLUMNS = { 'ENABLED':{'ordinal': 0, 'name': ''},
                'NAME': {'ordinal': 1, 'name': 'Name'},
                'FIELD': {'ordinal': 2, 'name': 'Field'},
                'PATTERN': {'ordinal': 3, 'name': 'Value'},}
                'NAME': {'ordinal': 1, 'name': _('Name')},
                'FIELD': {'ordinal': 2, 'name': _('Field')},
                'PATTERN': {'ordinal': 3, 'name': _('Value')},}

    def __init__(self, parent_gb_hl, object_name, rules, eligible_custom_fields, db):
        super(ExclusionRules, self).__init__(parent_gb_hl, object_name, rules, eligible_custom_fields, db)
@@ -764,15 +760,15 @@ class ExclusionRules(GenericRulesTable):
        elif source_field == 'Tags':
            values = sorted(self.db.all_tags(), key=sort_key)
        else:
            if self.eligible_custom_fields[source_field]['datatype'] in ['enumeration', 'text']:
            if self.eligible_custom_fields[unicode(source_field)]['datatype'] in ['enumeration', 'text']:
                values = self.db.all_custom(self.db.field_metadata.key_to_label(
                    self.eligible_custom_fields[source_field]['field']))
                    self.eligible_custom_fields[unicode(source_field)]['field']))
                values = sorted(values, key=sort_key)
            elif self.eligible_custom_fields[source_field]['datatype'] in ['bool']:
            elif self.eligible_custom_fields[unicode(source_field)]['datatype'] in ['bool']:
                values = ['True','False','unspecified']
            elif self.eligible_custom_fields[source_field]['datatype'] in ['composite']:
            elif self.eligible_custom_fields[unicode(source_field)]['datatype'] in ['composite']:
                values = ['any value','unspecified']
            elif self.eligible_custom_fields[source_field]['datatype'] in ['datetime']:
            elif self.eligible_custom_fields[unicode(source_field)]['datatype'] in ['datetime']:
                values = ['any date','unspecified']

        values_combo = ComboBox(self, values, pattern)
@@ -781,10 +777,10 @@ class ExclusionRules(GenericRulesTable):
class PrefixRules(GenericRulesTable):

    COLUMNS = { 'ENABLED':{'ordinal': 0, 'name': ''},
                'NAME': {'ordinal': 1, 'name': 'Name'},
                'PREFIX': {'ordinal': 2, 'name': 'Prefix'},
                'FIELD': {'ordinal': 3, 'name': 'Field'},
                'PATTERN':{'ordinal': 4, 'name': 'Value'},}
                'NAME': {'ordinal': 1, 'name': _('Name')},
                'PREFIX': {'ordinal': 2, 'name': _('Prefix')},
                'FIELD': {'ordinal': 3, 'name': _('Field')},
                'PATTERN':{'ordinal': 4, 'name': _('Value')},}

    def __init__(self, parent_gb_hl, object_name, rules, eligible_custom_fields, db):
        super(PrefixRules, self).__init__(parent_gb_hl, object_name, rules, eligible_custom_fields, db)
@@ -1039,15 +1035,15 @@ class PrefixRules(GenericRulesTable):
        elif source_field == 'Tags':
            values = sorted(self.db.all_tags(), key=sort_key)
        else:
            if self.eligible_custom_fields[source_field]['datatype'] in ['enumeration', 'text']:
            if self.eligible_custom_fields[unicode(source_field)]['datatype'] in ['enumeration', 'text']:
                values = self.db.all_custom(self.db.field_metadata.key_to_label(
                    self.eligible_custom_fields[source_field]['field']))
                    self.eligible_custom_fields[unicode(source_field)]['field']))
                values = sorted(values, key=sort_key)
            elif self.eligible_custom_fields[source_field]['datatype'] in ['bool']:
            elif self.eligible_custom_fields[unicode(source_field)]['datatype'] in ['bool']:
                values = ['True','False','unspecified']
            elif self.eligible_custom_fields[source_field]['datatype'] in ['composite']:
            elif self.eligible_custom_fields[unicode(source_field)]['datatype'] in ['composite']:
                values = ['any value','unspecified']
            elif self.eligible_custom_fields[source_field]['datatype'] in ['datetime']:
            elif self.eligible_custom_fields[unicode(source_field)]['datatype'] in ['datetime']:
                values = ['any date','unspecified']

        values_combo = ComboBox(self, values, pattern)

@@ -410,6 +410,7 @@ class TagsModel(QAbstractItemModel): # {{{
            # first letter can actually be more than one letter long.
            cl_list = [None] * len(data[key])
            last_ordnum = 0
            last_c = ' '
            for idx,tag in enumerate(data[key]):
                if not tag.sort:
                    c = ' '

@@ -141,7 +141,7 @@ class EPUB_MOBI(CatalogPlugin):
                default="(('Read books','tags','+','\u2713'),('Wishlist items','tags','Wishlist','\u00d7'))",
                dest='prefix_rules',
                action=None,
                help=_("Specifies the rules used to include prefixes indicating read books, wishlist items and other user-specifed prefixes.\n"
                help=_("Specifies the rules used to include prefixes indicating read books, wishlist items and other user-specified prefixes.\n"
                "The model for a prefix rule is ('<rule name>','<source field>','<pattern>','<prefix>').\n"
                "When multiple rules are defined, the first matching rule will be used.\n"
                "Default:\n" + '"' + '%default' + '"' + "\n"
@@ -174,14 +174,14 @@ class EPUB_MOBI(CatalogPlugin):
        if op is None:
            op = 'default'

        if opts.connected_device['name'] and \
           opts.connected_device['short_name'] in ['kindle','kindle dx']:
        if opts.connected_device['name'] and 'kindle' in opts.connected_device['name'].lower():
            opts.connected_kindle = True
            if opts.connected_device['serial'] and \
               opts.connected_device['serial'][:4] in ['B004','B005']:
                op = "kindle_dx"
            else:
                op = "kindle"

        opts.descriptionClip = 380 if op.endswith('dx') or 'kindle' not in op else 100
        opts.authorClip = 100 if op.endswith('dx') or 'kindle' not in op else 60
        opts.output_profile = op
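A small, self-contained illustration of the detection rule this hunk switches to: any connected device whose name contains 'kindle' counts as a Kindle, and serial numbers starting with B004 or B005 select the DX output profile. The sample device dict below is made up purely for illustration:

    connected = {'name': 'Amazon Kindle DX', 'serial': 'B00412345678901A'}

    is_kindle = bool(connected['name']) and 'kindle' in connected['name'].lower()
    op = 'kindle_dx' if is_kindle and connected['serial'][:4] in ('B004', 'B005') else 'kindle'
    print(is_kindle, op)   # True kindle_dx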

@@ -385,9 +385,6 @@ class EPUB_MOBI(CatalogPlugin):
                new_cover_path.close()
                recommendations.append(('cover', new_cover_path.name, OptionRecommendation.HIGH))

        if opts.verbose:
            log.info("Invoking Plumber with recommendations:\n %s" % recommendations)

        # Run ebook-convert
        from calibre.ebooks.conversion.plumber import Plumber
        plumber = Plumber(os.path.join(catalog.catalogPath,

@@ -15,7 +15,7 @@ from calibre.ptempfile import PersistentTemporaryDirectory
from calibre.utils.config import config_dir
from calibre.utils.date import format_date, is_date_undefined, now as nowf
from calibre.utils.filenames import ascii_text
from calibre.utils.icu import capitalize, sort_key
from calibre.utils.icu import capitalize, collation_order, sort_key
from calibre.utils.magick.draw import thumbnail
from calibre.utils.zipfile import ZipFile

@@ -517,19 +517,19 @@ class CatalogBuilder(object):
        self.generateOPF()
        self.generateNCXHeader()
        if self.opts.generate_authors:
            self.generateNCXByAuthor("Authors")
            self.generateNCXByAuthor(_("Authors"))
        if self.opts.generate_titles:
            self.generateNCXByTitle("Titles")
            self.generateNCXByTitle(_("Titles"))
        if self.opts.generate_series:
            self.generateNCXBySeries("Series")
            self.generateNCXBySeries(_("Series"))
        if self.opts.generate_genres:
            self.generateNCXByGenre("Genres")
            self.generateNCXByGenre(_("Genres"))
        if self.opts.generate_recently_added:
            self.generateNCXByDateAdded("Recently Added")
            self.generateNCXByDateAdded(_("Recently Added"))
        if self.generateRecentlyRead:
            self.generateNCXByDateRead("Recently Read")
            self.generateNCXByDateRead(_("Recently Read"))
        if self.opts.generate_descriptions:
            self.generateNCXDescriptions("Descriptions")
            self.generateNCXDescriptions(_("Descriptions"))

        self.writeNCX()
        return True
@@ -673,7 +673,8 @@ Author '{0}':
        for record in data:
            matched = list(set(record['tags']) & set(exclude_tags))
            if matched :
                self.opts.log.info(" - %s (Exclusion rule Tags: '%s')" % (record['title'], str(matched[0])))
                self.opts.log.info(" - %s by %s (Exclusion rule Tags: '%s')" %
                        (record['title'], record['authors'][0], str(matched[0])))

        search_phrase = ''
        if exclude_tags:
@@ -960,7 +961,7 @@ Author '{0}':
        aTag['id'] = "bytitle"
        pTag.insert(ptc,aTag)
        ptc += 1
        pTag.insert(ptc,NavigableString('Titles'))
        pTag.insert(ptc,NavigableString(_('Titles')))

        body.insert(btc,pTag)
        btc += 1

@@ -976,6 +977,9 @@ Author '{0}':
            nspt = sorted(nspt, key=lambda x: sort_key(x['title_sort'].upper()))
            self.booksByTitle_noSeriesPrefix = nspt

        # Establish initial letter equivalencies
        sort_equivalents = self.establish_equivalencies(self.booksByTitle, key='title_sort')

        # Loop through the books by title
        # Generate one divRunningTag per initial letter for the purposes of
        # minimizing widows and orphans on readers that can handle large
@@ -985,8 +989,8 @@ Author '{0}':
            title_list = self.booksByTitle_noSeriesPrefix
        drtc = 0
        divRunningTag = None
        for book in title_list:
            if self.letter_or_symbol(book['title_sort'][0]) != current_letter :
        for idx, book in enumerate(title_list):
            if self.letter_or_symbol(sort_equivalents[idx]) != current_letter:
                # Start a new letter
                if drtc and divRunningTag is not None:
                    divTag.insert(dtc, divRunningTag)
@@ -998,13 +1002,15 @@ Author '{0}':
                pIndexTag = Tag(soup, "p")
                pIndexTag['class'] = "author_title_letter_index"
                aTag = Tag(soup, "a")
                current_letter = self.letter_or_symbol(book['title_sort'][0])
                current_letter = self.letter_or_symbol(sort_equivalents[idx])
                if current_letter == self.SYMBOLS:
                    aTag['id'] = self.SYMBOLS
                    aTag['id'] = self.SYMBOLS + "_titles"
                    pIndexTag.insert(0,aTag)
                    pIndexTag.insert(1,NavigableString(self.SYMBOLS))
                else:
                    aTag['id'] = "%s" % self.generateUnicodeName(current_letter)
                    pIndexTag.insert(0,aTag)
                    pIndexTag.insert(1,NavigableString(self.letter_or_symbol(book['title_sort'][0])))
                    aTag['id'] = self.generateUnicodeName(current_letter) + "_titles"
                    pIndexTag.insert(0,aTag)
                    pIndexTag.insert(1,NavigableString(sort_equivalents[idx]))
                divRunningTag.insert(dtc,pIndexTag)
                drtc += 1

@@ -1079,7 +1085,7 @@ Author '{0}':
        '''
        self.updateProgressFullStep("'Authors'")

        friendly_name = "Authors"
        friendly_name = _("Authors")

        soup = self.generateHTMLEmptyHeader(friendly_name)
        body = soup.find('body')
@@ -1100,11 +1106,14 @@ Author '{0}':
        current_author = ''
        current_letter = ''
        current_series = None
        #for book in sorted(self.booksByAuthor, key = self.booksByAuthorSorter_author_sort):
        for book in self.booksByAuthor:
        # Establish initial letter equivalencies
        sort_equivalents = self.establish_equivalencies(self.booksByAuthor,key='author_sort')

        #for book in sorted(self.booksByAuthor, key = self.booksByAuthorSorter_author_sort):
        #for book in self.booksByAuthor:
        for idx, book in enumerate(self.booksByAuthor):
            book_count += 1
            if self.letter_or_symbol(book['author_sort'][0].upper()) != current_letter :
            if self.letter_or_symbol(sort_equivalents[idx]) != current_letter :
                # Start a new letter with Index letter
                if divOpeningTag is not None:
                    divTag.insert(dtc, divOpeningTag)
@@ -1124,13 +1133,16 @@ Author '{0}':
                pIndexTag = Tag(soup, "p")
                pIndexTag['class'] = "author_title_letter_index"
                aTag = Tag(soup, "a")
                current_letter = self.letter_or_symbol(book['author_sort'][0].upper())
                #current_letter = self.letter_or_symbol(book['author_sort'][0].upper())
                current_letter = self.letter_or_symbol(sort_equivalents[idx])
                if current_letter == self.SYMBOLS:
                    aTag['id'] = self.SYMBOLS + '_authors'
                    pIndexTag.insert(0,aTag)
                    pIndexTag.insert(1,NavigableString(self.SYMBOLS))
                else:
                    aTag['id'] = "%s_authors" % self.generateUnicodeName(current_letter)
                    pIndexTag.insert(0,aTag)
                    pIndexTag.insert(1,NavigableString(self.letter_or_symbol(book['author_sort'][0].upper())))
                    aTag['id'] = self.generateUnicodeName(current_letter) + '_authors'
                    pIndexTag.insert(0,aTag)
                    pIndexTag.insert(1,NavigableString(sort_equivalents[idx]))
                divOpeningTag.insert(dotc,pIndexTag)
                dotc += 1

@@ -1180,7 +1192,7 @@ Author '{0}':
                if self.opts.generate_series:
                    aTag = Tag(soup,'a')
                    aTag['href'] = "%s.html#%s_series" % ('BySeries',
                                    re.sub('\W','',book['series']).lower())
                                    re.sub('\s','',book['series']).lower())
                    aTag.insert(0, book['series'])
                    pSeriesTag.insert(0, aTag)
                else:
@@ -1282,7 +1294,7 @@ Author '{0}':
        def add_books_to_HTML_by_month(this_months_list, dtc):
            if len(this_months_list):

                #this_months_list = sorted(this_months_list, key=self.booksByAuthorSorter_author_sort)
                this_months_list = sorted(this_months_list, key=self.booksByAuthorSorter_author_sort)

                # Create a new month anchor
                date_string = strftime(u'%B %Y', current_date.timetuple())
@@ -1307,7 +1319,7 @@ Author '{0}':
                        pAuthorTag['class'] = "author_index"
                        aTag = Tag(soup, "a")
                        if self.opts.generate_authors:
                            aTag['id'] = "%s" % self.generateAuthorAnchor(current_author)
                            aTag['href'] = "%s.html#%s" % ("ByAlphaAuthor", self.generateAuthorAnchor(current_author))
                        aTag.insert(0,NavigableString(current_author))
                        pAuthorTag.insert(0,aTag)
                        divTag.insert(dtc,pAuthorTag)
@@ -1425,7 +1437,7 @@ Author '{0}':
            dtc += 1
            return dtc

        friendly_name = "Recently Added"
        friendly_name = _("Recently Added")

        soup = self.generateHTMLEmptyHeader(friendly_name)
        body = soup.find('body')
@@ -1518,7 +1530,7 @@ Author '{0}':
        '''
        Write books by active bookmarks
        '''
        friendly_name = 'Recently Read'
        friendly_name = _('Recently Read')
        self.updateProgressFullStep("'%s'" % friendly_name)
        if not self.bookmarked_books:
            return

@@ -1710,6 +1722,8 @@ Author '{0}':

        # Fetch the database as a dictionary
        data = self.plugin.search_sort_db(self.db, self.opts)

        # Remove exclusions
        self.booksBySeries = self.processExclusions(data)

        if not self.booksBySeries:
@@ -1717,24 +1731,25 @@ Author '{0}':
            self.opts.log(" no series found in selected books, cancelling series generation")
            return

        friendly_name = "Series"
        # Generate series_sort
        for book in self.booksBySeries:
            book['series_sort'] = self.generateSortTitle(book['series'])

        friendly_name = _("Series")

        soup = self.generateHTMLEmptyHeader(friendly_name)
        body = soup.find('body')

        btc = 0

        # Insert section tag
        pTag = Tag(soup, "p")
        pTag['style'] = 'display:none'
        ptc = 0
        aTag = Tag(soup,'a')
        aTag['name'] = 'section_start'
        body.insert(btc, aTag)
        btc += 1

        # Insert the anchor
        aTag = Tag(soup, "a")
        anchor_name = friendly_name.lower()
        aTag['name'] = anchor_name.replace(" ","")
        body.insert(btc, aTag)
        aTag['id'] = 'section_start'
        pTag.insert(ptc, aTag)
        ptc += 1
        body.insert(btc, pTag)
        btc += 1

        divTag = Tag(soup, "div")

@@ -1742,23 +1757,29 @@ Author '{0}':
        current_letter = ""
        current_series = None

        # Establish initial letter equivalencies
        sort_equivalents = self.establish_equivalencies(self.booksBySeries, key='series_sort')

        # Loop through booksBySeries
        series_count = 0
        for book in self.booksBySeries:
        for idx, book in enumerate(self.booksBySeries):
            # Check for initial letter change
            sort_title = self.generateSortTitle(book['series'])
            if self.letter_or_symbol(sort_title[0].upper()) != current_letter :
            if self.letter_or_symbol(sort_equivalents[idx]) != current_letter :
                # Start a new letter with Index letter
                current_letter = self.letter_or_symbol(sort_title[0].upper())
                current_letter = self.letter_or_symbol(sort_equivalents[idx])
                pIndexTag = Tag(soup, "p")
                pIndexTag['class'] = "series_letter_index"
                aTag = Tag(soup, "a")
                aTag['name'] = "%s_series" % self.letter_or_symbol(current_letter)
                pIndexTag.insert(0,aTag)
                pIndexTag.insert(1,NavigableString(self.letter_or_symbol(sort_title[0].upper())))
                if current_letter == self.SYMBOLS:
                    aTag['id'] = self.SYMBOLS + "_series"
                    pIndexTag.insert(0,aTag)
                    pIndexTag.insert(1,NavigableString(self.SYMBOLS))
                else:
                    aTag['id'] = self.generateUnicodeName(current_letter) + "_series"
                    pIndexTag.insert(0,aTag)
                    pIndexTag.insert(1,NavigableString(sort_equivalents[idx]))
                divTag.insert(dtc,pIndexTag)
                dtc += 1

            # Check for series change
            if book['series'] != current_series:
                # Start a new series
@@ -1767,7 +1788,10 @@ Author '{0}':
                pSeriesTag = Tag(soup,'p')
                pSeriesTag['class'] = "series"
                aTag = Tag(soup, 'a')
                aTag['name'] = "%s_series" % re.sub('\W','',book['series']).lower()
                if self.letter_or_symbol(book['series']):
                    aTag['id'] = "symbol_%s_series" % re.sub('\W','',book['series']).lower()
                else:
                    aTag['id'] = "%s_series" % re.sub('\W','',book['series']).lower()
                pSeriesTag.insert(0,aTag)
                pSeriesTag.insert(1,NavigableString('%s' % book['series']))
                divTag.insert(dtc,pSeriesTag)

@@ -1830,7 +1854,7 @@ Author '{0}':
        pTag['class'] = 'title'
        aTag = Tag(soup, "a")
        anchor_name = friendly_name.lower()
        aTag['name'] = anchor_name.replace(" ","")
        aTag['id'] = anchor_name.replace(" ","")
        pTag.insert(0,aTag)
        #h2Tag.insert(1,NavigableString('%s (%d)' % (friendly_name, series_count)))
        pTag.insert(1,NavigableString('%s' % friendly_name))
@@ -1984,7 +2008,7 @@ Author '{0}':
                    thumb_generated = False

                if not thumb_generated:
                    self.opts.log.warn(" using default cover for '%s' (%d)" % (title['title'], title['id']))
                    self.opts.log.warn(" using default cover for '%s' (%d)" % (title['title'], title['id']))
                    # Confirm thumb exists, default is current
                    default_thumb_fp = os.path.join(image_dir,"thumbnail_default.jpg")
                    cover = os.path.join(self.catalogPath, "DefaultCover.png")

@@ -2376,22 +2400,30 @@ Author '{0}':
        nptc += 1

        series_by_letter = []
        # Establish initial letter equivalencies
        sort_equivalents = self.establish_equivalencies(self.booksBySeries, key='series_sort')

        # Loop over the series titles, find start of each letter, add description_preview_count books
        # Special switch for using different title list

        title_list = self.booksBySeries
        current_letter = self.letter_or_symbol(self.generateSortTitle(title_list[0]['series'])[0])

        # Prime the pump
        current_letter = self.letter_or_symbol(sort_equivalents[0])

        title_letters = [current_letter]
        current_series_list = []
        current_series = ""
        for book in title_list:
        for idx, book in enumerate(title_list):
            sort_title = self.generateSortTitle(book['series'])
            if self.letter_or_symbol(sort_title[0]) != current_letter:
            sort_title_equivalent = self.establish_equivalencies([sort_title])[0]
            if self.letter_or_symbol(sort_equivalents[idx]) != current_letter:

                # Save the old list
                add_to_series_by_letter(current_series_list)

                # Start the new list
                current_letter = self.letter_or_symbol(sort_title[0])
                current_letter = self.letter_or_symbol(sort_equivalents[idx])
                title_letters.append(current_letter)
                current_series = book['series']
                current_series_list = [book['series']]
@@ -2413,12 +2445,17 @@ Author '{0}':
            self.playOrder += 1
            navLabelTag = Tag(soup, 'navLabel')
            textTag = Tag(soup, 'text')
            textTag.insert(0, NavigableString(u"Series beginning with %s" % \
            textTag.insert(0, NavigableString(_(u"Series beginning with %s") % \
                (title_letters[i] if len(title_letters[i])>1 else "'" + title_letters[i] + "'")))
            navLabelTag.insert(0, textTag)
            navPointByLetterTag.insert(0,navLabelTag)
            contentTag = Tag(soup, 'content')
            contentTag['src'] = "content/%s.html#%s_series" % (output, title_letters[i])
            #contentTag['src'] = "content/%s.html#%s_series" % (output, title_letters[i])
            if title_letters[i] == self.SYMBOLS:
                contentTag['src'] = "content/%s.html#%s_series" % (output, self.SYMBOLS)
            else:
                contentTag['src'] = "content/%s.html#%s_series" % (output, self.generateUnicodeName(title_letters[i]))

            navPointByLetterTag.insert(1,contentTag)

            if self.generateForKindle:
@ -2469,23 +2506,31 @@ Author '{0}':

books_by_letter = []

# Establish initial letter equivalencies
sort_equivalents = self.establish_equivalencies(self.booksByTitle, key='title_sort')

# Loop over the titles, find start of each letter, add description_preview_count books
# Special switch for using different title list
if self.useSeriesPrefixInTitlesSection:
title_list = self.booksByTitle
else:
title_list = self.booksByTitle_noSeriesPrefix
current_letter = self.letter_or_symbol(title_list[0]['title_sort'][0])

# Prime the list
current_letter = self.letter_or_symbol(sort_equivalents[0])
title_letters = [current_letter]
current_book_list = []
current_book = ""
for book in title_list:
if self.letter_or_symbol(book['title_sort'][0]) != current_letter:
for idx, book in enumerate(title_list):
#if self.letter_or_symbol(book['title_sort'][0]) != current_letter:
if self.letter_or_symbol(sort_equivalents[idx]) != current_letter:

# Save the old list
add_to_books_by_letter(current_book_list)

# Start the new list
current_letter = self.letter_or_symbol(book['title_sort'][0])
#current_letter = self.letter_or_symbol(book['title_sort'][0])
current_letter = self.letter_or_symbol(sort_equivalents[idx])
title_letters.append(current_letter)
current_book = book['title']
current_book_list = [book['title']]
@ -2507,15 +2552,15 @@ Author '{0}':
self.playOrder += 1
navLabelTag = Tag(soup, 'navLabel')
textTag = Tag(soup, 'text')
textTag.insert(0, NavigableString(u"Titles beginning with %s" % \
textTag.insert(0, NavigableString(_(u"Titles beginning with %s") % \
(title_letters[i] if len(title_letters[i])>1 else "'" + title_letters[i] + "'")))
navLabelTag.insert(0, textTag)
navPointByLetterTag.insert(0,navLabelTag)
contentTag = Tag(soup, 'content')
if title_letters[i] == self.SYMBOLS:
contentTag['src'] = "content/%s.html#%s" % (output, title_letters[i])
contentTag['src'] = "content/%s.html#%s_titles" % (output, self.SYMBOLS)
else:
contentTag['src'] = "content/%s.html#%s" % (output, self.generateUnicodeName(title_letters[i]))
contentTag['src'] = "content/%s.html#%s_titles" % (output, self.generateUnicodeName(title_letters[i]))
navPointByLetterTag.insert(1,contentTag)

if self.generateForKindle:
@ -2570,17 +2615,22 @@ Author '{0}':
# Loop over the sorted_authors list, find start of each letter,
# add description_preview_count artists
# self.authors[0]:friendly [1]:author_sort [2]:book_count
# (<friendly name>, author_sort, book_count)

# Need to extract a list of author_sort, generate sort_equivalents from that
sort_equivalents = self.establish_equivalencies([x[1] for x in self.authors])

master_author_list = []
# self.authors[0][1][0] = Initial letter of author_sort[0]
current_letter = self.letter_or_symbol(self.authors[0][1][0])
# Prime the pump
current_letter = self.letter_or_symbol(sort_equivalents[0])
current_author_list = []
for author in self.authors:
if self.letter_or_symbol(author[1][0]) != current_letter:
for idx, author in enumerate(self.authors):
if self.letter_or_symbol(sort_equivalents[idx]) != current_letter:
# Save the old list
add_to_author_list(current_author_list, current_letter)

# Start the new list
current_letter = self.letter_or_symbol(author[1][0])
current_letter = self.letter_or_symbol(sort_equivalents[idx])
current_author_list = [author[0]]
else:
if len(current_author_list) < self.descriptionClip:
@ -2599,7 +2649,7 @@ Author '{0}':
self.playOrder += 1
navLabelTag = Tag(soup, 'navLabel')
textTag = Tag(soup, 'text')
textTag.insert(0, NavigableString("Authors beginning with '%s'" % (authors_by_letter[1])))
textTag.insert(0, NavigableString(_("Authors beginning with '%s'") % (authors_by_letter[1])))
navLabelTag.insert(0, textTag)
navPointByLetterTag.insert(0,navLabelTag)
contentTag = Tag(soup, 'content')
@ -3177,6 +3227,46 @@ Author '{0}':

return None

def establish_equivalencies(self, item_list, key=None):
# Filter for leading letter equivalencies

# Hack to force the cataloged leading letter to be
# an unadorned character if the accented version sorts before the unaccented
exceptions = {
u'Ä':u'A',
u'Ö':u'O',
u'Ü':u'U'
}

if key is not None:
sort_field = key

cl_list = [None] * len(item_list)
last_ordnum = 0

for idx, item in enumerate(item_list):
if key:
c = item[sort_field]
else:
c = item

ordnum, ordlen = collation_order(c)
if last_ordnum != ordnum:
last_c = icu_upper(c[0:ordlen])
if last_c in exceptions.keys():
last_c = exceptions[unicode(last_c)]
last_ordnum = ordnum
cl_list[idx] = last_c

if False:
if key:
for idx, item in enumerate(item_list):
print("%s %s" % (cl_list[idx],item[sort_field]))
else:
print("%s %s" % (cl_list[0], item))

return cl_list

def filterDbTags(self, tags):
# Remove the special marker tags from the database's tag list,
# return sorted list of normalized genre tags
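establish_equivalencies() relies on ICU: collation_order() returns a collation ordinal plus the number of characters it consumed, and icu_upper() together with the exceptions dict reduces an accented leading letter to its unadorned form. A rough standalone approximation of the effect, using unicodedata as a stand-in for the ICU helpers (the function name and the 'SYMBOLS' bucket below are made up):

import unicodedata

def leading_letters(sort_keys):
    letters = []
    for key in sort_keys:
        # strip accents from the first character, then upper-case it
        base = unicodedata.normalize('NFKD', key[:1])
        base = ''.join(ch for ch in base if not unicodedata.combining(ch))
        letters.append(base.upper() if base.isalpha() else 'SYMBOLS')
    return letters

print(leading_letters([u'Äpfel', u'apple', u'Öl', u'1984']))
# ['A', 'A', 'O', 'SYMBOLS']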
@ -3252,17 +3342,22 @@ Author '{0}':

def formatPrefix(self,prefix_char,soup):
# Generate the HTML for the prefix portion of the listing
spanTag = Tag(soup, "span")
if prefix_char is None:
spanTag['style'] = "color:white"
spanTag.insert(0,NavigableString(self.defaultPrefix))
# 2e3a is 'two-em dash', which matches width in Kindle Previewer
# too wide in calibre viewer
# minimal visual distraction
# spanTag.insert(0,NavigableString(u'\u2e3a'))
# Kindle Previewer doesn't properly handle style=color:white
# MOBI does a better job allocating blank space with <code>
if self.opts.fmt == 'mobi':
codeTag = Tag(soup, "code")
if prefix_char is None:
codeTag.insert(0,NavigableString(' '))
else:
codeTag.insert(0,NavigableString(prefix_char))
return codeTag
else:
spanTag = Tag(soup, "span")
if prefix_char is None:
spanTag['style'] = "color:white"
prefix_char = self.defaultPrefix
spanTag.insert(0,NavigableString(prefix_char))
return spanTag
return spanTag

def generateAuthorAnchor(self, author):
# Strip white space to ''
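The branch above amounts to: for MOBI, emit the prefix (or a space) inside a <code> tag so the prefix column keeps its width in Kindle Previewer; for other formats, emit a <span>, painted white when there is no real prefix. A hedged restatement using the modern bs4 API instead of calibre's bundled BeautifulSoup (the function and parameter names are invented, and bs4 is assumed to be installed):

from bs4 import BeautifulSoup

def format_prefix(soup, prefix_char, fmt, default_prefix=' '):
    if fmt == 'mobi':
        tag = soup.new_tag('code')        # <code> keeps the prefix column width stable
        tag.string = prefix_char if prefix_char is not None else ' '
    else:
        tag = soup.new_tag('span')
        if prefix_char is None:
            tag['style'] = 'color:white'  # hide the placeholder prefix
            prefix_char = default_prefix
        tag.string = prefix_char
    return tag

soup = BeautifulSoup('', 'html.parser')
print(format_prefix(soup, None, 'mobi'))  # <code> </code>
print(format_prefix(soup, '+', 'epub'))   # <span>+</span>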
@ -3453,11 +3548,11 @@ Author '{0}':
author = book['author']

if book['prefix']:
author_prefix = book['prefix'] + " by "
author_prefix = book['prefix'] + ' ' + _("by ")
elif self.opts.connected_kindle and book['id'] in self.bookmarked_books:
author_prefix = self.READING_SYMBOL + " by "
author_prefix = self.READING_SYMBOL + ' ' + _("by ")
else:
author_prefix = "by "
author_prefix = _("by ")

# Genres
genres = ''
@ -3546,8 +3641,12 @@ Author '{0}':
if aTag:
if book['series']:
if self.opts.generate_series:
aTag['href'] = "%s.html#%s_series" % ('BySeries',
if self.letter_or_symbol(book['series']):
aTag['href'] = "%s.html#symbol_%s_series" % ('BySeries',
re.sub('\W','',book['series']).lower())
else:
aTag['href'] = "%s.html#%s_series" % ('BySeries',
re.sub('\s','',book['series']).lower())
else:
aTag.extract()
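The change above produces two anchor forms: series that fall under the symbols section (per letter_or_symbol) get a symbol_ prefix with all non-word characters stripped, while other series keep the plain anchor with only whitespace removed. A quick illustration of the two resulting href patterns (series names chosen arbitrarily):

import re

print("BySeries.html#symbol_%s_series" % re.sub(r'\W', '', '1001 Nights').lower())
# BySeries.html#symbol_1001nights_series
print("BySeries.html#%s_series" % re.sub(r'\s', '', 'Dark Tower').lower())
# BySeries.html#darktower_series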
@ -4040,7 +4139,7 @@ Author '{0}':
re.IGNORECASE) is not None:
if self.opts.verbose:
field_md = self.db.metadata_for_field(field)
self.opts.log.info(" - %s (Exclusion rule '%s': %s:%s)" %
self.opts.log.info(" - %s (Exclusion rule '%s': %s:%s)" %
(record['title'], field_md['name'], field,pat))
exclusion_set.append(record)
if record in filtered_data_set:
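The hunk above appears to adjust only the indentation of the log call, but for context: an exclusion rule excludes a record when its regular expression matches the chosen metadata field, conceptually along these lines (field value and pattern are invented):

import re

pat, field_value, title = 'Catalog', 'catalog', 'My Map Catalog'
if re.search(pat, field_value, re.IGNORECASE) is not None:
    print(" - %s (Exclusion rule matched: %s)" % (title, pat))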
@ -165,7 +165,7 @@ List the books available in the calibre database.

def command_list(args, dbpath):
pre = get_parser('')
pargs = [x for x in args if x in ('--with-library', '--library-path')
pargs = [x for x in args if x.startswith('--with-library') or x.startswith('--library-path')
or not x.startswith('-')]
opts = pre.parse_args(sys.argv[:1] + pargs)[0]
db = get_db(dbpath, opts)
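The one-line change to command_list() matters because the library switch may arrive fused with its value (for example --with-library=/books), which the old exact-match filter silently dropped. A small demonstration with made-up arguments:

args = ['list', '--with-library=/books']

old = [x for x in args if x in ('--with-library', '--library-path')
       or not x.startswith('-')]
new = [x for x in args if x.startswith('--with-library')
       or x.startswith('--library-path') or not x.startswith('-')]

print(old)  # ['list'] -- the library path was dropped
print(new)  # ['list', '--with-library=/books']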
@ -100,6 +100,15 @@ def test_icu():
raise RuntimeError('ICU module not loaded/valid')
print ('ICU OK!')

def test_wpd():
wpd = plugins['wpd'][0]
try:
wpd.init()
except wpd.NoWPD:
print ('This computer does not have WPD')
else:
wpd.uninit()

def test():
test_plugins()
test_lxml()
@ -112,6 +121,7 @@ def test():
if iswindows:
test_win32()
test_winutil()
test_wpd()

if __name__ == '__main__':
test()
File diff suppressed because it is too large
20661
src/calibre/translations/jv.po
Normal file
Some files were not shown because too many files have changed in this diff