mirror of
https://github.com/kovidgoyal/calibre.git
synced 2025-07-09 03:04:10 -04:00
New db: Port schema upgrades and fix various bugs with initializing a new database
This commit is contained in:
parent
8bc99c82f1
commit
a100068b59
@ -63,5 +63,4 @@ Various things that require other things before they can be migrated:
|
|||||||
columns/categories/searches info into
|
columns/categories/searches info into
|
||||||
self.field_metadata. Finally, implement metadata dirtied
|
self.field_metadata. Finally, implement metadata dirtied
|
||||||
functionality.
|
functionality.
|
||||||
2. Test Schema upgrades
|
|
||||||
'''
|
'''
|
||||||
|
@ -17,12 +17,13 @@ from calibre import isbytestring, force_unicode, prints
|
|||||||
from calibre.constants import (iswindows, filesystem_encoding,
|
from calibre.constants import (iswindows, filesystem_encoding,
|
||||||
preferred_encoding)
|
preferred_encoding)
|
||||||
from calibre.ptempfile import PersistentTemporaryFile
|
from calibre.ptempfile import PersistentTemporaryFile
|
||||||
from calibre.library.schema_upgrades import SchemaUpgrade
|
from calibre.db.schema_upgrades import SchemaUpgrade
|
||||||
from calibre.library.field_metadata import FieldMetadata
|
from calibre.library.field_metadata import FieldMetadata
|
||||||
from calibre.ebooks.metadata import title_sort, author_to_author_sort
|
from calibre.ebooks.metadata import title_sort, author_to_author_sort
|
||||||
from calibre.utils.icu import strcmp
|
from calibre.utils.icu import strcmp
|
||||||
from calibre.utils.config import to_json, from_json, prefs, tweaks
|
from calibre.utils.config import to_json, from_json, prefs, tweaks
|
||||||
from calibre.utils.date import utcfromtimestamp, parse_date
|
from calibre.utils.date import utcfromtimestamp, parse_date
|
||||||
|
from calibre.utils.filenames import is_case_sensitive
|
||||||
from calibre.db.tables import (OneToOneTable, ManyToOneTable, ManyToManyTable,
|
from calibre.db.tables import (OneToOneTable, ManyToOneTable, ManyToManyTable,
|
||||||
SizeTable, FormatsTable, AuthorsTable, IdentifiersTable)
|
SizeTable, FormatsTable, AuthorsTable, IdentifiersTable)
|
||||||
# }}}
|
# }}}
|
||||||
@ -30,8 +31,9 @@ from calibre.db.tables import (OneToOneTable, ManyToOneTable, ManyToManyTable,
|
|||||||
'''
|
'''
|
||||||
Differences in semantics from pysqlite:
|
Differences in semantics from pysqlite:
|
||||||
|
|
||||||
1. execute/executemany/executescript operate in autocommit mode
|
1. execute/executemany operate in autocommit mode
|
||||||
2. There is no fetchone() method on cursor objects, instead use next()
|
2. There is no fetchone() method on cursor objects, instead use next()
|
||||||
|
3. There is no executescript
|
||||||
|
|
||||||
'''
|
'''
|
||||||
|
|
||||||
@ -120,6 +122,66 @@ def icu_collator(s1, s2):
|
|||||||
return strcmp(force_unicode(s1, 'utf-8'), force_unicode(s2, 'utf-8'))
|
return strcmp(force_unicode(s1, 'utf-8'), force_unicode(s2, 'utf-8'))
|
||||||
# }}}
|
# }}}
|
||||||
|
|
||||||
|
# Unused aggregators {{{
|
||||||
|
def Concatenate(sep=','):
|
||||||
|
'''String concatenation aggregator for sqlite'''
|
||||||
|
|
||||||
|
def step(ctxt, value):
|
||||||
|
if value is not None:
|
||||||
|
ctxt.append(value)
|
||||||
|
|
||||||
|
def finalize(ctxt):
|
||||||
|
if not ctxt:
|
||||||
|
return None
|
||||||
|
return sep.join(ctxt)
|
||||||
|
|
||||||
|
return ([], step, finalize)
|
||||||
|
|
||||||
|
def SortedConcatenate(sep=','):
|
||||||
|
'''String concatenation aggregator for sqlite, sorted by supplied index'''
|
||||||
|
|
||||||
|
def step(ctxt, ndx, value):
|
||||||
|
if value is not None:
|
||||||
|
ctxt[ndx] = value
|
||||||
|
|
||||||
|
def finalize(ctxt):
|
||||||
|
if len(ctxt) == 0:
|
||||||
|
return None
|
||||||
|
return sep.join(map(ctxt.get, sorted(ctxt.iterkeys())))
|
||||||
|
|
||||||
|
return ({}, step, finalize)
|
||||||
|
|
||||||
|
def IdentifiersConcat():
|
||||||
|
'''String concatenation aggregator for the identifiers map'''
|
||||||
|
|
||||||
|
def step(ctxt, key, val):
|
||||||
|
ctxt.append(u'%s:%s'%(key, val))
|
||||||
|
|
||||||
|
def finalize(ctxt):
|
||||||
|
return ','.join(ctxt)
|
||||||
|
|
||||||
|
return ([], step, finalize)
|
||||||
|
|
||||||
|
def AumSortedConcatenate():
|
||||||
|
'''String concatenation aggregator for the author sort map'''
|
||||||
|
|
||||||
|
def step(ctxt, ndx, author, sort, link):
|
||||||
|
if author is not None:
|
||||||
|
ctxt[ndx] = ':::'.join((author, sort, link))
|
||||||
|
|
||||||
|
def finalize(ctxt):
|
||||||
|
keys = list(ctxt.iterkeys())
|
||||||
|
l = len(keys)
|
||||||
|
if l == 0:
|
||||||
|
return None
|
||||||
|
if l == 1:
|
||||||
|
return ctxt[keys[0]]
|
||||||
|
return ':#:'.join([ctxt[v] for v in sorted(keys)])
|
||||||
|
|
||||||
|
return ({}, step, finalize)
|
||||||
|
|
||||||
|
# }}}
|
||||||
|
|
||||||
class Connection(apsw.Connection): # {{{
|
class Connection(apsw.Connection): # {{{
|
||||||
|
|
||||||
BUSY_TIMEOUT = 2000 # milliseconds
|
BUSY_TIMEOUT = 2000 # milliseconds
|
||||||
@ -145,6 +207,18 @@ class Connection(apsw.Connection): # {{{
|
|||||||
self.createscalarfunction('books_list_filter', lambda x: 1, 1)
|
self.createscalarfunction('books_list_filter', lambda x: 1, 1)
|
||||||
self.createcollation('icucollate', icu_collator)
|
self.createcollation('icucollate', icu_collator)
|
||||||
|
|
||||||
|
# Legacy aggregators (never used) but present for backwards compat
|
||||||
|
self.createaggregatefunction('sortconcat', SortedConcatenate, 2)
|
||||||
|
self.createaggregatefunction('sortconcat_bar',
|
||||||
|
partial(SortedConcatenate, sep='|'), 2)
|
||||||
|
self.createaggregatefunction('sortconcat_amper',
|
||||||
|
partial(SortedConcatenate, sep='&'), 2)
|
||||||
|
self.createaggregatefunction('identifiers_concat',
|
||||||
|
IdentifiersConcat, 2)
|
||||||
|
self.createaggregatefunction('concat', Concatenate, 1)
|
||||||
|
self.createaggregatefunction('aum_sortconcat',
|
||||||
|
AumSortedConcatenate, 4)
|
||||||
|
|
||||||
def create_dynamic_filter(self, name):
|
def create_dynamic_filter(self, name):
|
||||||
f = DynamicFilter(name)
|
f = DynamicFilter(name)
|
||||||
self.createscalarfunction(name, f, 1)
|
self.createscalarfunction(name, f, 1)
|
||||||
@ -153,7 +227,10 @@ class Connection(apsw.Connection): # {{{
|
|||||||
ans = self.cursor().execute(*args)
|
ans = self.cursor().execute(*args)
|
||||||
if kw.get('all', True):
|
if kw.get('all', True):
|
||||||
return ans.fetchall()
|
return ans.fetchall()
|
||||||
return ans.next()[0]
|
try:
|
||||||
|
return ans.next()[0]
|
||||||
|
except (StopIteration, IndexError):
|
||||||
|
return None
|
||||||
|
|
||||||
def execute(self, sql, bindings=None):
|
def execute(self, sql, bindings=None):
|
||||||
cursor = self.cursor()
|
cursor = self.cursor()
|
||||||
@ -162,14 +239,9 @@ class Connection(apsw.Connection): # {{{
|
|||||||
def executemany(self, sql, sequence_of_bindings):
|
def executemany(self, sql, sequence_of_bindings):
|
||||||
return self.cursor().executemany(sql, sequence_of_bindings)
|
return self.cursor().executemany(sql, sequence_of_bindings)
|
||||||
|
|
||||||
def executescript(self, sql):
|
|
||||||
with self:
|
|
||||||
# Use an explicit savepoint so that even if this is called
|
|
||||||
# while a transaction is active, it is atomic
|
|
||||||
return self.cursor().execute(sql)
|
|
||||||
# }}}
|
# }}}
|
||||||
|
|
||||||
class DB(SchemaUpgrade):
|
class DB(object):
|
||||||
|
|
||||||
PATH_LIMIT = 40 if iswindows else 100
|
PATH_LIMIT = 40 if iswindows else 100
|
||||||
WINDOWS_LIBRARY_PATH_LIMIT = 75
|
WINDOWS_LIBRARY_PATH_LIMIT = 75
|
||||||
@ -213,25 +285,24 @@ class DB(SchemaUpgrade):
|
|||||||
shutil.copyfile(self.dbpath, pt.name)
|
shutil.copyfile(self.dbpath, pt.name)
|
||||||
self.dbpath = pt.name
|
self.dbpath = pt.name
|
||||||
|
|
||||||
self.is_case_sensitive = (not iswindows and
|
if not os.path.exists(os.path.dirname(self.dbpath)):
|
||||||
not os.path.exists(self.dbpath.replace('metadata.db',
|
os.makedirs(os.path.dirname(self.dbpath))
|
||||||
'MeTAdAtA.dB')))
|
|
||||||
|
|
||||||
self._conn = None
|
self._conn = None
|
||||||
|
|
||||||
if self.user_version == 0:
|
if self.user_version == 0:
|
||||||
self.initialize_database()
|
self.initialize_database()
|
||||||
|
|
||||||
with self.conn:
|
if not os.path.exists(self.library_path):
|
||||||
SchemaUpgrade.__init__(self)
|
os.makedirs(self.library_path)
|
||||||
|
self.is_case_sensitive = is_case_sensitive(self.library_path)
|
||||||
|
|
||||||
|
SchemaUpgrade(self.conn, self.library_path, self.field_metadata)
|
||||||
|
|
||||||
# Guarantee that the library_id is set
|
# Guarantee that the library_id is set
|
||||||
self.library_id
|
self.library_id
|
||||||
|
|
||||||
self.initialize_prefs(default_prefs)
|
|
||||||
|
|
||||||
# Fix legacy triggers and columns
|
# Fix legacy triggers and columns
|
||||||
self.conn.executescript('''
|
self.conn.execute('''
|
||||||
DROP TRIGGER IF EXISTS author_insert_trg;
|
DROP TRIGGER IF EXISTS author_insert_trg;
|
||||||
CREATE TEMP TRIGGER author_insert_trg
|
CREATE TEMP TRIGGER author_insert_trg
|
||||||
AFTER INSERT ON authors
|
AFTER INSERT ON authors
|
||||||
@ -248,6 +319,7 @@ class DB(SchemaUpgrade):
|
|||||||
UPDATE authors SET sort=author_to_author_sort(name) WHERE sort IS NULL;
|
UPDATE authors SET sort=author_to_author_sort(name) WHERE sort IS NULL;
|
||||||
''')
|
''')
|
||||||
|
|
||||||
|
self.initialize_prefs(default_prefs)
|
||||||
self.initialize_custom_columns()
|
self.initialize_custom_columns()
|
||||||
self.initialize_tables()
|
self.initialize_tables()
|
||||||
|
|
||||||
@ -589,7 +661,14 @@ class DB(SchemaUpgrade):
|
|||||||
def initialize_database(self):
|
def initialize_database(self):
|
||||||
metadata_sqlite = P('metadata_sqlite.sql', data=True,
|
metadata_sqlite = P('metadata_sqlite.sql', data=True,
|
||||||
allow_user_override=False).decode('utf-8')
|
allow_user_override=False).decode('utf-8')
|
||||||
self.conn.executescript(metadata_sqlite)
|
cur = self.conn.cursor()
|
||||||
|
cur.execute('BEGIN EXCLUSIVE TRANSACTION')
|
||||||
|
try:
|
||||||
|
cur.execute(metadata_sqlite)
|
||||||
|
except:
|
||||||
|
cur.execute('ROLLBACK')
|
||||||
|
else:
|
||||||
|
cur.execute('COMMIT')
|
||||||
if self.user_version == 0:
|
if self.user_version == 0:
|
||||||
self.user_version = 1
|
self.user_version = 1
|
||||||
# }}}
|
# }}}
|
||||||
@ -629,7 +708,7 @@ class DB(SchemaUpgrade):
|
|||||||
self.conn.execute('''
|
self.conn.execute('''
|
||||||
DELETE FROM library_id;
|
DELETE FROM library_id;
|
||||||
INSERT INTO library_id (uuid) VALUES (?);
|
INSERT INTO library_id (uuid) VALUES (?);
|
||||||
''', self._library_id_)
|
''', (self._library_id_,))
|
||||||
|
|
||||||
return property(doc=doc, fget=fget, fset=fset)
|
return property(doc=doc, fget=fget, fset=fset)
|
||||||
|
|
||||||
|
618
src/calibre/db/schema_upgrades.py
Normal file
618
src/calibre/db/schema_upgrades.py
Normal file
@ -0,0 +1,618 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai
|
||||||
|
from __future__ import (unicode_literals, division, absolute_import,
|
||||||
|
print_function)
|
||||||
|
|
||||||
|
__license__ = 'GPL v3'
|
||||||
|
__copyright__ = '2011, Kovid Goyal <kovid@kovidgoyal.net>'
|
||||||
|
__docformat__ = 'restructuredtext en'
|
||||||
|
|
||||||
|
import os
|
||||||
|
|
||||||
|
from calibre import prints
|
||||||
|
from calibre.utils.date import isoformat, DEFAULT_DATE
|
||||||
|
|
||||||
|
class SchemaUpgrade(object):
|
||||||
|
|
||||||
|
def __init__(self, conn, library_path, field_metadata):
|
||||||
|
conn.execute('BEGIN EXCLUSIVE TRANSACTION')
|
||||||
|
self.conn = conn
|
||||||
|
self.library_path = library_path
|
||||||
|
self.field_metadata = field_metadata
|
||||||
|
# Upgrade database
|
||||||
|
try:
|
||||||
|
while True:
|
||||||
|
uv = self.conn.execute('pragma user_version').next()[0]
|
||||||
|
meth = getattr(self, 'upgrade_version_%d'%uv, None)
|
||||||
|
if meth is None:
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
prints('Upgrading database to version %d...'%(uv+1))
|
||||||
|
meth()
|
||||||
|
self.conn.execute('pragma user_version=%d'%(uv+1))
|
||||||
|
except:
|
||||||
|
self.conn.execute('ROLLBACK')
|
||||||
|
raise
|
||||||
|
else:
|
||||||
|
self.conn.execute('COMMIT')
|
||||||
|
finally:
|
||||||
|
self.conn = self.field_metadata = None
|
||||||
|
|
||||||
|
def upgrade_version_1(self):
|
||||||
|
'''
|
||||||
|
Normalize indices.
|
||||||
|
'''
|
||||||
|
self.conn.execute('''\
|
||||||
|
DROP INDEX IF EXISTS authors_idx;
|
||||||
|
CREATE INDEX authors_idx ON books (author_sort COLLATE NOCASE, sort COLLATE NOCASE);
|
||||||
|
DROP INDEX IF EXISTS series_idx;
|
||||||
|
CREATE INDEX series_idx ON series (name COLLATE NOCASE);
|
||||||
|
DROP INDEX IF EXISTS series_sort_idx;
|
||||||
|
CREATE INDEX series_sort_idx ON books (series_index, id);
|
||||||
|
''')
|
||||||
|
|
||||||
|
def upgrade_version_2(self):
|
||||||
|
''' Fix Foreign key constraints for deleting from link tables. '''
|
||||||
|
script = '''\
|
||||||
|
DROP TRIGGER IF EXISTS fkc_delete_books_%(ltable)s_link;
|
||||||
|
CREATE TRIGGER fkc_delete_on_%(table)s
|
||||||
|
BEFORE DELETE ON %(table)s
|
||||||
|
BEGIN
|
||||||
|
SELECT CASE
|
||||||
|
WHEN (SELECT COUNT(id) FROM books_%(ltable)s_link WHERE %(ltable_col)s=OLD.id) > 0
|
||||||
|
THEN RAISE(ABORT, 'Foreign key violation: %(table)s is still referenced')
|
||||||
|
END;
|
||||||
|
END;
|
||||||
|
DELETE FROM %(table)s WHERE (SELECT COUNT(id) FROM books_%(ltable)s_link WHERE %(ltable_col)s=%(table)s.id) < 1;
|
||||||
|
'''
|
||||||
|
self.conn.execute(script%dict(ltable='authors', table='authors', ltable_col='author'))
|
||||||
|
self.conn.execute(script%dict(ltable='publishers', table='publishers', ltable_col='publisher'))
|
||||||
|
self.conn.execute(script%dict(ltable='tags', table='tags', ltable_col='tag'))
|
||||||
|
self.conn.execute(script%dict(ltable='series', table='series', ltable_col='series'))
|
||||||
|
|
||||||
|
def upgrade_version_3(self):
|
||||||
|
' Add path to result cache '
|
||||||
|
self.conn.execute('''
|
||||||
|
DROP VIEW IF EXISTS meta;
|
||||||
|
CREATE VIEW meta AS
|
||||||
|
SELECT id, title,
|
||||||
|
(SELECT concat(name) FROM authors WHERE authors.id IN (SELECT author from books_authors_link WHERE book=books.id)) authors,
|
||||||
|
(SELECT name FROM publishers WHERE publishers.id IN (SELECT publisher from books_publishers_link WHERE book=books.id)) publisher,
|
||||||
|
(SELECT rating FROM ratings WHERE ratings.id IN (SELECT rating from books_ratings_link WHERE book=books.id)) rating,
|
||||||
|
timestamp,
|
||||||
|
(SELECT MAX(uncompressed_size) FROM data WHERE book=books.id) size,
|
||||||
|
(SELECT concat(name) FROM tags WHERE tags.id IN (SELECT tag from books_tags_link WHERE book=books.id)) tags,
|
||||||
|
(SELECT text FROM comments WHERE book=books.id) comments,
|
||||||
|
(SELECT name FROM series WHERE series.id IN (SELECT series FROM books_series_link WHERE book=books.id)) series,
|
||||||
|
series_index,
|
||||||
|
sort,
|
||||||
|
author_sort,
|
||||||
|
(SELECT concat(format) FROM data WHERE data.book=books.id) formats,
|
||||||
|
isbn,
|
||||||
|
path
|
||||||
|
FROM books;
|
||||||
|
''')
|
||||||
|
|
||||||
|
def upgrade_version_4(self):
|
||||||
|
'Rationalize books table'
|
||||||
|
self.conn.execute('''
|
||||||
|
CREATE TEMPORARY TABLE
|
||||||
|
books_backup(id,title,sort,timestamp,series_index,author_sort,isbn,path);
|
||||||
|
INSERT INTO books_backup SELECT id,title,sort,timestamp,series_index,author_sort,isbn,path FROM books;
|
||||||
|
DROP TABLE books;
|
||||||
|
CREATE TABLE books ( id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
title TEXT NOT NULL DEFAULT 'Unknown' COLLATE NOCASE,
|
||||||
|
sort TEXT COLLATE NOCASE,
|
||||||
|
timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
pubdate TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
series_index REAL NOT NULL DEFAULT 1.0,
|
||||||
|
author_sort TEXT COLLATE NOCASE,
|
||||||
|
isbn TEXT DEFAULT "" COLLATE NOCASE,
|
||||||
|
lccn TEXT DEFAULT "" COLLATE NOCASE,
|
||||||
|
path TEXT NOT NULL DEFAULT "",
|
||||||
|
flags INTEGER NOT NULL DEFAULT 1
|
||||||
|
);
|
||||||
|
INSERT INTO
|
||||||
|
books (id,title,sort,timestamp,pubdate,series_index,author_sort,isbn,path)
|
||||||
|
SELECT id,title,sort,timestamp,timestamp,series_index,author_sort,isbn,path FROM books_backup;
|
||||||
|
DROP TABLE books_backup;
|
||||||
|
|
||||||
|
DROP VIEW IF EXISTS meta;
|
||||||
|
CREATE VIEW meta AS
|
||||||
|
SELECT id, title,
|
||||||
|
(SELECT concat(name) FROM authors WHERE authors.id IN (SELECT author from books_authors_link WHERE book=books.id)) authors,
|
||||||
|
(SELECT name FROM publishers WHERE publishers.id IN (SELECT publisher from books_publishers_link WHERE book=books.id)) publisher,
|
||||||
|
(SELECT rating FROM ratings WHERE ratings.id IN (SELECT rating from books_ratings_link WHERE book=books.id)) rating,
|
||||||
|
timestamp,
|
||||||
|
(SELECT MAX(uncompressed_size) FROM data WHERE book=books.id) size,
|
||||||
|
(SELECT concat(name) FROM tags WHERE tags.id IN (SELECT tag from books_tags_link WHERE book=books.id)) tags,
|
||||||
|
(SELECT text FROM comments WHERE book=books.id) comments,
|
||||||
|
(SELECT name FROM series WHERE series.id IN (SELECT series FROM books_series_link WHERE book=books.id)) series,
|
||||||
|
series_index,
|
||||||
|
sort,
|
||||||
|
author_sort,
|
||||||
|
(SELECT concat(format) FROM data WHERE data.book=books.id) formats,
|
||||||
|
isbn,
|
||||||
|
path,
|
||||||
|
lccn,
|
||||||
|
pubdate,
|
||||||
|
flags
|
||||||
|
FROM books;
|
||||||
|
''')
|
||||||
|
|
||||||
|
def upgrade_version_5(self):
|
||||||
|
'Update indexes/triggers for new books table'
|
||||||
|
self.conn.execute('''
|
||||||
|
CREATE INDEX authors_idx ON books (author_sort COLLATE NOCASE);
|
||||||
|
CREATE INDEX books_idx ON books (sort COLLATE NOCASE);
|
||||||
|
CREATE TRIGGER books_delete_trg
|
||||||
|
AFTER DELETE ON books
|
||||||
|
BEGIN
|
||||||
|
DELETE FROM books_authors_link WHERE book=OLD.id;
|
||||||
|
DELETE FROM books_publishers_link WHERE book=OLD.id;
|
||||||
|
DELETE FROM books_ratings_link WHERE book=OLD.id;
|
||||||
|
DELETE FROM books_series_link WHERE book=OLD.id;
|
||||||
|
DELETE FROM books_tags_link WHERE book=OLD.id;
|
||||||
|
DELETE FROM data WHERE book=OLD.id;
|
||||||
|
DELETE FROM comments WHERE book=OLD.id;
|
||||||
|
DELETE FROM conversion_options WHERE book=OLD.id;
|
||||||
|
END;
|
||||||
|
CREATE TRIGGER books_insert_trg
|
||||||
|
AFTER INSERT ON books
|
||||||
|
BEGIN
|
||||||
|
UPDATE books SET sort=title_sort(NEW.title) WHERE id=NEW.id;
|
||||||
|
END;
|
||||||
|
CREATE TRIGGER books_update_trg
|
||||||
|
AFTER UPDATE ON books
|
||||||
|
BEGIN
|
||||||
|
UPDATE books SET sort=title_sort(NEW.title) WHERE id=NEW.id;
|
||||||
|
END;
|
||||||
|
|
||||||
|
UPDATE books SET sort=title_sort(title) WHERE sort IS NULL;
|
||||||
|
'''
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade_version_6(self):
|
||||||
|
'Show authors in order'
|
||||||
|
self.conn.execute('''
|
||||||
|
DROP VIEW IF EXISTS meta;
|
||||||
|
CREATE VIEW meta AS
|
||||||
|
SELECT id, title,
|
||||||
|
(SELECT sortconcat(bal.id, name) FROM books_authors_link AS bal JOIN authors ON(author = authors.id) WHERE book = books.id) authors,
|
||||||
|
(SELECT name FROM publishers WHERE publishers.id IN (SELECT publisher from books_publishers_link WHERE book=books.id)) publisher,
|
||||||
|
(SELECT rating FROM ratings WHERE ratings.id IN (SELECT rating from books_ratings_link WHERE book=books.id)) rating,
|
||||||
|
timestamp,
|
||||||
|
(SELECT MAX(uncompressed_size) FROM data WHERE book=books.id) size,
|
||||||
|
(SELECT concat(name) FROM tags WHERE tags.id IN (SELECT tag from books_tags_link WHERE book=books.id)) tags,
|
||||||
|
(SELECT text FROM comments WHERE book=books.id) comments,
|
||||||
|
(SELECT name FROM series WHERE series.id IN (SELECT series FROM books_series_link WHERE book=books.id)) series,
|
||||||
|
series_index,
|
||||||
|
sort,
|
||||||
|
author_sort,
|
||||||
|
(SELECT concat(format) FROM data WHERE data.book=books.id) formats,
|
||||||
|
isbn,
|
||||||
|
path,
|
||||||
|
lccn,
|
||||||
|
pubdate,
|
||||||
|
flags
|
||||||
|
FROM books;
|
||||||
|
''')
|
||||||
|
|
||||||
|
def upgrade_version_7(self):
|
||||||
|
'Add uuid column'
|
||||||
|
self.conn.execute('''
|
||||||
|
ALTER TABLE books ADD COLUMN uuid TEXT;
|
||||||
|
DROP TRIGGER IF EXISTS books_insert_trg;
|
||||||
|
DROP TRIGGER IF EXISTS books_update_trg;
|
||||||
|
UPDATE books SET uuid=uuid4();
|
||||||
|
|
||||||
|
CREATE TRIGGER books_insert_trg AFTER INSERT ON books
|
||||||
|
BEGIN
|
||||||
|
UPDATE books SET sort=title_sort(NEW.title),uuid=uuid4() WHERE id=NEW.id;
|
||||||
|
END;
|
||||||
|
|
||||||
|
CREATE TRIGGER books_update_trg AFTER UPDATE ON books
|
||||||
|
BEGIN
|
||||||
|
UPDATE books SET sort=title_sort(NEW.title) WHERE id=NEW.id;
|
||||||
|
END;
|
||||||
|
|
||||||
|
DROP VIEW IF EXISTS meta;
|
||||||
|
CREATE VIEW meta AS
|
||||||
|
SELECT id, title,
|
||||||
|
(SELECT sortconcat(bal.id, name) FROM books_authors_link AS bal JOIN authors ON(author = authors.id) WHERE book = books.id) authors,
|
||||||
|
(SELECT name FROM publishers WHERE publishers.id IN (SELECT publisher from books_publishers_link WHERE book=books.id)) publisher,
|
||||||
|
(SELECT rating FROM ratings WHERE ratings.id IN (SELECT rating from books_ratings_link WHERE book=books.id)) rating,
|
||||||
|
timestamp,
|
||||||
|
(SELECT MAX(uncompressed_size) FROM data WHERE book=books.id) size,
|
||||||
|
(SELECT concat(name) FROM tags WHERE tags.id IN (SELECT tag from books_tags_link WHERE book=books.id)) tags,
|
||||||
|
(SELECT text FROM comments WHERE book=books.id) comments,
|
||||||
|
(SELECT name FROM series WHERE series.id IN (SELECT series FROM books_series_link WHERE book=books.id)) series,
|
||||||
|
series_index,
|
||||||
|
sort,
|
||||||
|
author_sort,
|
||||||
|
(SELECT concat(format) FROM data WHERE data.book=books.id) formats,
|
||||||
|
isbn,
|
||||||
|
path,
|
||||||
|
lccn,
|
||||||
|
pubdate,
|
||||||
|
flags,
|
||||||
|
uuid
|
||||||
|
FROM books;
|
||||||
|
''')
|
||||||
|
|
||||||
|
def upgrade_version_8(self):
|
||||||
|
'Add Tag Browser views'
|
||||||
|
def create_tag_browser_view(table_name, column_name):
|
||||||
|
self.conn.execute('''
|
||||||
|
DROP VIEW IF EXISTS tag_browser_{tn};
|
||||||
|
CREATE VIEW tag_browser_{tn} AS SELECT
|
||||||
|
id,
|
||||||
|
name,
|
||||||
|
(SELECT COUNT(id) FROM books_{tn}_link WHERE {cn}={tn}.id) count
|
||||||
|
FROM {tn};
|
||||||
|
'''.format(tn=table_name, cn=column_name))
|
||||||
|
|
||||||
|
for tn in ('authors', 'tags', 'publishers', 'series'):
|
||||||
|
cn = tn[:-1]
|
||||||
|
if tn == 'series':
|
||||||
|
cn = tn
|
||||||
|
create_tag_browser_view(tn, cn)
|
||||||
|
|
||||||
|
def upgrade_version_9(self):
|
||||||
|
'Add custom columns'
|
||||||
|
self.conn.execute('''
|
||||||
|
CREATE TABLE custom_columns (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
label TEXT NOT NULL,
|
||||||
|
name TEXT NOT NULL,
|
||||||
|
datatype TEXT NOT NULL,
|
||||||
|
mark_for_delete BOOL DEFAULT 0 NOT NULL,
|
||||||
|
editable BOOL DEFAULT 1 NOT NULL,
|
||||||
|
display TEXT DEFAULT "{}" NOT NULL,
|
||||||
|
is_multiple BOOL DEFAULT 0 NOT NULL,
|
||||||
|
normalized BOOL NOT NULL,
|
||||||
|
UNIQUE(label)
|
||||||
|
);
|
||||||
|
CREATE INDEX IF NOT EXISTS custom_columns_idx ON custom_columns (label);
|
||||||
|
CREATE INDEX IF NOT EXISTS formats_idx ON data (format);
|
||||||
|
''')
|
||||||
|
|
||||||
|
def upgrade_version_10(self):
|
||||||
|
'Add restricted Tag Browser views'
|
||||||
|
def create_tag_browser_view(table_name, column_name, view_column_name):
|
||||||
|
script = ('''
|
||||||
|
DROP VIEW IF EXISTS tag_browser_{tn};
|
||||||
|
CREATE VIEW tag_browser_{tn} AS SELECT
|
||||||
|
id,
|
||||||
|
{vcn},
|
||||||
|
(SELECT COUNT(id) FROM books_{tn}_link WHERE {cn}={tn}.id) count
|
||||||
|
FROM {tn};
|
||||||
|
DROP VIEW IF EXISTS tag_browser_filtered_{tn};
|
||||||
|
CREATE VIEW tag_browser_filtered_{tn} AS SELECT
|
||||||
|
id,
|
||||||
|
{vcn},
|
||||||
|
(SELECT COUNT(books_{tn}_link.id) FROM books_{tn}_link WHERE
|
||||||
|
{cn}={tn}.id AND books_list_filter(book)) count
|
||||||
|
FROM {tn};
|
||||||
|
'''.format(tn=table_name, cn=column_name, vcn=view_column_name))
|
||||||
|
self.conn.execute(script)
|
||||||
|
|
||||||
|
for field in self.field_metadata.itervalues():
|
||||||
|
if field['is_category'] and not field['is_custom'] and 'link_column' in field:
|
||||||
|
table = self.conn.get(
|
||||||
|
'SELECT name FROM sqlite_master WHERE type="table" AND name=?',
|
||||||
|
('books_%s_link'%field['table'],), all=False)
|
||||||
|
if table is not None:
|
||||||
|
create_tag_browser_view(field['table'], field['link_column'], field['column'])
|
||||||
|
|
||||||
|
def upgrade_version_11(self):
|
||||||
|
'Add average rating to tag browser views'
|
||||||
|
def create_std_tag_browser_view(table_name, column_name,
|
||||||
|
view_column_name, sort_column_name):
|
||||||
|
script = ('''
|
||||||
|
DROP VIEW IF EXISTS tag_browser_{tn};
|
||||||
|
CREATE VIEW tag_browser_{tn} AS SELECT
|
||||||
|
id,
|
||||||
|
{vcn},
|
||||||
|
(SELECT COUNT(id) FROM books_{tn}_link WHERE {cn}={tn}.id) count,
|
||||||
|
(SELECT AVG(ratings.rating)
|
||||||
|
FROM books_{tn}_link AS tl, books_ratings_link AS bl, ratings
|
||||||
|
WHERE tl.{cn}={tn}.id AND bl.book=tl.book AND
|
||||||
|
ratings.id = bl.rating AND ratings.rating <> 0) avg_rating,
|
||||||
|
{scn} AS sort
|
||||||
|
FROM {tn};
|
||||||
|
DROP VIEW IF EXISTS tag_browser_filtered_{tn};
|
||||||
|
CREATE VIEW tag_browser_filtered_{tn} AS SELECT
|
||||||
|
id,
|
||||||
|
{vcn},
|
||||||
|
(SELECT COUNT(books_{tn}_link.id) FROM books_{tn}_link WHERE
|
||||||
|
{cn}={tn}.id AND books_list_filter(book)) count,
|
||||||
|
(SELECT AVG(ratings.rating)
|
||||||
|
FROM books_{tn}_link AS tl, books_ratings_link AS bl, ratings
|
||||||
|
WHERE tl.{cn}={tn}.id AND bl.book=tl.book AND
|
||||||
|
ratings.id = bl.rating AND ratings.rating <> 0 AND
|
||||||
|
books_list_filter(bl.book)) avg_rating,
|
||||||
|
{scn} AS sort
|
||||||
|
FROM {tn};
|
||||||
|
|
||||||
|
'''.format(tn=table_name, cn=column_name,
|
||||||
|
vcn=view_column_name, scn= sort_column_name))
|
||||||
|
self.conn.execute(script)
|
||||||
|
|
||||||
|
def create_cust_tag_browser_view(table_name, link_table_name):
|
||||||
|
script = '''
|
||||||
|
DROP VIEW IF EXISTS tag_browser_{table};
|
||||||
|
CREATE VIEW tag_browser_{table} AS SELECT
|
||||||
|
id,
|
||||||
|
value,
|
||||||
|
(SELECT COUNT(id) FROM {lt} WHERE value={table}.id) count,
|
||||||
|
(SELECT AVG(r.rating)
|
||||||
|
FROM {lt},
|
||||||
|
books_ratings_link AS bl,
|
||||||
|
ratings AS r
|
||||||
|
WHERE {lt}.value={table}.id AND bl.book={lt}.book AND
|
||||||
|
r.id = bl.rating AND r.rating <> 0) avg_rating,
|
||||||
|
value AS sort
|
||||||
|
FROM {table};
|
||||||
|
|
||||||
|
DROP VIEW IF EXISTS tag_browser_filtered_{table};
|
||||||
|
CREATE VIEW tag_browser_filtered_{table} AS SELECT
|
||||||
|
id,
|
||||||
|
value,
|
||||||
|
(SELECT COUNT({lt}.id) FROM {lt} WHERE value={table}.id AND
|
||||||
|
books_list_filter(book)) count,
|
||||||
|
(SELECT AVG(r.rating)
|
||||||
|
FROM {lt},
|
||||||
|
books_ratings_link AS bl,
|
||||||
|
ratings AS r
|
||||||
|
WHERE {lt}.value={table}.id AND bl.book={lt}.book AND
|
||||||
|
r.id = bl.rating AND r.rating <> 0 AND
|
||||||
|
books_list_filter(bl.book)) avg_rating,
|
||||||
|
value AS sort
|
||||||
|
FROM {table};
|
||||||
|
'''.format(lt=link_table_name, table=table_name)
|
||||||
|
self.conn.execute(script)
|
||||||
|
|
||||||
|
for field in self.field_metadata.itervalues():
|
||||||
|
if field['is_category'] and not field['is_custom'] and 'link_column' in field:
|
||||||
|
table = self.conn.get(
|
||||||
|
'SELECT name FROM sqlite_master WHERE type="table" AND name=?',
|
||||||
|
('books_%s_link'%field['table'],), all=False)
|
||||||
|
if table is not None:
|
||||||
|
create_std_tag_browser_view(field['table'], field['link_column'],
|
||||||
|
field['column'], field['category_sort'])
|
||||||
|
|
||||||
|
db_tables = self.conn.get('''SELECT name FROM sqlite_master
|
||||||
|
WHERE type='table'
|
||||||
|
ORDER BY name''')
|
||||||
|
tables = []
|
||||||
|
for (table,) in db_tables:
|
||||||
|
tables.append(table)
|
||||||
|
for table in tables:
|
||||||
|
link_table = 'books_%s_link'%table
|
||||||
|
if table.startswith('custom_column_') and link_table in tables:
|
||||||
|
create_cust_tag_browser_view(table, link_table)
|
||||||
|
|
||||||
|
self.conn.execute('UPDATE authors SET sort=author_to_author_sort(name)')
|
||||||
|
|
||||||
|
def upgrade_version_12(self):
|
||||||
|
'DB based preference store'
|
||||||
|
script = '''
|
||||||
|
DROP TABLE IF EXISTS preferences;
|
||||||
|
CREATE TABLE preferences(id INTEGER PRIMARY KEY,
|
||||||
|
key TEXT NON NULL,
|
||||||
|
val TEXT NON NULL,
|
||||||
|
UNIQUE(key));
|
||||||
|
'''
|
||||||
|
self.conn.execute(script)
|
||||||
|
|
||||||
|
def upgrade_version_13(self):
|
||||||
|
'Dirtied table for OPF metadata backups'
|
||||||
|
script = '''
|
||||||
|
DROP TABLE IF EXISTS metadata_dirtied;
|
||||||
|
CREATE TABLE metadata_dirtied(id INTEGER PRIMARY KEY,
|
||||||
|
book INTEGER NOT NULL,
|
||||||
|
UNIQUE(book));
|
||||||
|
INSERT INTO metadata_dirtied (book) SELECT id FROM books;
|
||||||
|
'''
|
||||||
|
self.conn.execute(script)
|
||||||
|
|
||||||
|
def upgrade_version_14(self):
|
||||||
|
'Cache has_cover'
|
||||||
|
self.conn.execute('ALTER TABLE books ADD COLUMN has_cover BOOL DEFAULT 0')
|
||||||
|
data = self.conn.get('SELECT id,path FROM books', all=True)
|
||||||
|
def has_cover(path):
|
||||||
|
if path:
|
||||||
|
path = os.path.join(self.library_path, path.replace('/', os.sep),
|
||||||
|
'cover.jpg')
|
||||||
|
return os.path.exists(path)
|
||||||
|
return False
|
||||||
|
|
||||||
|
ids = [(x[0],) for x in data if has_cover(x[1])]
|
||||||
|
self.conn.executemany('UPDATE books SET has_cover=1 WHERE id=?', ids)
|
||||||
|
|
||||||
|
def upgrade_version_15(self):
|
||||||
|
'Remove commas from tags'
|
||||||
|
self.conn.execute("UPDATE OR IGNORE tags SET name=REPLACE(name, ',', ';')")
|
||||||
|
self.conn.execute("UPDATE OR IGNORE tags SET name=REPLACE(name, ',', ';;')")
|
||||||
|
self.conn.execute("UPDATE OR IGNORE tags SET name=REPLACE(name, ',', '')")
|
||||||
|
|
||||||
|
def upgrade_version_16(self):
|
||||||
|
self.conn.execute('''
|
||||||
|
DROP TRIGGER IF EXISTS books_update_trg;
|
||||||
|
CREATE TRIGGER books_update_trg
|
||||||
|
AFTER UPDATE ON books
|
||||||
|
BEGIN
|
||||||
|
UPDATE books SET sort=title_sort(NEW.title)
|
||||||
|
WHERE id=NEW.id AND OLD.title <> NEW.title;
|
||||||
|
END;
|
||||||
|
''')
|
||||||
|
|
||||||
|
def upgrade_version_17(self):
|
||||||
|
'custom book data table (for plugins)'
|
||||||
|
script = '''
|
||||||
|
DROP TABLE IF EXISTS books_plugin_data;
|
||||||
|
CREATE TABLE books_plugin_data(id INTEGER PRIMARY KEY,
|
||||||
|
book INTEGER NON NULL,
|
||||||
|
name TEXT NON NULL,
|
||||||
|
val TEXT NON NULL,
|
||||||
|
UNIQUE(book,name));
|
||||||
|
DROP TRIGGER IF EXISTS books_delete_trg;
|
||||||
|
CREATE TRIGGER books_delete_trg
|
||||||
|
AFTER DELETE ON books
|
||||||
|
BEGIN
|
||||||
|
DELETE FROM books_authors_link WHERE book=OLD.id;
|
||||||
|
DELETE FROM books_publishers_link WHERE book=OLD.id;
|
||||||
|
DELETE FROM books_ratings_link WHERE book=OLD.id;
|
||||||
|
DELETE FROM books_series_link WHERE book=OLD.id;
|
||||||
|
DELETE FROM books_tags_link WHERE book=OLD.id;
|
||||||
|
DELETE FROM data WHERE book=OLD.id;
|
||||||
|
DELETE FROM comments WHERE book=OLD.id;
|
||||||
|
DELETE FROM conversion_options WHERE book=OLD.id;
|
||||||
|
DELETE FROM books_plugin_data WHERE book=OLD.id;
|
||||||
|
END;
|
||||||
|
'''
|
||||||
|
self.conn.execute(script)
|
||||||
|
|
||||||
|
def upgrade_version_18(self):
    '''
    Add a library UUID.
    Add an identifiers table.
    Add a languages table.
    Add a last_modified column.
    NOTE: You cannot downgrade after this update, if you do
    any changes you make to book isbns will be lost.
    '''
    # One script performs the whole migration: new tables (library_id,
    # identifiers, languages, books_languages_link), triggers that
    # emulate foreign-key constraints for the languages link table
    # (SQLite FK enforcement is not relied on here), indices on the new
    # tables, a rebuilt books delete trigger that also clears the new
    # link/identifier rows, migration of the legacy books.isbn column
    # into the identifiers table, and a new last_modified column.
    script = '''
    DROP TABLE IF EXISTS library_id;
    CREATE TABLE library_id ( id INTEGER PRIMARY KEY,
        uuid TEXT NOT NULL,
        UNIQUE(uuid)
    );

    DROP TABLE IF EXISTS identifiers;
    CREATE TABLE identifiers ( id INTEGER PRIMARY KEY,
        book INTEGER NON NULL,
        type TEXT NON NULL DEFAULT "isbn" COLLATE NOCASE,
        val TEXT NON NULL COLLATE NOCASE,
        UNIQUE(book, type)
    );

    DROP TABLE IF EXISTS languages;
    CREATE TABLE languages ( id INTEGER PRIMARY KEY,
        lang_code TEXT NON NULL COLLATE NOCASE,
        UNIQUE(lang_code)
    );

    DROP TABLE IF EXISTS books_languages_link;
    CREATE TABLE books_languages_link ( id INTEGER PRIMARY KEY,
        book INTEGER NOT NULL,
        lang_code INTEGER NOT NULL,
        item_order INTEGER NOT NULL DEFAULT 0,
        UNIQUE(book, lang_code)
    );

    DROP TRIGGER IF EXISTS fkc_delete_on_languages;
    CREATE TRIGGER fkc_delete_on_languages
        BEFORE DELETE ON languages
        BEGIN
            SELECT CASE
                WHEN (SELECT COUNT(id) FROM books_languages_link WHERE lang_code=OLD.id) > 0
                THEN RAISE(ABORT, 'Foreign key violation: language is still referenced')
            END;
        END;

    DROP TRIGGER IF EXISTS fkc_delete_on_languages_link;
    CREATE TRIGGER fkc_delete_on_languages_link
        BEFORE INSERT ON books_languages_link
        BEGIN
            SELECT CASE
                WHEN (SELECT id from books WHERE id=NEW.book) IS NULL
                THEN RAISE(ABORT, 'Foreign key violation: book not in books')
                WHEN (SELECT id from languages WHERE id=NEW.lang_code) IS NULL
                THEN RAISE(ABORT, 'Foreign key violation: lang_code not in languages')
            END;
        END;

    DROP TRIGGER IF EXISTS fkc_update_books_languages_link_a;
    CREATE TRIGGER fkc_update_books_languages_link_a
        BEFORE UPDATE OF book ON books_languages_link
        BEGIN
            SELECT CASE
                WHEN (SELECT id from books WHERE id=NEW.book) IS NULL
                THEN RAISE(ABORT, 'Foreign key violation: book not in books')
            END;
        END;
    DROP TRIGGER IF EXISTS fkc_update_books_languages_link_b;
    CREATE TRIGGER fkc_update_books_languages_link_b
        BEFORE UPDATE OF lang_code ON books_languages_link
        BEGIN
            SELECT CASE
                WHEN (SELECT id from languages WHERE id=NEW.lang_code) IS NULL
                THEN RAISE(ABORT, 'Foreign key violation: lang_code not in languages')
            END;
        END;

    DROP INDEX IF EXISTS books_languages_link_aidx;
    CREATE INDEX books_languages_link_aidx ON books_languages_link (lang_code);
    DROP INDEX IF EXISTS books_languages_link_bidx;
    CREATE INDEX books_languages_link_bidx ON books_languages_link (book);
    DROP INDEX IF EXISTS languages_idx;
    CREATE INDEX languages_idx ON languages (lang_code COLLATE NOCASE);

    DROP TRIGGER IF EXISTS books_delete_trg;
    CREATE TRIGGER books_delete_trg
        AFTER DELETE ON books
        BEGIN
            DELETE FROM books_authors_link WHERE book=OLD.id;
            DELETE FROM books_publishers_link WHERE book=OLD.id;
            DELETE FROM books_ratings_link WHERE book=OLD.id;
            DELETE FROM books_series_link WHERE book=OLD.id;
            DELETE FROM books_tags_link WHERE book=OLD.id;
            DELETE FROM books_languages_link WHERE book=OLD.id;
            DELETE FROM data WHERE book=OLD.id;
            DELETE FROM comments WHERE book=OLD.id;
            DELETE FROM conversion_options WHERE book=OLD.id;
            DELETE FROM books_plugin_data WHERE book=OLD.id;
            DELETE FROM identifiers WHERE book=OLD.id;
        END;

    INSERT INTO identifiers (book, val) SELECT id,isbn FROM books WHERE isbn;

    ALTER TABLE books ADD COLUMN last_modified TIMESTAMP NOT NULL DEFAULT "%s";

    '''%isoformat(DEFAULT_DATE, sep=' ')
    # Sqlite does not support non constant default values in alter
    # statements
    self.conn.execute(script)
|
||||||
|
def upgrade_version_19(self):
    # Migrate custom news recipes out of the feeds DB table and into
    # individual recipe files tracked by the custom_recipes config
    # object (one file per recipe, stored next to the config file).
    recipes = self.conn.get('SELECT id,title,script FROM feeds')
    if recipes:
        from calibre.web.feeds.recipes import (custom_recipes,
                custom_recipe_filename)
        bdir = os.path.dirname(custom_recipes.file_path)
        for id_, title, script in recipes:
            # Re-read the existing ids on every iteration because the
            # custom_recipes mapping grows as we add entries below
            existing = frozenset(map(int, custom_recipes.iterkeys()))
            if id_ in existing:
                # Avoid clobbering an already-registered recipe id
                id_ = max(existing) + 1000
            id_ = str(id_)
            fname = custom_recipe_filename(id_, title)
            custom_recipes[id_] = (title, fname)
            if isinstance(script, unicode):  # NOTE: python 2 text type
                script = script.encode('utf-8')
            with open(os.path.join(bdir, fname), 'wb') as f:
                f.write(script)
|
|
||||||
|
def upgrade_version_20(self):
    '''
    Add a link column to the authors table.
    '''
    # Single statement, executed directly
    self.conn.execute('''
    ALTER TABLE authors ADD COLUMN link TEXT NOT NULL DEFAULT "";
    ''')
|
|
||||||
|
|
@ -121,7 +121,8 @@ def isoformat(date_time, assume_utc=False, as_utc=True, sep='T'):
|
|||||||
date_time = date_time.replace(tzinfo=_utc_tz if assume_utc else
|
date_time = date_time.replace(tzinfo=_utc_tz if assume_utc else
|
||||||
_local_tz)
|
_local_tz)
|
||||||
date_time = date_time.astimezone(_utc_tz if as_utc else _local_tz)
|
date_time = date_time.astimezone(_utc_tz if as_utc else _local_tz)
|
||||||
return unicode(date_time.isoformat(sep))
|
# str(sep) because isoformat barfs with unicode sep on python 2.x
|
||||||
|
return unicode(date_time.isoformat(str(sep)))
|
||||||
|
|
||||||
def as_local_time(date_time, assume_utc=True):
|
def as_local_time(date_time, assume_utc=True):
|
||||||
if not hasattr(date_time, 'tzinfo'):
|
if not hasattr(date_time, 'tzinfo'):
|
||||||
|
@ -93,3 +93,24 @@ def find_executable_in_path(name, path=None):
|
|||||||
q = os.path.abspath(os.path.join(x, name))
|
q = os.path.abspath(os.path.join(x, name))
|
||||||
if os.access(q, os.X_OK):
|
if os.access(q, os.X_OK):
|
||||||
return q
|
return q
|
||||||
|
|
||||||
|
def is_case_sensitive(path):
    '''
    Return True if the filesystem is case sensitive.

    path must be the path to an existing directory. You must have permission
    to create and delete files in this directory. The results of this test
    apply to the filesystem containing the directory in path.
    '''
    # Windows filesystems are assumed case insensitive; no probing needed
    if iswindows:
        return False
    # Create a probe file, then check whether a differently-cased name
    # resolves to it. On a case insensitive filesystem probe2 refers to
    # the same file as probe1, so it will appear to exist.
    name1, name2 = ('calibre_test_case_sensitivity.txt',
                    'calibre_TesT_CaSe_sensitiVitY.Txt')
    f1, f2 = os.path.join(path, name1), os.path.join(path, name2)
    if os.path.exists(f1):
        os.remove(f1)
    open(f1, 'w').close()
    try:
        return not os.path.exists(f2)
    finally:
        # Always clean up the probe file, even if the existence check
        # raises (the original leaked the file in that case). Also avoids
        # shadowing the function's own name with a local variable.
        os.remove(f1)
|
||||||
|
|
||||||
|
Loading…
x
Reference in New Issue
Block a user