Mirror of https://github.com/kovidgoyal/calibre.git (synced 2025-07-09 03:04:10 -04:00)
Do not use detect_types in legacy sqlite code
This code is unused anywhere except in tests. detect_types did not work with python3, so make the type coercion explicit. Fixes a whole lot of failing tests on py3.
This commit is contained in:
parent 4c7e194a92
commit 1529fb2f8b
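The idea behind the change, in a standalone sketch (not calibre code): with detect_types, sqlite3 converts values through converters registered for the declared column type, which the commit message says did not work with python3; without it, the driver returns the stored value as-is and the caller coerces it explicitly. Plain strptime stands in for calibre's c_parse here.

import sqlite3
from datetime import datetime

# No detect_types argument: the declared TIMESTAMP type is ignored by the
# driver and the stored text comes back unchanged.
db = sqlite3.connect(':memory:')
db.execute('CREATE TABLE books (id INTEGER, pubdate TIMESTAMP)')
db.execute("INSERT INTO books VALUES (1, '2019-04-01 10:30:00')")

raw = db.execute('SELECT pubdate FROM books WHERE id=1').fetchone()[0]
# Explicit coercion in Python, independent of sqlite3's converter machinery.
pubdate = None if raw is None else datetime.strptime(raw, '%Y-%m-%d %H:%M:%S')
print(type(raw).__name__, pubdate)  # str 2019-04-01 10:30:00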
@@ -158,9 +158,14 @@ def force_to_bool(val):
 
 class CacheRow(list):  # {{{
 
-    def __init__(self, db, composites, val, series_col, series_sort_col):
+    def __init__(self, db, composites, datetimes, val, series_col, series_sort_col):
+        from calibre.db.tables import c_parse
         self.db = db
         self._composites = composites
+        for num in datetimes:
+            val[num] = c_parse(val[num])
+            if val[num] is UNDEFINED_DATE:
+                val[num] = None
         list.__init__(self, val)
         self._must_do = len(composites) > 0
         self._series_col = series_col
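For reference, a standalone sketch of the pattern the hunk above introduces: a list subclass that coerces the datetime columns of a raw database row once, at construction time. strptime stands in for calibre's c_parse and the UNDEFINED_DATE handling is omitted.

from datetime import datetime

class Row(list):

    def __init__(self, val, datetime_cols):
        # Coerce the selected columns before storing the row.
        val = list(val)
        for num in datetime_cols:
            if isinstance(val[num], str):
                val[num] = datetime.strptime(val[num], '%Y-%m-%d %H:%M:%S')
        list.__init__(self, val)

row = Row((1, 'Some Title', '2019-04-01 10:30:00'), datetime_cols={2})
print(row[2].year)  # 2019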
@@ -216,10 +221,14 @@ class ResultCache(SearchQueryParser): # {{{
         self.FIELD_MAP = FIELD_MAP
         self.db_prefs = db_prefs
         self.composites = {}
+        self.datetimes = set()
         self.udc = get_udc()
         for key in field_metadata:
-            if field_metadata[key]['datatype'] == 'composite':
+            dt = field_metadata[key]['datatype']
+            if dt == 'composite':
                 self.composites[field_metadata[key]['rec_index']] = key
+            elif dt == 'datetime':
+                self.datetimes.add(field_metadata[key]['rec_index'])
         self.series_col = field_metadata['series']['rec_index']
         self.series_sort_col = field_metadata['series_sort']['rec_index']
         self._data = []
@@ -991,7 +1000,7 @@ class ResultCache(SearchQueryParser): # {{{
         '''
         for id in ids:
             try:
-                self._data[id] = CacheRow(db, self.composites,
+                self._data[id] = CacheRow(db, self.composites, self.datetimes,
                         db.conn.get('SELECT * from meta2 WHERE id=?', (id,))[0],
                         self.series_col, self.series_sort_col)
                 self._data[id].append(db.book_on_device_string(id))
@@ -1011,7 +1020,7 @@ class ResultCache(SearchQueryParser): # {{{
             return
         self._data.extend(repeat(None, max(ids)-len(self._data)+2))
         for id in ids:
-            self._data[id] = CacheRow(db, self.composites,
+            self._data[id] = CacheRow(db, self.composites, self.datetimes,
                     db.conn.get('SELECT * from meta2 WHERE id=?', (id,))[0],
                     self.series_col, self.series_sort_col)
             self._data[id].append(db.book_on_device_string(id))
@@ -1042,7 +1051,7 @@ class ResultCache(SearchQueryParser): # {{{
         temp = db.conn.get('SELECT * FROM meta2')
         self._data = list(repeat(None, temp[-1][0]+2)) if temp else []
         for r in temp:
-            self._data[r[0]] = CacheRow(db, self.composites, r,
+            self._data[r[0]] = CacheRow(db, self.composites, self.datetimes, r,
                                         self.series_col, self.series_sort_col)
             self._uuid_map[self._data[r[0]][self._uuid_column_index]] = r[0]
 
@@ -14,7 +14,7 @@ from calibre.constants import preferred_encoding
 from calibre.library.field_metadata import FieldMetadata
 from calibre.utils.date import parse_date
 from calibre.utils.config import tweaks
-from polyglot.builtins import unicode_type
+from polyglot.builtins import unicode_type, string_or_bytes
 
 
 class CustomColumns(object):
@@ -71,11 +71,11 @@ class CustomColumns(object):
                     'label':record[0],
                     'name':record[1],
                     'datatype':record[2],
-                    'editable':record[3],
+                    'editable':bool(record[3]),
                     'display':json.loads(record[4]),
-                    'normalized':record[5],
+                    'normalized':bool(record[5]),
                     'num':record[6],
-                    'is_multiple':record[7],
+                    'is_multiple':bool(record[7]),
                     }
             if data['display'] is None:
                 data['display'] = {}
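The bool() wrappers in the hunk above follow from the same change: with no converter registered for the declared BOOL type, sqlite3 hands back the stored integer, so the reader coerces it itself. A minimal illustration (not calibre code):

import sqlite3

db = sqlite3.connect(':memory:')
db.execute('CREATE TABLE custom_columns (id INTEGER, editable BOOL)')
db.execute('INSERT INTO custom_columns VALUES (1, 1)')
record = db.execute('SELECT id, editable FROM custom_columns').fetchone()
print(record[1], bool(record[1]))  # 1 True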
@@ -217,6 +217,11 @@ class CustomColumns(object):
             ans = ans.split(data['multiple_seps']['cache_to_list']) if ans else []
             if data['display'].get('sort_alpha', False):
                 ans.sort(key=lambda x:x.lower())
+        if data['datatype'] == 'datetime' and isinstance(ans, string_or_bytes):
+            from calibre.db.tables import c_parse, UNDEFINED_DATE
+            ans = c_parse(ans)
+            if ans is UNDEFINED_DATE:
+                ans = None
         return ans
 
     def get_custom_extra(self, idx, label=None, num=None, index_is_id=False):
@@ -244,6 +249,11 @@ class CustomColumns(object):
             ans = ans.split(data['multiple_seps']['cache_to_list']) if ans else []
             if data['display'].get('sort_alpha', False):
                 ans.sort(key=lambda x: x.lower())
+        if data['datatype'] == 'datetime' and isinstance(ans, string_or_bytes):
+            from calibre.db.tables import c_parse, UNDEFINED_DATE
+            ans = c_parse(ans)
+            if ans is UNDEFINED_DATE:
+                ans = None
         if data['datatype'] != 'series':
             return (ans, None)
         ign,lt = self.custom_table_names(data['num'])
@@ -251,8 +251,7 @@ def load_c_extensions(conn, debug=DEBUG):
 
 
 def do_connect(path, row_factory=None):
-    conn = sqlite.connect(path, factory=Connection,
-            detect_types=sqlite.PARSE_DECLTYPES|sqlite.PARSE_COLNAMES)
+    conn = sqlite.connect(path, factory=Connection)
     conn.execute('pragma cache_size=-5000')
     encoding = conn.execute('pragma encoding').fetchone()[0]
     conn.create_aggregate('sortconcat', 2, SortedConcatenate)