Merge from trunk

Charles Haley 2011-07-23 16:46:21 +01:00
commit b02430f091
4 changed files with 50 additions and 6 deletions

View File

@@ -128,7 +128,7 @@ class ANDROID(USBMS):
             '7', 'A956', 'A955', 'A43', 'ANDROID_PLATFORM', 'TEGRA_2',
             'MB860', 'MULTI-CARD', 'MID7015A', 'INCREDIBLE', 'A7EB', 'STREAK',
             'MB525', 'ANDROID2.3', 'SGH-I997', 'GT-I5800_CARD', 'MB612',
-            'GT-S5830_CARD']
+            'GT-S5830_CARD', 'GT-S5570_CARD']
     WINDOWS_CARD_A_MEM = ['ANDROID_PHONE', 'GT-I9000_CARD', 'SGH-I897',
             'FILE-STOR_GADGET', 'SGH-T959', 'SAMSUNG_ANDROID', 'GT-P1000_CARD',
             'A70S', 'A101IT', '7', 'INCREDIBLE', 'A7EB', 'SGH-T849_CARD',
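The WINDOWS_MAIN_MEM and WINDOWS_CARD_A_MEM lists above hold identifiers that calibre looks for in the device ID strings Windows reports for a USB mass-storage volume; this hunk adds one more Samsung model ('GT-S5570_CARD') so its storage card is recognised. A minimal sketch of the kind of substring match such lists imply (the function name and example PnP ID are illustrative, not calibre's actual device-scanner API):

WINDOWS_CARD_A_MEM = ['ANDROID_PHONE', 'GT-I9000_CARD', 'SGH-I897',
        'GT-S5830_CARD', 'GT-S5570_CARD']  # abbreviated copy of the list above

def matches_windows_id(pnp_id, candidates):
    # True if any known identifier occurs in the Windows PnP device ID string
    pnp_id = pnp_id.upper()
    return any(candidate in pnp_id for candidate in candidates)

print(matches_windows_id(
    r'USBSTOR\DISK&VEN_SAMSUNG&PROD_GT-S5570_CARD&REV_0000',
    WINDOWS_CARD_A_MEM))  # -> True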

View File

@@ -638,7 +638,7 @@ class IndexEntry(object): # {{{
                 self.tags.append(Tag(aut_tag[0], [val], self.entry_type,
                     cncx))
-        if raw.replace(b'\x00', b''):
+        if raw.replace(b'\x00', b''): # There can be padding null bytes
             raise ValueError('Extra bytes in INDX table entry %d: %r'%(self.index, raw))
 
     @property
@@ -736,6 +736,9 @@ class IndexRecord(object): # {{{
         for i in range(self.idxt_count):
             off, = u(b'>H', indices[i*2:(i+1)*2])
             self.index_offsets.append(off-192)
+        rest = indices[(i+1)*2:]
+        if rest.replace(b'\0', ''): # There can be padding null bytes
+            raise ValueError('Extra bytes after IDXT table: %r'%rest)
 
         indxt = raw[192:self.idxt_offset]
         self.indices = []
@@ -772,7 +775,7 @@ class IndexRecord(object): # {{{
                 len(w), not bool(w.replace(b'\0', b'')) ))
         a('Header length: %d'%self.header_length)
         u(self.unknown1)
-        a('Header Type: %d'%self.header_type)
+        a('Unknown (header type? index record number? always 1?): %d'%self.header_type)
         u(self.unknown2)
         a('IDXT Offset: %d'%self.idxt_offset)
         a('IDXT Count: %d'%self.idxt_count)
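The check added in the second hunk follows from the IDXT layout: the table is idxt_count big-endian 16-bit offsets, and any bytes left between the last offset and the end of the slice may only be alignment padding (null bytes). A standalone sketch of that read-and-verify step, assuming `indices` is the byte slice following the IDXT count field, as in the reader above:

from struct import unpack

def read_idxt_offsets(indices, idxt_count):
    # Each entry offset is an unsigned big-endian 16-bit value, absolute within
    # the record; the reader above subtracts the 192 byte INDX header from it.
    offsets = [unpack(b'>H', indices[i*2:(i+1)*2])[0] for i in range(idxt_count)]
    rest = indices[idxt_count*2:]
    if rest.replace(b'\x00', b''):  # only padding null bytes may remain
        raise ValueError('Extra bytes after IDXT table: %r' % rest)
    return offsets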

View File

@@ -173,10 +173,52 @@ class Indexer(object):
         if self.is_periodical:
             indices = self.create_periodical_index()
-            indices
         else:
             raise NotImplementedError()
 
+        self.records.append(self.create_index_record(indices))
+
+    def create_index_record(self, indices):
+        header_length = 192
+        buf = StringIO()
+        # Write index entries
+        offsets = []
+        for i in indices:
+            offsets.append(buf.tell())
+            buf.write(i.bytestring)
+        index_block = align_block(buf.getvalue())
+
+        # Write offsets to index entries as an IDXT block
+        idxt_block = b'IDXT'
+        buf.truncate(0)
+        for offset in offsets:
+            buf.write(pack(b'>H', header_length+offset))
+        idxt_block = align_block(idxt_block + buf.getvalue())
+
+        body = index_block + idxt_block
+
+        header = b'INDX'
+        buf.truncate(0)
+        buf.write(pack(b'>I', header_length))
+        buf.write(b'\0'*4) # Unknown
+        buf.write(pack(b'>I', 1)) # Header type? Or index record number?
+        buf.write(b'\0'*4) # Unknown
+        # IDXT block offset
+        buf.write(pack(b'>I', header_length + len(index_block)))
+        # Number of index entries
+        buf.write(pack(b'>I', len(offsets)))
+        # Unknown
+        buf.write(b'\xff'*8)
+        # Unknown
+        buf.write(b'\0'*156)
+        header += buf.getvalue()
+
+        ans = header + body
+        if len(ans) > 0x10000:
+            raise ValueError('Too many entries (%d) in the TOC'%len(offsets))
+        return ans
+
     def create_periodical_index(self): # {{{
         periodical_node = iter(self.oeb.toc).next()
         periodical_node_offset = self.serializer.body_start_offset
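The new create_index_record builds the record in three pieces: the serialized index entries (padded to a 4 byte boundary by align_block), an IDXT block holding one big-endian 16-bit absolute offset per entry, and a fixed 192 byte header in front ('INDX', the header length, an unknown word, a constant 1, another unknown word, the IDXT offset, the entry count, 8 bytes of 0xff and null filler). The 0x10000 size cap follows from the 16-bit offsets: an entry starting past 65535 bytes into the record could not be addressed. A minimal sketch that reads such a record back, mirroring only the fields the writer above emits (not calibre's actual INDX parser):

from struct import unpack

def split_index_record(record):
    # Mirrors the layout produced by create_index_record above.
    if record[:4] != b'INDX':
        raise ValueError('Not an INDX record')
    header_length, = unpack(b'>I', record[4:8])    # written as 192
    idxt_offset,   = unpack(b'>I', record[20:24])  # start of the IDXT block
    entry_count,   = unpack(b'>I', record[24:28])  # number of index entries
    if record[idxt_offset:idxt_offset+4] != b'IDXT':
        raise ValueError('IDXT block not found at offset %d' % idxt_offset)
    # The IDXT offsets are absolute within the record (the writer stored
    # header_length + the entry's position inside the entry block).
    offsets = [unpack(b'>H', record[idxt_offset+4+2*i:idxt_offset+6+2*i])[0]
               for i in range(entry_count)]
    # Slice out the entries; the last slice may carry a few alignment null bytes.
    ends = offsets[1:] + [idxt_offset]
    return [record[start:end] for start, end in zip(offsets, ends)]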

View File

@@ -133,6 +133,7 @@ def render_data(mi, use_roman_numbers=True, all_fields=False):
         authors = []
         formatter = EvalFormatter()
         for aut in mi.authors:
+            link = ''
             if mi.author_link_map[aut]:
                 link = mi.author_link_map[aut]
             elif gprefs.get('default_author_link'):
@@ -143,8 +144,6 @@ def render_data(mi, use_roman_numbers=True, all_fields=False):
                 vals['author_sort'] = aut.replace(' ', '+')
                 link = formatter.safe_format(
                         gprefs.get('default_author_link'), vals, '', vals)
-            else:
-                link = ''
             if link:
                 link = prepare_string_for_xml(link)
                 authors.append(u'<a href="%s">%s</a>'%(link, aut))
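These last two hunks are a single small refactor: link is initialised to an empty string before the if/elif chain, which makes the trailing else branch that only reset it redundant, while keeping the later `if link:` test well defined when an author has neither an entry in author_link_map nor a default_author_link template. A minimal sketch of the resulting control flow (the function and its parameters are illustrative stand-ins, not calibre's render_data):

def author_links(authors, author_link_map, default_template, render):
    out = []
    for aut in authors:
        link = ''                          # initialised up front, as in the hunk
        if author_link_map.get(aut):
            link = author_link_map[aut]    # an explicit per-author link wins
        elif default_template:
            # e.g. render('http://example.com/{author}', aut) -> a URL
            link = render(default_template, aut)
        if link:                           # no trailing else: link = '' needed
            out.append(u'<a href="%s">%s</a>' % (link, aut))
        else:
            out.append(aut)
    return out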