Merge from trunk

Charles Haley 2011-01-25 22:17:46 +00:00
commit e15c23c11c
7 changed files with 27 additions and 11 deletions

View File

@@ -390,7 +390,7 @@ class ChooseLibraryAction(InterfaceAction):
         #self.dbref = weakref.ref(self.gui.library_view.model().db)
         #self.before_mem = memory()/1024**2
         self.gui.library_moved(loc)
-        #QTimer.singleShot(1000, self.debug_leak)
+        #QTimer.singleShot(5000, self.debug_leak)

     def debug_leak(self):
         import gc
@@ -398,7 +398,7 @@ class ChooseLibraryAction(InterfaceAction):
         ref = self.dbref
         for i in xrange(3): gc.collect()
         if ref() is not None:
-            print 11111, ref()
+            print 'DB object alive:', ref()
             for r in gc.get_referrers(ref())[:10]:
                 print r
                 print
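
For reference, the debug_leak helper tweaked above uses a handy leak-hunting technique: keep only a weak reference to the suspect object, force a few GC passes, and if the object is still alive, dump a sample of its referrers. A minimal stand-alone Python 3 sketch of the same idea; SomeDatabase and the stray holder list are invented for illustration:

    import gc
    import weakref

    class SomeDatabase(object):
        pass

    holder = []                  # simulates an accidental strong reference
    db = SomeDatabase()
    holder.append(db)

    ref = weakref.ref(db)        # keep only a weak reference ourselves
    del db

    for i in range(3):
        gc.collect()

    if ref() is not None:
        print('DB object alive:', ref())
        for r in gc.get_referrers(ref())[:10]:
            print(r)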

View File

@ -43,7 +43,7 @@
<double>0.000000000000000</double> <double>0.000000000000000</double>
</property> </property>
<property name="maximum"> <property name="maximum">
<double>30.000000000000000</double> <double>50.000000000000000</double>
</property> </property>
<property name="singleStep"> <property name="singleStep">
<double>1.000000000000000</double> <double>1.000000000000000</double>

View File

@@ -250,22 +250,27 @@ class Scheduler(QObject):
         self.timer = QTimer(self)
         self.timer.start(int(self.INTERVAL * 60 * 1000))
-        self.oldest_timer = QTimer()
-        self.connect(self.oldest_timer, SIGNAL('timeout()'), self.oldest_check)
         self.connect(self.timer, SIGNAL('timeout()'), self.check)
         self.oldest = gconf['oldest_news']
-        self.oldest_timer.start(int(60 * 60 * 1000))
         QTimer.singleShot(5 * 1000, self.oldest_check)
         self.database_changed = self.recipe_model.database_changed

     def oldest_check(self):
         if self.oldest > 0:
             delta = timedelta(days=self.oldest)
-            ids = self.recipe_model.db.tags_older_than(_('News'), delta)
+            try:
+                ids = self.recipe_model.db.tags_older_than(_('News'), delta)
+            except:
+                # Should never happen
+                ids = []
+                import traceback
+                traceback.print_exc()
             if ids:
                 ids = list(ids)
                 if ids:
                     self.delete_old_news.emit(ids)
+        QTimer.singleShot(60 * 60 * 1000, self.oldest_check)

     def show_dialog(self, *args):
         self.lock.lock()
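
The Scheduler change above swaps the dedicated hourly QTimer for a self-rescheduling QTimer.singleShot call at the end of oldest_check. A rough sketch of that pattern, PyQt4-era API to match the surrounding code, with invented names and short intervals so it can be watched:

    import sys
    from PyQt4.Qt import QApplication, QTimer

    app = QApplication(sys.argv)

    def periodic_check():
        print('checking...')                          # stand-in for the real work
        QTimer.singleShot(2 * 1000, periodic_check)   # re-arm for the next run

    QTimer.singleShot(1 * 1000, periodic_check)       # first run shortly after startup
    app.exec_()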

View File

@@ -727,7 +727,11 @@ class TagsModel(QAbstractItemModel): # {{{
         for user_cat in sorted(self.db.prefs.get('user_categories', {}).keys(),
                                key=sort_key):
             cat_name = '@' + user_cat # add the '@' to avoid name collision
-            tb_cats.add_user_category(label=cat_name, name=user_cat)
+            try:
+                tb_cats.add_user_category(label=cat_name, name=user_cat)
+            except ValueError:
+                import traceback
+                traceback.print_exc()
         if len(saved_searches().names()):
             tb_cats.add_search_category(label='search', name=_('Searches'))

View File

@@ -42,7 +42,8 @@ class MetadataBackup(Thread): # {{{
     def stop(self):
         self.keep_running = False
-        self.flush()
+
+    def break_cycles(self):
         # Break cycles so that this object doesn't hold references to db
         self.do_write = self.get_metadata_for_dump = self.clear_dirtied = \
                 self.set_dirtied = self.db = None
@@ -111,6 +112,8 @@ class MetadataBackup(Thread): # {{{
                 continue

             self.in_limbo = None
+        self.flush()
+        self.break_cycles()

     def flush(self):
         'Used during shutdown to ensure that a dirtied book is not missed'
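
The two MetadataBackup hunks move the final flush (and the reference-dropping, now in break_cycles) from stop() to the tail of the worker's run loop, so the last write happens on the worker thread itself before it lets go of the database. A toy threading sketch of that shutdown pattern; all names here are illustrative, not calibre's real API:

    from threading import Thread
    import time

    class Backup(Thread):
        def __init__(self, db):
            Thread.__init__(self)
            self.db = db
            self.keep_running = True

        def stop(self):
            self.keep_running = False      # only signals; no I/O from the caller

        def run(self):
            while self.keep_running:
                time.sleep(0.01)           # stand-in for the real dump-one-book step
            self.flush()                   # final flush happens on the worker thread
            self.break_cycles()            # then drop references so db can be collected

        def flush(self):
            pass                           # write out anything still pending

        def break_cycles(self):
            self.db = None

    worker = Backup(db=object())
    worker.start()
    worker.stop()
    worker.join()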

View File

@@ -1376,10 +1376,12 @@ class LibraryDatabase2(LibraryDatabase, SchemaUpgrade, CustomColumns):
     def tags_older_than(self, tag, delta):
         tag = tag.lower().strip()
         now = nowf()
+        tindex = self.FIELD_MAP['timestamp']
+        gindex = self.FIELD_MAP['tags']
         for r in self.data._data:
             if r is not None:
-                if (now - r[self.FIELD_MAP['timestamp']]) > delta:
-                    tags = r[self.FIELD_MAP['tags']]
+                if (now - r[tindex]) > delta:
+                    tags = r[gindex]
                     if tags and tag in [x.strip() for x in
                             tags.lower().split(',')]:
                         yield r[self.FIELD_MAP['id']]
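
The tags_older_than change is a small hot-loop optimisation: the FIELD_MAP index lookups are hoisted out of the per-row loop. A self-contained toy version of the same generator; the field map, rows and dates below are made up:

    from datetime import datetime, timedelta

    FIELD_MAP = {'id': 0, 'timestamp': 1, 'tags': 2}
    rows = [
        (1, datetime(2010, 1, 1), 'News, Fiction'),
        (2, datetime(2011, 1, 20), 'News'),
    ]

    def tags_older_than(tag, delta, now=datetime(2011, 1, 25)):
        tag = tag.lower().strip()
        tindex = FIELD_MAP['timestamp']   # looked up once, not per row
        gindex = FIELD_MAP['tags']
        for r in rows:
            if r is not None and (now - r[tindex]) > delta:
                tags = r[gindex]
                if tags and tag in [x.strip() for x in tags.lower().split(',')]:
                    yield r[FIELD_MAP['id']]

    print(list(tags_older_than('news', timedelta(days=30))))   # -> [1]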

View File

@@ -479,6 +479,8 @@ class FieldMetadata(dict):
             del self._tb_cats[key]
             if key in self._search_term_map:
                 del self._search_term_map[key]
+            if key.lower() in self._search_term_map:
+                del self._search_term_map[key.lower()]

     def cc_series_index_column_for(self, key):
         return self._tb_cats[key]['rec_index'] + 1