Mirror of https://github.com/kovidgoyal/calibre.git (synced 2025-07-31 14:33:54 -04:00)
Fix remaining set([])

parent f04ea0f22e
commit ec3238d1cd
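The change is purely mechanical: every literal set([]) or frozenset([]) becomes set() or frozenset(). Both spellings produce an empty set, but the bracketed form first builds and throws away a temporary empty list, so the bare constructor is the idiomatic and slightly faster choice. A minimal standalone sketch of the equivalence (illustrative only, not taken from the calibre sources):

# Illustrative sketch (assumed standalone script, not part of calibre):
# set([]) and set() construct equal empty sets, but set([]) also builds
# and discards a temporary empty list, which is pure overhead.
import timeit

assert set([]) == set()
assert frozenset([]) == frozenset()

slow = timeit.timeit('set([])', number=1000000)
fast = timeit.timeit('set()', number=1000000)
print('set([]):', round(slow, 3), 'seconds')
print('set():  ', round(fast, 3), 'seconds')  # typically the faster of the two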
@@ -42,7 +42,7 @@ class GrantLand(BasicNewsRecipe):

def parse_index(self):
feeds = []
-seen_urls = set([])
+seen_urls = set()

for category in self.CATEGORIES:

@@ -43,7 +43,7 @@ class insider(BasicNewsRecipe):
raise ValueError('Could not find category content')

articles = []
-seen_titles = set([])
+seen_titles = set()
for title in titles:
if title.string in seen_titles:
continue
@@ -31,7 +31,7 @@ class LetsGetCritical(BasicNewsRecipe):
def parse_index(self):
self.cover_url = 'http://www.letsgetcritical.org/wp-content/themes/lets_get_critical/images/lgc.jpg'
feeds = []
-seen_urls = set([])
+seen_urls = set()
regex = re.compile(r'http://(www\.)?([^/:]+)', re.I)

for category in self.CATEGORIES:
@@ -98,7 +98,7 @@ class MWJournal(BasicNewsRecipe):
self.log('Found Cover image:', self.cover_url)

feeds = []
-seen_titles = set([]) # This is used to remove duplicant articles
+seen_titles = set() # This is used to remove duplicant articles
sections = soup.find('div', attrs={'class': 'box2 publication'})
for section in sections.findAll('div', attrs={'class': 'records'}):
section_title = self.tag_to_string(section.find('h3'))
@@ -55,7 +55,7 @@ class Pagina12(BasicNewsRecipe):

feeds = []

-seen_titles = set([])
+seen_titles = set()
for section in soup.findAll('div', 'seccionx'):
numero += 1
print(numero)
@@ -21,7 +21,7 @@ class pravo(BasicNewsRecipe):
no_stylesheets = True

# our variables
-seen_titles = set([])
+seen_titles = set()
# only yesterday's articles are online
parent_url = 'http://pravo.novinky.cz/minule/'
feeds = [
@@ -48,7 +48,7 @@ class TheSouthernStar(BasicNewsRecipe):

def parse_index(self):
feeds = []
-seen_titles = set([])
+seen_titles = set()

articles = self.fetch_ss_articles(self.NEWS_INDEX, seen_titles)
if articles:
@@ -45,7 +45,7 @@ class VanityFair(BasicNewsRecipe):
self.cover_url = 'http://www.vanityfair.com/magazine/toc/contents-%s/_jcr_content/par/cn_contentwell/par-main/cn_pagination_contai/cn_image.size.cover_vanityfair_300.jpg' % ( # noqa
date.today().strftime('%Y%m'))
feeds = []
-seen_urls = set([])
+seen_urls = set()
features = []

for category in self.CATEGORIES:
@@ -405,7 +405,7 @@ class MetadataReaderPlugin(Plugin): # {{{
'''
#: Set of file types for which this plugin should be run.
#: For example: ``set(['lit', 'mobi', 'prc'])``
-file_types = set([])
+file_types = set()

supported_platforms = ['windows', 'osx', 'linux']
version = numeric_version
@@ -437,7 +437,7 @@ class MetadataWriterPlugin(Plugin): # {{{
'''
#: Set of file types for which this plugin should be run.
#: For example: ``set(['lit', 'mobi', 'prc'])``
-file_types = set([])
+file_types = set()

supported_platforms = ['windows', 'osx', 'linux']
version = numeric_version
@@ -473,7 +473,7 @@ class CatalogPlugin(Plugin): # {{{

#: Output file type for which this plugin should be run.
#: For example: 'epub' or 'xml'
-file_types = set([])
+file_types = set()

type = _('Catalog generator')

@@ -37,8 +37,8 @@ def _config():
c.add_opt('plugins', default={}, help=_('Installed plugins'))
c.add_opt('filetype_mapping', default={}, help=_('Mapping for filetype plugins'))
c.add_opt('plugin_customization', default={}, help=_('Local plugin customization'))
-c.add_opt('disabled_plugins', default=set([]), help=_('Disabled plugins'))
-c.add_opt('enabled_plugins', default=set([]), help=_('Enabled plugins'))
+c.add_opt('disabled_plugins', default=set(), help=_('Disabled plugins'))
+c.add_opt('enabled_plugins', default=set(), help=_('Enabled plugins'))

return ConfigProxy(c)

@@ -307,14 +307,14 @@ def available_store_plugins():


def stores():
-stores = set([])
+stores = set()
for plugin in store_plugins():
stores.add(plugin.name)
return stores


def available_stores():
-stores = set([])
+stores = set()
for plugin in available_store_plugins():
stores.add(plugin.name)
return stores
@@ -575,7 +575,7 @@ def catalog_plugins():


def available_catalog_formats():
-formats = set([])
+formats = set()
for plugin in catalog_plugins():
if not is_disabled(plugin):
for format in plugin.file_types:
@@ -61,7 +61,7 @@ class DynamicFilter(object): # {{{

def __init__(self, name):
self.name = name
-self.ids = frozenset([])
+self.ids = frozenset()

def __call__(self, id_):
return int(id_ in self.ids)
@@ -29,7 +29,7 @@ def cleanup_tags(tags):
tags = [x.decode(preferred_encoding, 'replace')
if isbytestring(x) else x for x in tags]
tags = [u' '.join(x.split()) for x in tags]
-ans, seen = [], set([])
+ans, seen = [], set()
for tag in tags:
if tag.lower() not in seen:
seen.add(tag.lower())
@@ -192,7 +192,7 @@ class XMLCache(object):

def ensure_unique_playlist_titles(self):
for i, root in self.record_roots.items():
-seen = set([])
+seen = set()
for playlist in root.xpath('//*[local-name()="playlist"]'):
title = playlist.get('title', None)
if title is None:
@@ -74,7 +74,7 @@ class xISBN(object):

def get_associated_isbns(self, isbn):
data = self.get_data(isbn)
-ans = set([])
+ans = set()
for rec in data:
for i in rec.get('isbn', []):
ans.add(i)
@@ -491,7 +491,7 @@ class Page(object):
for i, x in enumerate(self.elements):
x.idx = i
current_region = Region(self.opts, self.log)
-processed = set([])
+processed = set()
for x in self.elements:
if x in processed:
continue
@@ -526,8 +526,8 @@ class Page(object):
# closer to the avg number of cols in the set, if equal use larger
# region)
found = True
-absorbed = set([])
-processed = set([])
+absorbed = set()
+processed = set()
while found:
found = False
for i, region in enumerate(self.regions):
@@ -90,11 +90,11 @@ class InterfaceAction(QObject):

#: Set of locations to which this action must not be added.
#: See :attr:`all_locations` for a list of possible locations
-dont_add_to = frozenset([])
+dont_add_to = frozenset()

#: Set of locations from which this action must not be removed.
#: See :attr:`all_locations` for a list of possible locations
-dont_remove_from = frozenset([])
+dont_remove_from = frozenset()

all_locations = frozenset(['toolbar', 'toolbar-device', 'context-menu',
'context-menu-device', 'toolbar-child', 'menubar', 'menubar-device',
@@ -109,7 +109,7 @@ class MarkBooksAction(InterfaceAction):
if not rows or len(rows) == 0:
d = error_dialog(self.gui, _('Cannot mark'), _('No books selected'))
d.exec_()
-return set([])
+return set()
return set(map(self.gui.library_view.model().id, rows))

def toggle_ids(self, book_ids):
@@ -828,7 +828,7 @@ class BulkBase(Base):
return self._cached_gui_val_

def get_initial_value(self, book_ids):
-values = set([])
+values = set()
for book_id in book_ids:
val = self.db.get_custom(book_id, num=self.col_id, index_is_id=True)
if isinstance(val, list):
@@ -181,7 +181,7 @@ class JobManager(QAbstractTableModel, AdaptSQP): # {{{
self.dataChanged.emit(idx, idx)

# Update parallel jobs
-jobs = set([])
+jobs = set()
while True:
try:
jobs.add(self.server.changed_jobs_queue.get_nowait())
@@ -297,7 +297,7 @@ class ConfigModel(SearchQueryParser, QAbstractItemModel):
def get_matches(self, location, query, candidates=None):
if candidates is None:
candidates = self.universal_set()
-ans = set([])
+ans = set()
if not query:
return ans
query = lower(query)
@@ -1243,8 +1243,8 @@ class OnDeviceSearch(SearchQueryParser): # {{{
query = query.lower()

if location not in self.USABLE_LOCATIONS:
-return set([])
-matches = set([])
+return set()
+matches = set()
all_locs = set(self.USABLE_LOCATIONS) - {'all', 'tags'}
locations = all_locs if location == 'all' else [location]
q = {
@@ -1125,7 +1125,7 @@ class BooksView(QTableView): # {{{
rows = {x.row() if hasattr(x, 'row') else x for x in
identifiers}
if using_ids:
-rows = set([])
+rows = set()
identifiers = set(identifiers)
m = self.model()
for row in range(m.rowCount(QModelIndex())):
@@ -390,7 +390,7 @@ class AuthorsEdit(EditWithComplete, ToMetadataMixin):
return self.original_val != self.current_val

def initialize(self, db, id_):
-self.books_to_refresh = set([])
+self.books_to_refresh = set()
self.set_separator('&')
self.set_space_before_sep(True)
self.set_add_separator(tweaks['authors_completer_append_separator'])
@@ -602,7 +602,7 @@ class SeriesEdit(EditWithComplete, ToMetadataMixin):
self.setToolTip(self.TOOLTIP)
self.setWhatsThis(self.TOOLTIP)
self.setEditable(True)
-self.books_to_refresh = set([])
+self.books_to_refresh = set()
self.lineEdit().textChanged.connect(self.data_changed)

@property
@@ -618,7 +618,7 @@ class SeriesEdit(EditWithComplete, ToMetadataMixin):
self.lineEdit().setCursorPosition(0)

def initialize(self, db, id_):
-self.books_to_refresh = set([])
+self.books_to_refresh = set()
all_series = db.all_series()
all_series.sort(key=lambda x: sort_key(x[1]))
self.update_items_cache([x[1] for x in all_series])
@@ -908,7 +908,7 @@ class FormatsManager(QWidget):
self.changed = False
self.formats.clear()
exts = db.formats(id_, index_is_id=True)
-self.original_val = set([])
+self.original_val = set()
if exts:
exts = exts.split(',')
for ext in exts:
@@ -1370,7 +1370,7 @@ class TagsEdit(EditWithComplete, ToMetadataMixin): # {{{
EditWithComplete.__init__(self, parent)
self.currentTextChanged.connect(self.data_changed)
self.lineEdit().setMaxLength(655360) # see https://bugs.launchpad.net/bugs/1630944
-self.books_to_refresh = set([])
+self.books_to_refresh = set()
self.setToolTip(self.TOOLTIP)
self.setWhatsThis(self.TOOLTIP)

@@ -1386,7 +1386,7 @@ class TagsEdit(EditWithComplete, ToMetadataMixin): # {{{
self.setCursorPosition(0)

def initialize(self, db, id_):
-self.books_to_refresh = set([])
+self.books_to_refresh = set()
tags = db.tags(id_, index_is_id=True)
tags = tags.split(',') if tags else []
self.current_val = tags
@@ -1753,7 +1753,7 @@ class PublisherEdit(EditWithComplete, ToMetadataMixin): # {{{
self.set_separator(None)
self.setSizeAdjustPolicy(
self.AdjustToMinimumContentsLengthWithIcon)
-self.books_to_refresh = set([])
+self.books_to_refresh = set()
self.clear_button = QToolButton(parent)
self.clear_button.setIcon(QIcon(I('trash.png')))
self.clear_button.setToolTip(_('Clear publisher'))
@@ -1772,7 +1772,7 @@ class PublisherEdit(EditWithComplete, ToMetadataMixin): # {{{
self.lineEdit().setCursorPosition(0)

def initialize(self, db, id_):
-self.books_to_refresh = set([])
+self.books_to_refresh = set()
all_publishers = db.all_publishers()
all_publishers.sort(key=lambda x: sort_key(x[1]))
self.update_items_cache([x[1] for x in all_publishers])
@@ -388,7 +388,7 @@ class MetadataSingleDialogBase(QDialog):

def __call__(self, id_):
self.book_id = id_
-self.books_to_refresh = set([])
+self.books_to_refresh = set()
self.metadata_before_fetch = None
for widget in self.basic_metadata_widgets:
widget.initialize(self.db, id_)
@@ -229,8 +229,8 @@ class SearchFilter(SearchQueryParser):
query = query.lower()

if location not in self.USABLE_LOCATIONS:
-return set([])
-matches = set([])
+return set()
+matches = set()
all_locs = set(self.USABLE_LOCATIONS) - {'all'}
locations = all_locs if location == 'all' else [location]
q = {
@@ -342,7 +342,7 @@ class SearchFilter(SearchQueryParser):
self.srs.add(search_result)

def clear_search_results(self):
-self.srs = set([])
+self.srs = set()

def universal_set(self):
return self.srs
@@ -391,8 +391,8 @@ class SearchFilter(SearchQueryParser):
query = query.lower()

if location not in self.USABLE_LOCATIONS:
-return set([])
-matches = set([])
+return set()
+matches = set()
all_locs = set(self.USABLE_LOCATIONS) - {'all'}
locations = all_locs if location == 'all' else [location]
q = {
@@ -150,8 +150,8 @@ class SearchFilter(SearchQueryParser):
query = query.lower()

if location not in self.USABLE_LOCATIONS:
-return set([])
-matches = set([])
+return set()
+matches = set()
all_locs = set(self.USABLE_LOCATIONS) - {'all'}
locations = all_locs if location == 'all' else [location]
q = {
@@ -29,7 +29,7 @@ def find_folders_under(root, db, add_root=True, # {{{

root = os.path.abspath(root)

-ans = set([])
+ans = set()
for dirpath, dirnames, __ in os.walk(root, topdown=True, followlinks=follow_links):
if cancel_callback():
break
@@ -432,7 +432,7 @@ class ResultCache(SearchQueryParser): # {{{
}

def get_numeric_matches(self, location, query, candidates, val_func=None):
-matches = set([])
+matches = set()
if len(query) == 0:
return matches

@@ -499,7 +499,7 @@ class ResultCache(SearchQueryParser): # {{{
return matches

def get_user_category_matches(self, location, query, candidates):
-matches = set([])
+matches = set()
if self.db_prefs is None or len(query) < 2:
return matches
user_cats = self.db_prefs.get('user_categories', [])
@@ -522,7 +522,7 @@ class ResultCache(SearchQueryParser): # {{{
return matches

def get_keypair_matches(self, location, query, candidates):
-matches = set([])
+matches = set()
if query.find(':') >= 0:
q = [q.strip() for q in query.split(':')]
if len(q) != 2:
@@ -640,7 +640,7 @@ class ResultCache(SearchQueryParser): # {{{
allow_recursion=True):
# If candidates is not None, it must not be modified. Changing its
# value will break query optimization in the search parser
-matches = set([])
+matches = set()
if candidates is None:
candidates = self.universal_set()
if len(candidates) == 0:
@@ -681,7 +681,7 @@ class ResultCache(SearchQueryParser): # {{{
# apply the limit if appropriate
if location == 'all' and prefs['limit_search_columns'] and \
prefs['limit_search_columns_to']:
-terms = set([])
+terms = set()
for l in prefs['limit_search_columns_to']:
l = icu_lower(l.strip())
if l and l != 'all' and l in self.all_search_locations:
@@ -159,7 +159,7 @@ class LibraryDatabase2(LibraryDatabase, SchemaUpgrade, CustomColumns):
self.dirtied_lock = threading.RLock()
if not os.path.exists(library_path):
os.makedirs(library_path)
-self.listeners = set([])
+self.listeners = set()
self.library_path = os.path.abspath(library_path)
self.row_factory = row_factory
self.dbpath = os.path.join(library_path, 'metadata.db')
@@ -1122,7 +1122,7 @@ class LibraryDatabase2(LibraryDatabase, SchemaUpgrade, CustomColumns):
title = pat.sub(repl, title)
return title

-identical_book_ids = set([])
+identical_book_ids = set()
if mi.authors:
try:
quathors = mi.authors[:10] # Too many authors causes parsing of
@@ -1247,7 +1247,7 @@ class LibraryDatabase2(LibraryDatabase, SchemaUpgrade, CustomColumns):
def all_formats(self):
formats = self.conn.get('SELECT DISTINCT format from data')
if not formats:
-return set([])
+return set()
return {f[0] for f in formats}

def format_files(self, index, index_is_id=False):
@@ -1700,7 +1700,7 @@ class LibraryDatabase2(LibraryDatabase, SchemaUpgrade, CustomColumns):
self.conn.commit()

def get_books_for_category(self, category, id_):
-ans = set([])
+ans = set()

if category not in self.field_metadata:
return ans
@@ -2984,7 +2984,7 @@ class LibraryDatabase2(LibraryDatabase, SchemaUpgrade, CustomColumns):
'SELECT name FROM tags WHERE id IN (SELECT tag FROM books_tags_link WHERE book=?)',
(id,), all=True)
if not result:
-return set([])
+return set()
return {r[0] for r in result}

@classmethod
@@ -2993,7 +2993,7 @@ class LibraryDatabase2(LibraryDatabase, SchemaUpgrade, CustomColumns):
tags = [x.decode(preferred_encoding, 'replace')
if isbytestring(x) else x for x in tags]
tags = [u' '.join(x.split()) for x in tags]
-ans, seen = [], set([])
+ans, seen = [], set()
for tag in tags:
if tag.lower() not in seen:
seen.add(tag.lower())
@@ -3580,7 +3580,7 @@ class LibraryDatabase2(LibraryDatabase, SchemaUpgrade, CustomColumns):

def get_top_level_move_items(self):
items = set(os.listdir(self.library_path))
-paths = set([])
+paths = set()
for x in self.data.universal_set():
path = self.path(x, index_is_id=True)
path = path.split(os.sep)[0]
@@ -3602,7 +3602,7 @@ class LibraryDatabase2(LibraryDatabase, SchemaUpgrade, CustomColumns):
progress = lambda x:x
if not os.path.exists(newloc):
os.makedirs(newloc)
-old_dirs = set([])
+old_dirs = set()
items, path_map = self.get_top_level_move_items()
for x in items:
src = os.path.join(self.library_path, x)
@@ -94,7 +94,7 @@ class DynamicFilter(object):

def __init__(self, name):
self.name = name
-self.ids = frozenset([])
+self.ids = frozenset()

def __call__(self, id_):
return int(id_ in self.ids)
@@ -337,7 +337,7 @@ class SearchQueryParser(object):
def parse(self, query, candidates=None):
# empty the list of searches used for recursion testing
self.recurse_level = 0
-self.searches_seen = set([])
+self.searches_seen = set()
candidates = self.universal_set()
return self._parse(query, candidates=candidates)

@@ -309,7 +309,7 @@ class Tester(SearchQueryParser):
'(tag:txt or tag:pdf)': {33, 258, 354, 305, 242, 51, 55, 56, 154},
'(tag:txt OR tag:pdf) and author:Tolstoy': {55, 56},
'Tolstoy txt': {55, 56},
-'Hamilton Amsterdam' : set([]),
+'Hamilton Amsterdam' : set(),
u'Beär' : {91},
'dysfunc or tolstoy': {348, 55, 56},
'tag:txt AND NOT tolstoy': {33, 258, 354, 305, 242, 154},
@@ -341,7 +341,7 @@ class Tester(SearchQueryParser):
getter = lambda x: ''

if not query:
-return set([])
+return set()
query = query.lower()
if candidates:
return set(key for key, val in self.texts.items()
@@ -297,7 +297,7 @@ class RecipeModel(QAbstractItemModel, AdaptSQP):
query = query.strip().lower()
if not query:
return self.universal_set()
-results = set([])
+results = set()
for urn in self.universal_set():
recipe = self.recipe_from_urn(urn)
if query in recipe.get('title', '').lower() or \