Edit book: Spell check: Ignore soft hyphens in words

Kovid Goyal 2019-12-18 10:26:38 +05:30
parent ab81c2f309
commit 5ad63e6d9a


@@ -20,8 +20,7 @@ from calibre.spell import parse_lang_code
 from calibre.utils.config import JSONConfig
 from calibre.utils.icu import capitalize
 from calibre.utils.localization import get_lang, get_system_locale
-from polyglot.builtins import filter, iteritems, itervalues, unicode_type
+from polyglot.builtins import filter, iteritems, itervalues, map, unicode_type

 Dictionary = namedtuple('Dictionary', 'primary_locale locales dicpath affpath builtin name id')
 LoadedDictionary = namedtuple('Dictionary', 'primary_locale locales obj builtin name id')
@@ -35,6 +34,11 @@ dprefs.defaults['user_dictionaries'] = [{'name':_('Default'), 'is_active':True,
 not_present = object()


+def normalize_word(word):
+    # remove soft hyphens
+    return unicode_type(word).replace('\u00ad', '')
+
+
 class UserDictionary(object):

     __slots__ = ('name', 'is_active', 'words')
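
The helper added above is the whole mechanism: U+00AD is the soft hyphen, an invisible code point inserted into ebook text to mark optional line-break positions, so it has to be stripped before a word is looked up, cached, or stored. A minimal standalone sketch of the same idea, using plain Python 3 str in place of calibre's unicode_type shim:

def normalize_word(word):
    # U+00AD is the soft hyphen: an optional line-break marker, not part of the word
    return str(word).replace('\u00ad', '')

print(normalize_word('dic\u00adtionary'))                                   # -> dictionary
print(normalize_word('dic\u00adtionary') == normalize_word('dictionary'))  # -> True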
@@ -227,14 +231,17 @@ class Dictionaries(object):
         return ans

     def ignore_word(self, word, locale):
+        word = normalize_word(word)
         self.ignored_words.add((word, locale.langcode))
         self.word_cache[(word, locale)] = True

     def unignore_word(self, word, locale):
+        word = normalize_word(word)
         self.ignored_words.discard((word, locale.langcode))
         self.word_cache.pop((word, locale), None)

     def is_word_ignored(self, word, locale):
+        word = normalize_word(word)
         return (word, locale.langcode) in self.ignored_words

     @property
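
With ignore_word, unignore_word and is_word_ignored all normalizing before they build the (word, langcode) key, a word ignored with a soft hyphen in it is also ignored in its plain form, and vice versa. An illustrative sketch of that keying, with a bare set standing in for Dictionaries.ignored_words (the standalone function names here are stand-ins, not calibre's API):

def normalize_word(word):
    return str(word).replace('\u00ad', '')   # same stripping as the patch above

ignored_words = set()                        # stands in for Dictionaries.ignored_words

def ignore_word(word, langcode):
    ignored_words.add((normalize_word(word), langcode))

def is_word_ignored(word, langcode):
    return (normalize_word(word), langcode) in ignored_words

ignore_word('cal\u00adibre', 'en')           # ignored with a soft hyphen inside
print(is_word_ignored('calibre', 'en'))      # -> True: both spellings share one key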
@@ -268,12 +275,14 @@ class Dictionaries(object):
         for d in itervalues(self.dictionaries):
             if d and getattr(d.primary_locale, 'langcode', None) == langcode:
                 for word in words:
+                    word = normalize_word(word)
                     d.obj.add(word)

     def remove_user_words(self, words, langcode):
         for d in itervalues(self.dictionaries):
             if d and d.primary_locale.langcode == langcode:
                 for word in words:
+                    word = normalize_word(word)
                     d.obj.remove(word)

     def add_to_user_dictionary(self, name, word, locale):
@@ -282,9 +291,11 @@ class Dictionaries(object):
             raise ValueError('Cannot add to the dictionary named: %s as no such dictionary exists' % name)
         wl = len(ud.words)
         if isinstance(word, (set, frozenset)):
+            word = frozenset(map(normalize_word, word))
             ud.words |= word
             self.add_user_words(word, locale.langcode)
         else:
+            word = normalize_word(word)
             ud.words.add((word, locale.langcode))
             self.add_user_words((word,), locale.langcode)
         if len(ud.words) > wl:
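
add_to_user_dictionary accepts either a single word or a set/frozenset of words, so both branches normalize: the collection branch maps normalize_word over every element via frozenset(map(...)), while the single-word branch normalizes in place before building the (word, langcode) tuple. A small sketch of the collection branch in isolation:

def normalize_word(word):
    return str(word).replace('\u00ad', '')

words = {'dic\u00adtionary', 'calibre', 'e\u00adbook'}
normalized = frozenset(map(normalize_word, words))
print(sorted(normalized))                    # -> ['calibre', 'dictionary', 'ebook']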
@@ -297,6 +308,7 @@ class Dictionaries(object):
         return False

     def remove_from_user_dictionaries(self, word, locale):
+        word = normalize_word(word)
         key = (word, locale.langcode)
         changed = False
         for ud in self.active_user_dictionaries:
@@ -312,7 +324,7 @@ class Dictionaries(object):
     def remove_from_user_dictionary(self, name, words):
         changed = False
         removals = defaultdict(set)
-        keys = [(w, l.langcode) for w, l in words]
+        keys = [(normalize_word(w), l.langcode) for w, l in words]
         for d in self.all_user_dictionaries:
             if d.name == name:
                 for key in keys:
@@ -329,6 +341,7 @@ class Dictionaries(object):
         return changed

     def word_in_user_dictionary(self, word, locale):
+        word = normalize_word(word)
         key = (word, locale.langcode)
         for ud in self.active_user_dictionaries:
             if key in ud.words:
@@ -364,6 +377,7 @@ class Dictionaries(object):
         return changed

     def recognized(self, word, locale=None):
+        word = normalize_word(word)
         locale = locale or self.default_locale
         key = (word, locale)
         ans = self.word_cache.get(key, None)
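
recognized() caches its verdicts in word_cache keyed on (word, locale), so normalizing first means the soft-hyphenated and plain spellings of a word share one cache entry and one spell-check result. A rough sketch of that caching pattern, with a trivial check function standing in for the real dictionary lookup (names and defaults are assumptions, not calibre's API):

def normalize_word(word):
    return str(word).replace('\u00ad', '')

word_cache = {}                              # stands in for Dictionaries.word_cache

def recognized(word, locale='en', check=str.isalpha):
    word = normalize_word(word)
    key = (word, locale)
    ans = word_cache.get(key, None)
    if ans is None:
        ans = word_cache[key] = check(word)  # the real code asks the loaded dictionary here
    return ans

recognized('cal\u00adibre')                  # cached under ('calibre', 'en')
print(('calibre', 'en') in word_cache)       # -> True: one entry serves both spellings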
@@ -392,6 +406,7 @@ class Dictionaries(object):
         return ans

     def suggestions(self, word, locale=None):
+        word = normalize_word(word)
         locale = locale or self.default_locale
         d = self.dictionary_for_locale(locale)
         has_unicode_hyphen = '\u2010' in word
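
One distinction worth noting in the suggestions() context above: U+00AD (the soft hyphen) is stripped as noise by normalize_word, while U+2010 (a real Unicode hyphen) is kept and detected separately through has_unicode_hyphen. A quick check of the two code points:

word = 'long\u2010term'                      # U+2010 is a real hyphen, not a soft one
print('\u00ad' in word)                      # -> False: nothing for normalize_word to strip
print(word.replace('\u00ad', '') == word)    # -> True: the real hyphen survives normalization
print('\u2010' in word)                      # -> True: what has_unicode_hyphen detects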