Mirror of https://github.com/kovidgoyal/calibre.git (synced 2025-07-09 03:04:10 -04:00)
DRYer

commit dc92c8f9bb
parent 7a4a6d0d19
@@ -15,7 +15,6 @@ except ImportError:
 from lxml import etree, html
 
 from calibre import url_slash_cleaner
-from calibre.ebooks.chardet import strip_encoding_declarations
 from calibre.ebooks.metadata import authors_to_string
 from calibre.gui2 import open_url
 from calibre.gui2.store import StorePlugin
@@ -23,17 +22,15 @@ from calibre.gui2.store.basic_config import BasicStoreConfig
 from calibre.gui2.store.search_result import SearchResult
 from calibre.gui2.store.web_store_dialog import WebStoreDialog
 
-scraper = None
-
 
 def read_url(url):
     # Kobo uses Akamai which has some bot detection that uses network/tls
     # protocol data. So use the Chromium network stack to make the request
-    global scraper
-    if scraper is None:
-        from calibre.scraper.simple import Overseer
-        scraper = Overseer()
-    return strip_encoding_declarations(scraper.fetch_url(url))
+    from calibre.scraper.simple import read_url as ru
+    return ru(read_url.storage, url)
+
+
+read_url.storage = []
 
 
 def search_kobo(query, max_results=10, timeout=60, write_html_to=None):
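The plugin-side hunk above replaces a module-level scraper global with a one-slot storage list kept as a function attribute (read_url.storage), and delegates construction of the Overseer to a shared read_url(storage, url) helper in calibre.scraper.simple (added in the next hunk). Below is a minimal, self-contained sketch of that pattern in plain Python; ExpensiveClient, _first_use_lock and the example URL are placeholders for illustration, not calibre code.

from threading import Lock

_first_use_lock = Lock()


class ExpensiveClient:
    # Stand-in for calibre's Overseer: something costly to construct.
    def fetch_url(self, url):
        return '<html>contents of %s</html>' % url


def shared_read_url(storage, url):
    # Build the client at most once per storage list; the lock keeps two
    # threads racing on the first call from constructing two clients.
    with _first_use_lock:
        if not storage:
            storage.append(ExpensiveClient())
        client = storage[0]
    return client.fetch_url(url)


def read_url(url):
    # The caller keeps its own slot as a function attribute instead of a
    # module-level "scraper = None" global.
    return shared_read_url(read_url.storage, url)


read_url.storage = []

print(read_url('https://example.com'))  # the client is built on this first call

Keeping the slot in a caller-owned list rather than a true module global lets several callers share the same helper without sharing one scraper instance.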
@@ -114,6 +114,18 @@ def cleanup_overseers():
     return join_all
 
 
+read_url_lock = Lock()
+
+
+def read_url(storage, url):
+    with read_url_lock:
+        if not storage:
+            storage.append(Overseer())
+        scraper = storage[0]
+    from calibre.ebooks.chardet import strip_encoding_declarations
+    return strip_encoding_declarations(scraper.fetch_url(url))
+
+
 def find_tests():
     import re
     import unittest
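With the shared helper in place, any other store plugin can reuse it instead of managing its own Overseer; a hedged usage sketch follows, where the read_url(storage, url) signature comes from the hunk above and the module-level list name is illustrative only.

from calibre.scraper.simple import read_url as shared_read_url

_scraper_storage = []  # illustrative name; one lazily filled slot for this plugin


def read_url(url):
    # The shared helper creates a single Overseer for this plugin on first use
    # and returns the fetched HTML with encoding declarations stripped.
    return shared_read_url(_scraper_storage, url)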