Use a monotonic clock
commit fe1886393b
parent bec6c7c0cd
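The change is mechanical: every time.time() call used for measuring elapsed time becomes time.monotonic(). The motivation is worth spelling out, though. time.time() reports the wall clock, which NTP or the user can step backwards or forwards at any moment, so a duration computed from two readings can come out negative or absurdly large; time.monotonic() is guaranteed never to go backwards. A minimal sketch, not part of the commit (do_work() is a hypothetical stand-in for a fetch or parse step):

import time


def do_work():  # hypothetical stand-in for a fetch or parse step
    time.sleep(0.1)


# Wall-clock timing: time.time() can jump if NTP or the user adjusts
# the system clock, so the difference may be negative or inflated.
st = time.time()
do_work()
print('wall-clock elapsed: %.3f seconds' % (time.time() - st))

# Monotonic timing: the reference point is arbitrary, but the clock
# never goes backwards, so differences are always valid durations.
st = time.monotonic()
do_work()
print('monotonic elapsed: %.3f seconds' % (time.monotonic() - st))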
@@ -236,7 +236,7 @@ class RecursiveFetcher(object):
     def fetch_url(self, url):
         data = None
         self.log.debug('Fetching', url)
-        st = time.time()
+        st = time.monotonic()
 
         # Check for a URL pointing to the local filesystem and special case it
         # for efficiency and robustness. Bypasses delay checking as it does not
@@ -255,10 +255,10 @@ class RecursiveFetcher(object):
                 data = response(f.read())
                 data.newurl = 'file:'+url  # This is what mechanize does for
                                            # local URLs
-            self.log.debug('Fetched %s in %.1f seconds' % (url, time.time() - st))
+            self.log.debug('Fetched %s in %.1f seconds' % (url, time.monotonic() - st))
             return data
 
-        delta = time.time() - self.last_fetch_at
+        delta = time.monotonic() - self.last_fetch_at
         if delta < self.delay:
             time.sleep(self.delay - delta)
         # mechanize does not handle quoting automatically
@@ -292,8 +292,8 @@ class RecursiveFetcher(object):
             else:
                 raise err
         finally:
-            self.last_fetch_at = time.time()
-        self.log.debug('Fetched %s in %f seconds' % (url, time.time() - st))
+            self.last_fetch_at = time.monotonic()
+        self.log.debug('Fetched %s in %f seconds' % (url, time.monotonic() - st))
         return data
 
     def start_fetch(self, url):
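The same swap also protects the politeness delay between fetches: self.last_fetch_at now stores a monotonic timestamp, and the sleep is computed from the difference. Note that the read (the delta computation) and the write (in the finally: block) must switch clocks together; mixing a wall-clock timestamp with a monotonic reading would produce a meaningless delta. A reduced sketch of the throttling pattern, with names simplified from the diff (Throttle and its methods are illustrative, not calibre API):

import time


class Throttle:
    # Sketch of the delay logic in fetch_url, reduced to its core.
    # `delay` mirrors RecursiveFetcher.delay; the rest is simplified.

    def __init__(self, delay=1.0):
        self.delay = delay
        self.last_fetch_at = 0.0  # monotonic timestamp of the last fetch

    def wait_if_needed(self):
        # Sleep just long enough to keep at least `delay` seconds
        # between consecutive fetches.
        delta = time.monotonic() - self.last_fetch_at
        if delta < self.delay:
            time.sleep(self.delay - delta)

    def record_fetch(self):
        # Called after every fetch attempt; the diff does this in a
        # finally: block so that failed fetches also count.
        self.last_fetch_at = time.monotonic()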
@@ -341,9 +341,12 @@ class RecursiveFetcher(object):
                 iurl = tag['href']
                 if not urlsplit(iurl).scheme:
                     iurl = urljoin(baseurl, iurl, False)
+                found_cached = False
                 with self.stylemap_lock:
                     if iurl in self.stylemap:
                         tag['href'] = self.stylemap[iurl]
-                        continue
+                        found_cached = True
+                if found_cached:
+                    continue
                 try:
                     data = self.fetch_url(iurl)
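The remaining hunks are not about clocks at all: they restructure the cache checks so that continue no longer sits inside the with block. Leaving a with via continue releases the lock either way, so behaviour is unchanged; the rewrite makes the lock-guarded section touch only the shared map, with loop control visibly outside it. A reduced sketch of the found_cached pattern under that reading (generic names, not the actual RecursiveFetcher code):

import threading

cache = {}  # shared url -> result map, standing in for self.stylemap
cache_lock = threading.Lock()


def process(urls, fetch):
    for url in urls:
        found_cached = False
        with cache_lock:  # hold the lock only for the dict lookup
            if url in cache:
                found_cached = True
        if found_cached:  # loop control stays outside the locked region
            continue
        data = fetch(url)  # slow network work runs without the lock held
        with cache_lock:
            cache[url] = data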
@@ -364,9 +367,12 @@ class RecursiveFetcher(object):
                 iurl = m.group(1)
                 if not urlsplit(iurl).scheme:
                     iurl = urljoin(baseurl, iurl, False)
+                found_cached = False
                 with self.stylemap_lock:
                     if iurl in self.stylemap:
                         ns.replaceWith(src.replace(m.group(1), self.stylemap[iurl]))
-                        continue
+                        found_cached = True
+                if found_cached:
+                    continue
                 try:
                     data = self.fetch_url(iurl)
@@ -402,9 +408,12 @@ class RecursiveFetcher(object):
             iurl = self.image_url_processor(baseurl, iurl)
             if not urlsplit(iurl).scheme:
                 iurl = urljoin(baseurl, iurl, False)
+            found_in_cache = False
             with self.imagemap_lock:
                 if iurl in self.imagemap:
                     tag['src'] = self.imagemap[iurl]
-                    continue
+                    found_in_cache = True
+            if found_in_cache:
+                continue
             try:
                 data = self.fetch_url(iurl)
@@ -529,9 +538,9 @@ class RecursiveFetcher(object):
                     else:
                         dsrc = xml_to_unicode(dsrc, self.verbose)[0]
 
-                    st = time.time()
+                    st = time.monotonic()
                     soup = self.get_soup(dsrc, url=iurl)
-                    self.log.debug('Parsed %s in %.1f seconds' % (iurl, time.time() - st))
+                    self.log.debug('Parsed %s in %.1f seconds' % (iurl, time.monotonic() - st))
 
                     base = soup.find('base', href=True)
                     if base is not None:
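One portability note the commit leaves implicit: time.monotonic() requires Python 3.3 or newer, and its epoch is undefined, so only differences between readings are meaningful, which is exactly how every call site in this diff uses it. A quick way to inspect the clock's properties:

import time

print(time.monotonic())  # e.g. 48123.652 -- not a wall-clock time
print(time.get_clock_info('monotonic'))  # shows adjustable=False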