News download: More multithreading fixes

Kovid Goyal 2010-01-02 10:22:54 -07:00
parent dcda88a054
commit 4378c69bcd
2 changed files with 5 additions and 5 deletions


@@ -403,9 +403,9 @@ class BasicNewsRecipe(Recipe):
         `url_or_raw`: Either a URL or the downloaded index page as a string
         '''
         if re.match(r'\w+://', url_or_raw):
-            f = self.browser.open(url_or_raw)
-            _raw = f.read()
-            f.close()
+            open_func = getattr(self.browser, 'open_novisit', self.browser.open)
+            with closing(open_func(url_or_raw)) as f:
+                _raw = f.read()
             if not _raw:
                 raise RuntimeError('Could not fetch index from %s'%url_or_raw)
         else:
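
What the first hunk changes, as I read it: the index fetch used to call self.browser.open() and close the response by hand, which leaves the response unclosed if read() raises, and it always "visited" the page through the browser. The new code prefers the browser's open_novisit method when it exists (mechanize-style browsers provide one that fetches without updating the browser's history/state, which is presumably friendlier to multithreaded news downloads) and wraps the response in contextlib.closing so it is closed even on error. Below is a minimal runnable sketch of that pattern; DummyBrowser, DummyResponse and fetch_index are illustrative names, not calibre code.

from contextlib import closing

class DummyResponse(object):
    """Stands in for the object a browser's open()/open_novisit() returns."""
    def __init__(self, data):
        self.data = data
        self.closed = False
    def read(self):
        return self.data
    def close(self):
        self.closed = True

class DummyBrowser(object):
    """Toy browser with only open(); a mechanize-style browser would also
    offer open_novisit(), and the getattr() fallback below would pick it."""
    def open(self, url):
        return DummyResponse('<html>index page for %s</html>' % url)

def fetch_index(browser, url):
    # Prefer open_novisit when available, fall back to open; closing()
    # guarantees the response is closed even if read() raises.
    open_func = getattr(browser, 'open_novisit', browser.open)
    with closing(open_func(url)) as f:
        raw = f.read()
    if not raw:
        raise RuntimeError('Could not fetch index from %s' % url)
    return raw

if __name__ == '__main__':
    print(fetch_index(DummyBrowser(), 'http://example.com'))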


@@ -198,8 +198,8 @@ class RecursiveFetcher(object):
             for i in range(2, 6):
                 purl[i] = quote(purl[i])
             url = urlparse.urlunparse(purl)
+        open_func = getattr(self.browser, 'open_novisit', self.browser.open)
         try:
-            open_func = getattr(self.browser, 'open_novisit', self.browser.open)
             with closing(open_func(url, timeout=self.timeout)) as f:
                 data = response(f.read()+f.read())
                 data.newurl = f.geturl()
@@ -210,7 +210,7 @@ class RecursiveFetcher(object):
                 getattr(getattr(err, 'args', [None])[0], 'errno', None) == -2: # Connection reset by peer or Name or service not know
                 self.log.debug('Temporary error, retrying in 1 second')
                 time.sleep(1)
-                with closing(self.browser.open(url, timeout=self.timeout)) as f:
+                with closing(open_func(url, timeout=self.timeout)) as f:
                     data = response(f.read()+f.read())
                     data.newurl = f.geturl()
             else:
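
In RecursiveFetcher the getattr() line appears to be hoisted out of the try block, and, more importantly for the fix, the retry taken after a transient failure (connection reset, errno -2) now reuses open_func instead of going back to self.browser.open, so the retry behaves the same way as the first attempt. Here is a self-contained sketch of that retry shape, with hypothetical TransientError/FlakyBrowser/FakeResponse stand-ins for the urllib2.URLError and errno handling in the real fetcher:

import time
from contextlib import closing

class TransientError(Exception):
    """Placeholder for the temporary network failures the real code checks
    for (connection reset by peer, name-or-service-not-known)."""

class FakeResponse(object):
    def __init__(self, data):
        self.data = data
    def read(self):
        return self.data
    def close(self):
        pass

class FlakyBrowser(object):
    """Toy browser whose first open() fails with a transient error."""
    def __init__(self):
        self.calls = 0
    def open(self, url, timeout=None):
        self.calls += 1
        if self.calls == 1:
            raise TransientError('connection reset by peer')
        return FakeResponse('page body of %s' % url)

def fetch_with_one_retry(browser, url, timeout=30.0):
    # The callable chosen once here is used by both the first attempt and
    # the retry, mirroring the change in the hunk above.
    open_func = getattr(browser, 'open_novisit', browser.open)
    try:
        with closing(open_func(url, timeout=timeout)) as f:
            return f.read()
    except TransientError:
        time.sleep(1)  # the real fetcher also waits a second before retrying
        with closing(open_func(url, timeout=timeout)) as f:
            return f.read()

if __name__ == '__main__':
    print(fetch_with_one_retry(FlakyBrowser(), 'http://example.com'))

Run as a script, the first open() raises, the handler sleeps for a second, and the second attempt succeeds and prints the page body.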