Experimenting with more thread limitations

This commit is contained in:
krateng 2022-03-29 18:46:22 +02:00
parent e611d05c34
commit 3275e4ec5d
3 changed files with 47 additions and 45 deletions

View File

@ -13,7 +13,7 @@ import base64
import requests import requests
import datauri import datauri
import io import io
-from threading import Thread, Timer
+from threading import Thread, Timer, BoundedSemaphore
import re import re
import datetime import datetime
@ -118,11 +118,12 @@ def get_artist_image(artist=None,artist_id=None):
resolve_semaphore = BoundedSemaphore(8)
def resolve_track_image(track_id): def resolve_track_image(track_id):
with resolve_semaphore:
# check cache # check cache
result = get_image_from_cache(track_id,'tracks') result = get_image_from_cache(track_id,'tracks')
if result is not None: if result is not None:
@ -150,6 +151,7 @@ def resolve_track_image(track_id):
def resolve_artist_image(artist_id): def resolve_artist_image(artist_id):
with resolve_semaphore:
# check cache # check cache
result = get_image_from_cache(artist_id,'artists') result = get_image_from_cache(artist_id,'artists')
if result is not None: if result is not None:

View File

@ -35,7 +35,7 @@ from .proccontrol.profiler import profile
PORT = malojaconfig["PORT"] PORT = malojaconfig["PORT"]
HOST = malojaconfig["HOST"] HOST = malojaconfig["HOST"]
-THREADS = 24
+THREADS = 8
BaseRequest.MEMFILE_MAX = 15 * 1024 * 1024 BaseRequest.MEMFILE_MAX = 15 * 1024 * 1024
#STATICFOLDER = importlib.resources.path(__name__,"web/static") #STATICFOLDER = importlib.resources.path(__name__,"web/static")

View File

@ -11,7 +11,7 @@ import json
import urllib.parse, urllib.request import urllib.parse, urllib.request
import base64 import base64
from doreah.logging import log from doreah.logging import log
-from threading import Semaphore
+from threading import BoundedSemaphore
from ..globalconf import malojaconfig from ..globalconf import malojaconfig
from .. import database from .. import database
@ -26,7 +26,7 @@ services = {
# have a limited number of worker threads so we don't completely hog the cpu with # have a limited number of worker threads so we don't completely hog the cpu with
# these requests. they are mostly network bound, so python will happily open up 200 new # these requests. they are mostly network bound, so python will happily open up 200 new
# requests and then when all the responses come in we suddenly can't load pages anymore # requests and then when all the responses come in we suddenly can't load pages anymore
-thirdpartylock = Semaphore(4)
+thirdpartylock = BoundedSemaphore(4)
def import_scrobbles(identifier): def import_scrobbles(identifier):