Merge branch 'krateng:master' into as2.0-xml

duck 2024-01-03 21:50:56 -05:00, committed by GitHub
commit 5ec8035cb5 (GPG Key ID: 4AEE18F83AFDEB23)
21 changed files with 197 additions and 110 deletions

.github/FUNDING.yml vendored
View File

@@ -1 +1,2 @@
 custom: ["https://paypal.me/krateng"]
+patreon: krateng

View File

@@ -1,4 +1,4 @@
-FROM lsiobase/alpine:3.17 as base
+FROM lsiobase/alpine:3.19 as base
 WORKDIR /usr/src/app
@@ -32,13 +32,15 @@ RUN \
    tzdata && \
    echo "" && \
    echo "**** install pip dependencies ****" && \
+   python3 -m venv /venv && \
+   . /venv/bin/activate && \
    python3 -m ensurepip && \
-   pip3 install -U --no-cache-dir \
+   pip install -U --no-cache-dir \
        pip \
        wheel && \
    echo "" && \
    echo "**** install maloja requirements ****" && \
-   pip3 install --no-cache-dir -r requirements.txt && \
+   pip install --no-cache-dir -r requirements.txt && \
    echo "" && \
    echo "**** cleanup ****" && \
    apk del --purge \
@@ -56,6 +58,8 @@ RUN \
    echo "**** install maloja ****" && \
    apk add --no-cache --virtual=install-deps \
        py3-pip && \
+   python3 -m venv /venv && \
+   . /venv/bin/activate && \
    pip3 install /usr/src/app && \
    apk del --purge \
        install-deps && \

View File

@@ -4,4 +4,4 @@
 echo -e "\nMaloja is starting!"
 exec \
-   s6-setuidgid abc python -m maloja run
+   s6-setuidgid abc /venv/bin/python -m maloja run

View File

@@ -32,8 +32,15 @@ minor_release_name: "Nicole"
    - "[Bugfix] Fixed Spotify authentication thread blocking the process from terminating"
    - "[Technical] Upgraded all third party modules to use requests module and send User Agent"
 3.2.2:
+  commit: "febaff97228b37a192f2630aa331cac5e5c3e98e"
   notes:
    - "[Security] Fixed XSS vulnerability in error page (Disclosed by https://github.com/NULLYUKI)"
    - "[Architecture] Reworked the default directory selection"
    - "[Feature] Added option to show scrobbles on tile charts"
    - "[Bugfix] Fixed Last.fm authentication"
+3.2.3:
+  notes:
+   - "[Architecture] Upgraded doreah, significant rework of authentication"
+   - "[Bugfix] Fixed initial permission check"
+   - "[Bugfix] Fixed and updated various texts"
+   - "[Bugfix] Fixed moving tracks to different album"

View File

@@ -7,7 +7,6 @@ from bottle import response, static_file, FormsDict
 from inspect import signature
 from doreah.logging import log
-from doreah.auth import authenticated_function
 # nimrodel API
 from nimrodel import EAPI as API
@@ -15,7 +14,7 @@ from nimrodel import Multi
 from .. import database
-from ..pkg_global.conf import malojaconfig, data_dir
+from ..pkg_global.conf import malojaconfig, data_dir, auth
@@ -82,6 +81,14 @@ errors = {
            'desc':"This entity does not exist in the database."
        }
    }),
+   database.exceptions.DuplicateTimestamp: lambda e: (409,{
+       "status":"error",
+       "error":{
+           'type':'duplicate_timestamp',
+           'value':e.rejected_scrobble,
+           'desc':"A scrobble is already registered with this timestamp."
+       }
+   }),
    images.MalformedB64: lambda e: (400,{
        "status":"failure",
        "error":{
@@ -474,7 +481,7 @@ def get_top_artists_external(k_filter, k_limit, k_delimit, k_amount):
    :rtype: Dictionary"""
    ckeys = {**k_limit, **k_delimit}
-   results = database.get_top_artists(**ckeys)
+   results = database.get_top_artists(**ckeys,compatibility=True)
    return {
        "status":"ok",
@@ -493,7 +500,7 @@ def get_top_tracks_external(k_filter, k_limit, k_delimit, k_amount):
    :rtype: Dictionary"""
    ckeys = {**k_limit, **k_delimit}
-   results = database.get_top_tracks(**ckeys)
+   results = database.get_top_tracks(**ckeys,compatibility=True)
    # IMPLEMENT THIS FOR TOP TRACKS OF ARTIST/ALBUM AS WELL?
    return {
@@ -513,7 +520,7 @@ def get_top_albums_external(k_filter, k_limit, k_delimit, k_amount):
    :rtype: Dictionary"""
    ckeys = {**k_limit, **k_delimit}
-   results = database.get_top_albums(**ckeys)
+   results = database.get_top_albums(**ckeys,compatibility=True)
    # IMPLEMENT THIS FOR TOP ALBUMS OF ARTIST AS WELL?
    return {
@@ -567,7 +574,7 @@ def album_info_external(k_filter, k_limit, k_delimit, k_amount):
 @api.post("newscrobble")
-@authenticated_function(alternate=api_key_correct,api=True,pass_auth_result_as='auth_result')
+@auth.authenticated_function(alternate=api_key_correct,api=True,pass_auth_result_as='auth_result')
 @catch_exceptions
 def post_scrobble(
    artist:Multi=None,
@@ -647,7 +654,7 @@ def post_scrobble(
 @api.post("addpicture")
-@authenticated_function(alternate=api_key_correct,api=True)
+@auth.authenticated_function(alternate=api_key_correct,api=True)
 @catch_exceptions
 @convert_kwargs
 def add_picture(k_filter, k_limit, k_delimit, k_amount, k_special):
@@ -670,7 +677,7 @@ def add_picture(k_filter, k_limit, k_delimit, k_amount, k_special):
 @api.post("importrules")
-@authenticated_function(api=True)
+@auth.authenticated_function(api=True)
 @catch_exceptions
 def import_rulemodule(**keys):
    """Internal Use Only"""
@@ -689,7 +696,7 @@ def import_rulemodule(**keys):
 @api.post("rebuild")
-@authenticated_function(api=True)
+@auth.authenticated_function(api=True)
 @catch_exceptions
 def rebuild(**keys):
    """Internal Use Only"""
@@ -765,7 +772,7 @@ def search(**keys):
 @api.post("newrule")
-@authenticated_function(api=True)
+@auth.authenticated_function(api=True)
 @catch_exceptions
 def newrule(**keys):
    """Internal Use Only"""
@@ -776,21 +783,21 @@ def newrule(**keys):
 @api.post("settings")
-@authenticated_function(api=True)
+@auth.authenticated_function(api=True)
 @catch_exceptions
 def set_settings(**keys):
    """Internal Use Only"""
    malojaconfig.update(keys)
 @api.post("apikeys")
-@authenticated_function(api=True)
+@auth.authenticated_function(api=True)
 @catch_exceptions
 def set_apikeys(**keys):
    """Internal Use Only"""
    apikeystore.update(keys)
 @api.post("import")
-@authenticated_function(api=True)
+@auth.authenticated_function(api=True)
 @catch_exceptions
 def import_scrobbles(identifier):
    """Internal Use Only"""
@@ -798,7 +805,7 @@ def import_scrobbles(identifier):
    import_scrobbles(identifier)
 @api.get("backup")
-@authenticated_function(api=True)
+@auth.authenticated_function(api=True)
 @catch_exceptions
 def get_backup(**keys):
    """Internal Use Only"""
@@ -811,7 +818,7 @@ def get_backup(**keys):
    return static_file(os.path.basename(archivefile),root=tmpfolder)
 @api.get("export")
-@authenticated_function(api=True)
+@auth.authenticated_function(api=True)
 @catch_exceptions
 def get_export(**keys):
    """Internal Use Only"""
@@ -825,7 +832,7 @@ def get_export(**keys):
 @api.post("delete_scrobble")
-@authenticated_function(api=True)
+@auth.authenticated_function(api=True)
 @catch_exceptions
 def delete_scrobble(timestamp):
    """Internal Use Only"""
@@ -837,7 +844,7 @@ def delete_scrobble(timestamp):
 @api.post("edit_artist")
-@authenticated_function(api=True)
+@auth.authenticated_function(api=True)
 @catch_exceptions
 def edit_artist(id,name):
    """Internal Use Only"""
@@ -847,7 +854,7 @@ def edit_artist(id,name):
    }
 @api.post("edit_track")
-@authenticated_function(api=True)
+@auth.authenticated_function(api=True)
 @catch_exceptions
 def edit_track(id,title):
    """Internal Use Only"""
@@ -857,7 +864,7 @@ def edit_track(id,title):
    }
 @api.post("edit_album")
-@authenticated_function(api=True)
+@auth.authenticated_function(api=True)
 @catch_exceptions
 def edit_album(id,albumtitle):
    """Internal Use Only"""
@@ -868,7 +875,7 @@ def edit_album(id,albumtitle):
 @api.post("merge_tracks")
-@authenticated_function(api=True)
+@auth.authenticated_function(api=True)
 @catch_exceptions
 def merge_tracks(target_id,source_ids):
    """Internal Use Only"""
@@ -879,7 +886,7 @@ def merge_tracks(target_id,source_ids):
    }
 @api.post("merge_artists")
-@authenticated_function(api=True)
+@auth.authenticated_function(api=True)
 @catch_exceptions
 def merge_artists(target_id,source_ids):
    """Internal Use Only"""
@@ -890,7 +897,7 @@ def merge_artists(target_id,source_ids):
    }
 @api.post("merge_albums")
-@authenticated_function(api=True)
+@auth.authenticated_function(api=True)
 @catch_exceptions
 def merge_artists(target_id,source_ids):
    """Internal Use Only"""
@@ -901,7 +908,7 @@ def merge_artists(target_id,source_ids):
    }
 @api.post("associate_albums_to_artist")
-@authenticated_function(api=True)
+@auth.authenticated_function(api=True)
 @catch_exceptions
 def associate_albums_to_artist(target_id,source_ids,remove=False):
    result = database.associate_albums_to_artist(target_id,source_ids,remove=remove)
@@ -913,7 +920,7 @@ def associate_albums_to_artist(target_id,source_ids,remove=False):
    }
 @api.post("associate_tracks_to_artist")
-@authenticated_function(api=True)
+@auth.authenticated_function(api=True)
 @catch_exceptions
 def associate_tracks_to_artist(target_id,source_ids,remove=False):
    result = database.associate_tracks_to_artist(target_id,source_ids,remove=remove)
@@ -925,7 +932,7 @@ def associate_tracks_to_artist(target_id,source_ids,remove=False):
    }
 @api.post("associate_tracks_to_album")
-@authenticated_function(api=True)
+@auth.authenticated_function(api=True)
 @catch_exceptions
 def associate_tracks_to_album(target_id,source_ids):
    result = database.associate_tracks_to_album(target_id,source_ids)
@@ -937,7 +944,7 @@ def associate_tracks_to_album(target_id,source_ids):
 @api.post("reparse_scrobble")
-@authenticated_function(api=True)
+@auth.authenticated_function(api=True)
 @catch_exceptions
 def reparse_scrobble(timestamp):
    """Internal Use Only"""

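A note on the DuplicateTimestamp handler added above: a scrobble whose timestamp is already taken is now answered with HTTP 409 and an error object of type duplicate_timestamp that carries the rejected scrobble. A minimal sketch of how a client might handle that response, assuming the native API is mounted at /apis/mlj_1 and with placeholder URL, API key and payload fields:

import requests

MALOJA_URL = "http://localhost:42010"   # assumption: local Maloja instance
API_KEY = "your-api-key"                # assumption: a key from the API key store

def scrobble(artist, title, timestamp):
    r = requests.post(
        f"{MALOJA_URL}/apis/mlj_1/newscrobble",
        json={"artist": artist, "title": title, "time": timestamp, "key": API_KEY},
    )
    if r.status_code == 409 and r.json().get("error", {}).get("type") == "duplicate_timestamp":
        # the server kept the existing scrobble and rejected this one
        return r.json()["error"]["value"]
    r.raise_for_status()
    return r.json()
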
View File

@@ -27,7 +27,6 @@ from . import exceptions
 # doreah toolkit
 from doreah.logging import log
-from doreah.auth import authenticated_api, authenticated_api_with_alternate
 import doreah
@@ -42,6 +41,7 @@ from collections import namedtuple
 from threading import Lock
 import yaml, json
 import math
+from itertools import takewhile
 # url handling
 import urllib
@@ -318,7 +318,7 @@ def associate_tracks_to_album(target_id,source_ids):
    if target_id:
        target = sqldb.get_album(target_id)
        log(f"Adding {sources} into {target}")
-       sqldb.add_tracks_to_albums({src:target_id for src in source_ids})
+       sqldb.add_tracks_to_albums({src:target_id for src in source_ids},replace=True)
    else:
        sqldb.remove_album(source_ids)
    result = {'sources':sources,'target':target}
@@ -570,7 +570,7 @@ def get_performance(dbconn=None,**keys):
    return results
 @waitfordb
-def get_top_artists(dbconn=None,**keys):
+def get_top_artists(dbconn=None,compatibility=True,**keys):
    separate = keys.get('separate')
@@ -578,42 +578,73 @@ def get_top_artists(dbconn=None,**keys):
    results = []
    for rng in rngs:
-       try:
-           res = get_charts_artists(timerange=rng,separate=separate,dbconn=dbconn)[0]
-           results.append({"range":rng,"artist":res["artist"],"scrobbles":res["scrobbles"],"real_scrobbles":res["real_scrobbles"],"associated_artists":sqldb.get_associated_artists(res["artist"])})
-       except Exception:
-           results.append({"range":rng,"artist":None,"scrobbles":0,"real_scrobbles":0})
+       result = {'range':rng}
+       res = get_charts_artists(timerange=rng,separate=separate,dbconn=dbconn)
+       result['top'] = [
+           {'artist': r['artist'], 'scrobbles': r['scrobbles'], 'real_scrobbles':r['real_scrobbles'], 'associated_artists': sqldb.get_associated_artists(r['artist'])}
+           for r in takewhile(lambda x:x['rank']==1,res)
+       ]
+       # for third party applications
+       if compatibility:
+           if result['top']:
+               result.update(result['top'][0])
+           else:
+               result.update({'artist':None,'scrobbles':0,'real_scrobbles':0})
+       results.append(result)
    return results
 @waitfordb
-def get_top_tracks(dbconn=None,**keys):
+def get_top_tracks(dbconn=None,compatibility=True,**keys):
    rngs = ranges(**{k:keys[k] for k in keys if k in ["since","to","within","timerange","step","stepn","trail"]})
    results = []
    for rng in rngs:
-       try:
-           res = get_charts_tracks(timerange=rng,dbconn=dbconn)[0]
-           results.append({"range":rng,"track":res["track"],"scrobbles":res["scrobbles"]})
-       except Exception:
-           results.append({"range":rng,"track":None,"scrobbles":0})
+       result = {'range':rng}
+       res = get_charts_tracks(timerange=rng,dbconn=dbconn)
+       result['top'] = [
+           {'track': r['track'], 'scrobbles': r['scrobbles']}
+           for r in takewhile(lambda x:x['rank']==1,res)
+       ]
+       # for third party applications
+       if compatibility:
+           if result['top']:
+               result.update(result['top'][0])
+           else:
+               result.update({'track':None,'scrobbles':0})
+       results.append(result)
    return results
 @waitfordb
-def get_top_albums(dbconn=None,**keys):
+def get_top_albums(dbconn=None,compatibility=True,**keys):
    rngs = ranges(**{k:keys[k] for k in keys if k in ["since","to","within","timerange","step","stepn","trail"]})
    results = []
    for rng in rngs:
-       try:
-           res = get_charts_albums(timerange=rng,dbconn=dbconn)[0]
-           results.append({"range":rng,"album":res["album"],"scrobbles":res["scrobbles"]})
-       except Exception:
-           results.append({"range":rng,"album":None,"scrobbles":0})
+       result = {'range':rng}
+       res = get_charts_tracks(timerange=rng,dbconn=dbconn)
+       result['top'] = [
+           {'album': r['album'], 'scrobbles': r['scrobbles']}
+           for r in takewhile(lambda x:x['rank']==1,res)
+       ]
+       # for third party applications
+       if compatibility:
+           if result['top']:
+               result.update(result['top'][0])
+           else:
+               result.update({'album':None,'scrobbles':0})
+       results.append(result)
    return results
@@ -913,7 +944,7 @@ def start_db():
    # inform time module about begin of scrobbling
    try:
-       firstscrobble = sqldb.get_scrobbles()[0]
+       firstscrobble = sqldb.get_scrobbles(limit=1)[0]
        register_scrobbletime(firstscrobble['time'])
    except IndexError:
        register_scrobbletime(int(datetime.datetime.now().timestamp()))

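The get_top_* rework above now returns, per range, a 'top' list containing every rank-1 entry (ties included via takewhile) and only copies the first entry into the old single-winner keys when compatibility=True, which is what the external API handlers pass. A standalone sketch of that flattening, using made-up chart rows instead of Maloja's database layer:

from itertools import takewhile

def top_with_compatibility(chart_rows, rng, compatibility=True):
    # chart_rows: dicts with 'rank', 'artist', 'scrobbles' (illustrative shape)
    result = {'range': rng}
    result['top'] = [
        {'artist': r['artist'], 'scrobbles': r['scrobbles']}
        for r in takewhile(lambda x: x['rank'] == 1, chart_rows)
    ]
    if compatibility:
        # old callers expect the winner's fields directly on the result
        result.update(result['top'][0] if result['top'] else {'artist': None, 'scrobbles': 0})
    return result

rows = [
    {'rank': 1, 'artist': 'A', 'scrobbles': 10},
    {'rank': 1, 'artist': 'B', 'scrobbles': 10},   # tie stays in 'top'
    {'rank': 3, 'artist': 'C', 'scrobbles': 7},
]
print(top_with_compatibility(rows, 'this week'))
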
View File

@@ -14,6 +14,13 @@ class ArtistExists(EntityExists):
 class AlbumExists(EntityExists):
    pass
+class DuplicateTimestamp(Exception):
+   def __init__(self,existing_scrobble,rejected_scrobble):
+       self.existing_scrobble = existing_scrobble
+       self.rejected_scrobble = rejected_scrobble
 class DatabaseNotBuilt(HTTPError):
    def __init__(self):
        super().__init__(

View File

@@ -328,7 +328,10 @@ def album_dict_to_db(info,dbconn=None):
 @connection_provider
 def add_scrobble(scrobbledict,update_album=False,dbconn=None):
-   add_scrobbles([scrobbledict],update_album=update_album,dbconn=dbconn)
+   _, e = add_scrobbles([scrobbledict],update_album=update_album,dbconn=dbconn)
+   if e > 0:
+       raise exc.DuplicateTimestamp(existing_scrobble=None,rejected_scrobble=scrobbledict)
+       # TODO: actually pass existing scrobble
 @connection_provider
 def add_scrobbles(scrobbleslist,update_album=False,dbconn=None):
@@ -406,7 +409,7 @@ def add_track_to_album(track_id,album_id,replace=False,dbconn=None):
 def add_tracks_to_albums(track_to_album_id_dict,replace=False,dbconn=None):
    for track_id in track_to_album_id_dict:
-       add_track_to_album(track_id,track_to_album_id_dict[track_id],dbconn=dbconn)
+       add_track_to_album(track_id,track_to_album_id_dict[track_id],replace=replace,dbconn=dbconn)
 @connection_provider
 def remove_album(*track_ids,dbconn=None):
@@ -860,19 +863,24 @@ def get_scrobbles_of_artist(artist,since=None,to=None,resolve_references=True,li
        op = op.order_by(sql.desc('timestamp'))
    else:
        op = op.order_by(sql.asc('timestamp'))
-   if limit:
+   if limit and associated:
+       # if we count associated we cant limit here because we remove stuff later!
        op = op.limit(limit)
    result = dbconn.execute(op).all()
    # remove duplicates (multiple associated artists in the song, e.g. Irene & Seulgi being both counted as Red Velvet)
    # distinct on doesn't seem to exist in sqlite
-   seen = set()
-   filtered_result = []
-   for row in result:
-       if row.timestamp not in seen:
-           filtered_result.append(row)
-           seen.add(row.timestamp)
-   result = filtered_result
+   if associated:
+       seen = set()
+       filtered_result = []
+       for row in result:
+           if row.timestamp not in seen:
+               filtered_result.append(row)
+               seen.add(row.timestamp)
+       result = filtered_result
+   if limit:
+       result = result[:limit]
    if resolve_references:
@@ -1072,7 +1080,7 @@ def count_scrobbles_by_artist(since,to,associated=True,resolve_ids=True,dbconn=N
        DB['scrobbles'].c.timestamp.between(since,to)
    ).group_by(
        artistselect
-   ).order_by(sql.desc('count'))
+   ).order_by(sql.desc('count'),sql.desc('really_by_this_artist'))
    result = dbconn.execute(op).all()
    if resolve_ids:

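The add_scrobble change above turns a rejected duplicate timestamp into a DuplicateTimestamp exception; the (added, errors) return shape of add_scrobbles is only inferred here from the _, e = ... unpacking. A standalone sketch of that contract, with a plain dict standing in for the scrobble table:

class DuplicateTimestamp(Exception):
    def __init__(self, existing_scrobble, rejected_scrobble):
        self.existing_scrobble = existing_scrobble
        self.rejected_scrobble = rejected_scrobble

def add_scrobbles(scrobbleslist, store):
    added, errors = 0, 0
    for s in scrobbleslist:
        if s['time'] in store:
            errors += 1                  # timestamp already taken -> rejected
        else:
            store[s['time']] = s
            added += 1
    return added, errors

def add_scrobble(scrobbledict, store):
    _, e = add_scrobbles([scrobbledict], store)
    if e > 0:
        raise DuplicateTimestamp(existing_scrobble=None, rejected_scrobble=scrobbledict)

store = {}
add_scrobble({'time': 1700000000, 'track': 'Song A'}, store)
try:
    add_scrobble({'time': 1700000000, 'track': 'Song B'}, store)
except DuplicateTimestamp as e:
    print("rejected:", e.rejected_scrobble)
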
View File

@@ -1,9 +1,9 @@
 import os
 import cProfile, pstats
+import time
 from doreah.logging import log
-from doreah.timing import Clock
 from ..pkg_global.conf import data_dir
@@ -27,8 +27,7 @@ def profile(func):
    def newfunc(*args,**kwargs):
-       clock = Clock()
-       clock.start()
+       starttime = time.time()
        if FULL_PROFILE:
            benchmarkfolder = data_dir['logs']("benchmarks")
@@ -44,7 +43,7 @@ def profile(func):
        if FULL_PROFILE:
            localprofiler.disable()
-       seconds = clock.stop()
+       seconds = time.time() - starttime
        if not SINGLE_CALLS:
            times.setdefault(realfunc,[]).append(seconds)

View File

@@ -284,6 +284,12 @@ def image_request(artist_id=None,track_id=None,album_id=None):
    if result is not None:
        # we got an entry, even if it's that there is no image (value None)
        if result['value'] is None:
+           # fallback to album regardless of setting (because we have no image)
+           if track_id:
+               track = database.sqldb.get_track(track_id)
+               if track.get("album"):
+                   album_id = database.sqldb.get_album_id(track["album"])
+                   return image_request(album_id=album_id)
            # use placeholder
            if malojaconfig["FANCY_PLACEHOLDER_ART"]:
                placeholder_url = "https://generative-placeholders.glitch.me/image?width=300&height=300&style="

View File

@@ -1,4 +1,7 @@
 import os
+import doreah.auth
+import doreah.logging
 from doreah.configuration import Configuration
 from doreah.configuration import types as tp
@@ -177,7 +180,7 @@ malojaconfig = Configuration(
        "name":(tp.String(), "Name", "Generic Maloja User")
    },
    "Third Party Services":{
-       "metadata_providers":(tp.List(tp.String()), "Metadata Providers", ['lastfm','spotify','deezer','audiodb','musicbrainz'], "Which metadata providers should be used in what order. Musicbrainz is rate-limited and should not be used first."),
+       "metadata_providers":(tp.List(tp.String()), "Metadata Providers", ['lastfm','spotify','deezer','audiodb','musicbrainz'], "List of which metadata providers should be used in what order. Musicbrainz is rate-limited and should not be used first."),
        "scrobble_lastfm":(tp.Boolean(), "Proxy-Scrobble to Last.fm", False),
        "lastfm_api_key":(tp.String(), "Last.fm API Key", None),
        "lastfm_api_secret":(tp.String(), "Last.fm API Secret", None),
@@ -330,26 +333,15 @@ data_dir = {
-### DOREAH CONFIGURATION
-from doreah import config
-config(
-   auth={
-       "multiuser":False,
-       "cookieprefix":"maloja",
-       "stylesheets":["/maloja.css"],
-       "dbfile":data_dir['auth']("auth.ddb")
-   },
-   logging={
-       "logfolder": data_dir['logs']() if malojaconfig["LOGGING"] else None
-   },
-   regular={
-       "offset": malojaconfig["TIMEZONE"]
-   }
-)
+### DOREAH OBJECTS
+auth = doreah.auth.AuthManager(singleuser=True,cookieprefix='maloja',stylesheets=("/maloja.css",),dbfile=data_dir['auth']("auth.sqlite"))
+#logger = doreah.logging.Logger(logfolder=data_dir['logs']() if malojaconfig["LOGGING"] else None)
+#log = logger.log
+# this is not how its supposed to be done, but lets ease the transition
+doreah.logging.defaultlogger.logfolder = data_dir['logs']() if malojaconfig["LOGGING"] else None

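The module-level doreah config() call is gone; auth and logging are now explicit objects, and the rest of this commit uses the new auth object via auth.authapi.mount(...), @auth.authenticated_function(), auth.check_request(request) and auth.change_pw(password=...). A sketch of that object-based usage on a bare bottle app; the dbfile path and the demo route are assumptions, not Maloja's actual wiring:

from bottle import Bottle, request
import doreah.auth

# constructor arguments mirror the line added above; the dbfile path is just an example
auth = doreah.auth.AuthManager(
    singleuser=True,
    cookieprefix='maloja',
    stylesheets=("/maloja.css",),
    dbfile="/tmp/auth.sqlite",
)

webserver = Bottle()
auth.authapi.mount(server=webserver)   # auth endpoints live on the main server

@webserver.route("/admin_demo")        # hypothetical route
@auth.authenticated_function()         # replaces the old @auth.authenticated decorator
def admin_demo():
    adminmode = request.cookies.get("adminmode") == "true" and auth.check_request(request)
    return {"adminmode": adminmode}
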
View File

@@ -12,11 +12,12 @@ def export(targetfolder=None):
        targetfolder = os.getcwd()
    timestr = time.strftime("%Y_%m_%d_%H_%M_%S")
+   timestamp = int(time.time()) # ok this is technically a separate time get from above, but those ms are not gonna matter, and im too lazy to change it all to datetime
    filename = f"maloja_export_{timestr}.json"
    outputfile = os.path.join(targetfolder,filename)
    assert not os.path.exists(outputfile)
-   data = {'scrobbles':get_scrobbles()}
+   data = {'maloja':{'export_time': timestamp },'scrobbles':get_scrobbles()}
    with open(outputfile,'w') as outfd:
        json.dump(data,outfd,indent=3)

View File

@@ -32,6 +32,8 @@ def import_scrobbles(inputf):
    }
    filename = os.path.basename(inputf)
+   importfunc = None
    if re.match(r".*\.csv",filename):
        typeid,typedesc = "lastfm","Last.fm"
@@ -62,7 +64,17 @@ def import_scrobbles(inputf):
        typeid,typedesc = "rockbox","Rockbox"
        importfunc = parse_rockbox
-   else:
+   elif re.match(r".*\.json",filename):
+       try:
+           with open(filename,'r') as fd:
+               data = json.load(fd)
+           if 'maloja' in data:
+               typeid,typedesc = "maloja","Maloja"
+               importfunc = parse_maloja
+       except Exception:
+           pass
+   if not importfunc:
        print("File",inputf,"could not be identified as a valid import source.")
        return result

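Together with the export change above (the dump now carries a top-level 'maloja' block with the export time), this is what lets the importer recognize a native Maloja export by that key instead of falling through to the 'could not be identified' branch. A small sketch of writing and detecting such a file; the scrobble fields are illustrative, not the exact export schema:

import json, time

export = {
    'maloja': {'export_time': int(time.time())},
    'scrobbles': [{'time': 1700000000, 'track': {'title': 'Song A', 'artists': ['Some Artist']}}],
}
with open('maloja_export_demo.json', 'w') as fd:
    json.dump(export, fd, indent=3)

with open('maloja_export_demo.json') as fd:
    data = json.load(fd)
if 'maloja' in data:
    print("native Maloja export from", data['maloja']['export_time'])
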
View File

@@ -12,14 +12,13 @@ from jinja2.exceptions import TemplateNotFound
 # doreah toolkit
 from doreah.logging import log
-from doreah import auth
 # rest of the project
 from . import database
 from .database.jinjaview import JinjaDBConnection
 from .images import image_request
 from .malojauri import uri_to_internal, remove_identical
-from .pkg_global.conf import malojaconfig, data_dir
+from .pkg_global.conf import malojaconfig, data_dir, auth
 from .pkg_global import conf
 from .jinjaenv.context import jinja_environment
 from .apis import init_apis, apikeystore
@@ -97,7 +96,7 @@ aliases = {
 ### API
-auth.authapi.mount(server=webserver)
+conf.auth.authapi.mount(server=webserver)
 init_apis(webserver)
 # redirects for backwards compatibility
@@ -197,7 +196,7 @@ def jinja_page(name):
    if name in aliases: redirect(aliases[name])
    keys = remove_identical(FormsDict.decode(request.query))
-   adminmode = request.cookies.get("adminmode") == "true" and auth.check(request)
+   adminmode = request.cookies.get("adminmode") == "true" and auth.check_request(request)
    with JinjaDBConnection() as conn:
@@ -222,7 +221,7 @@ def jinja_page(name):
    return res
 @webserver.route("/<name:re:admin.*>")
-@auth.authenticated
+@auth.authenticated_function()
 def jinja_page_private(name):
    return jinja_page(name)

View File

@@ -6,9 +6,8 @@ try:
 except ImportError:
    import distutils
 from doreah.io import col, ask, prompt
-from doreah import auth
-from .pkg_global.conf import data_dir, dir_settings, malojaconfig
+from .pkg_global.conf import data_dir, dir_settings, malojaconfig, auth
@@ -67,10 +66,10 @@ def setup():
        if forcepassword is not None:
            # user has specified to force the pw, nothing else matters
-           auth.defaultuser.setpw(forcepassword)
+           auth.change_pw(password=forcepassword)
            print("Password has been set.")
-       elif auth.defaultuser.checkpw("admin"):
-           # if the actual pw is admin, it means we've never set this up properly (eg first start after update)
+       elif auth.still_has_factory_default_user():
+           # this means we've never set this up properly (eg first start after update)
            while True:
                newpw = prompt("Please set a password for web backend access. Leave this empty to generate a random password.",skip=SKIP,secret=True)
                if newpw is None:
@@ -81,7 +80,7 @@ def setup():
                    newpw_repeat = prompt("Please type again to confirm.",skip=SKIP,secret=True)
                    if newpw != newpw_repeat: print("Passwords do not match!")
                    else: break
-           auth.change_pw(password=newpw)
+           auth.change_pw(password=newpw)
        except EOFError:
            print("No user input possible. If you are running inside a container, set the environment variable",col['yellow']("MALOJA_SKIP_SETUP=yes"))

View File

@@ -56,7 +56,7 @@
 If you use a Chromium-based browser and listen to music on Plex, Spotify, Soundcloud, Bandcamp or YouTube Music, download the extension and simply enter the server URL as well as your API key in the relevant fields. They will turn green if the server is accessible.
 <br/><br/>
-You can also use any standard-compliant scrobbler. For GNUFM (audioscrobbler) scrobblers, enter <span class="stats"><span name="serverurl">yourserver.tld</span>/apis/audioscrobbler</span> as your Gnukebox server and your API key as the password. For Listenbrainz scrobblers, use <span class="stats"><span name="serverurl">yourserver.tld</span>/apis/listenbrainz</span> as the API URL and your API key as token.
+You can also use any standard-compliant scrobbler. For GNUFM (audioscrobbler) scrobblers, enter <span class="stats"><span name="serverurl">yourserver.tld</span>/apis/audioscrobbler</span> as your Gnukebox server and your API key as the password. For Listenbrainz scrobblers, use <span class="stats"><span name="serverurl">yourserver.tld</span>/apis/listenbrainz</span> as the API URL (depending on the implementation, you might need to add a <span class="stats">/1</span> at the end) and your API key as token.
 <br/><br/>
 If you use another browser or another music player, you could try to code your own extension. The API is super simple! Just send a POST HTTP request to

View File

@@ -987,6 +987,7 @@ table.misc td {
 div.tiles {
+   max-height: 600px;
    display: grid;
    grid-template-columns: repeat(18, calc(100% / 18));
    grid-template-rows: repeat(6, calc(100% / 6));

View File

@@ -22,8 +22,8 @@ div#startpage {
 @media (min-width: 1401px) and (max-width: 2200px) {
    div#startpage {
-       grid-template-columns: 45vw 45vw;
-       grid-template-rows: 45vh 45vh 45vh;
+       grid-template-columns: repeat(2, 45vw);
+       grid-template-rows: repeat(3, 45vh);
        grid-template-areas:
            "charts_artists lastscrobbles"

View File

@@ -3,7 +3,7 @@ name = "malojaserver"
 version = "3.2.2"
 description = "Self-hosted music scrobble database"
 readme = "./README.md"
-requires-python = ">=3.10"
+requires-python = ">=3.11"
 license = { file="./LICENSE" }
 authors = [ { name="Johannes Krattenmacher", email="maloja@dev.krateng.ch" } ]
@@ -21,7 +21,7 @@ classifiers = [
 dependencies = [
    "bottle>=0.12.16",
    "waitress>=2.1.0",
-   "doreah>=1.9.4, <2",
+   "doreah>=2.0.1, <3",
    "nimrodel>=0.8.0",
    "setproctitle>=1.1.10",
    #"pyvips>=2.1.16",
@@ -31,7 +31,9 @@ dependencies = [
    "sqlalchemy>=2.0",
    "python-datauri>=1.1.0",
    "requests>=2.27.1",
-   "setuptools>68.0.0"
+   "setuptools>68.0.0",
+   "toml>=0.10.2",
+   "PyYAML>=6.0.1"
 ]
 [project.optional-dependencies]

View File

@@ -1,6 +1,6 @@
 bottle>=0.12.16
 waitress>=2.1.0
-doreah>=1.9.4, <2
+doreah>=2.0.1, <3
 nimrodel>=0.8.0
 setproctitle>=1.1.10
 jinja2>=3.0.0
@@ -10,3 +10,5 @@ sqlalchemy>=2.0
 python-datauri>=1.1.0
 requests>=2.27.1
 setuptools>68.0.0
+toml>=0.10.2
+PyYAML>=6.0.1

View File

@@ -32,14 +32,17 @@ Settings File | Environment Variable | Type | Description
 `cache_expire_negative` | `MALOJA_CACHE_EXPIRE_NEGATIVE` | Integer | Days until failed image fetches are reattempted
 `db_max_memory` | `MALOJA_DB_MAX_MEMORY` | Integer | RAM Usage in percent at which Maloja should no longer increase its database cache.
 `use_request_cache` | `MALOJA_USE_REQUEST_CACHE` | Boolean | Use request-local DB Cache
-`use_global_cache` | `MALOJA_USE_GLOBAL_CACHE` | Boolean | Use global DB Cache
+`use_global_cache` | `MALOJA_USE_GLOBAL_CACHE` | Boolean | This is vital for Maloja's performance. Do not disable this unless you have a strong reason to.
 **Fluff**
 `scrobbles_gold` | `MALOJA_SCROBBLES_GOLD` | Integer | How many scrobbles a track needs to be considered 'Gold' status
 `scrobbles_platinum` | `MALOJA_SCROBBLES_PLATINUM` | Integer | How many scrobbles a track needs to be considered 'Platinum' status
 `scrobbles_diamond` | `MALOJA_SCROBBLES_DIAMOND` | Integer | How many scrobbles a track needs to be considered 'Diamond' status
+`scrobbles_gold_album` | `MALOJA_SCROBBLES_GOLD_ALBUM` | Integer | How many scrobbles an album needs to be considered 'Gold' status
+`scrobbles_platinum_album` | `MALOJA_SCROBBLES_PLATINUM_ALBUM` | Integer | How many scrobbles an album needs to be considered 'Platinum' status
+`scrobbles_diamond_album` | `MALOJA_SCROBBLES_DIAMOND_ALBUM` | Integer | How many scrobbles an album needs to be considered 'Diamond' status
 `name` | `MALOJA_NAME` | String | Name
 **Third Party Services**
-`metadata_providers` | `MALOJA_METADATA_PROVIDERS` | List | Which metadata providers should be used in what order. Musicbrainz is rate-limited and should not be used first.
+`metadata_providers` | `MALOJA_METADATA_PROVIDERS` | List | List of which metadata providers should be used in what order. Musicbrainz is rate-limited and should not be used first.
 `scrobble_lastfm` | `MALOJA_SCROBBLE_LASTFM` | Boolean | Proxy-Scrobble to Last.fm
 `lastfm_api_key` | `MALOJA_LASTFM_API_KEY` | String | Last.fm API Key
 `lastfm_api_secret` | `MALOJA_LASTFM_API_SECRET` | String | Last.fm API Secret
@@ -55,6 +58,7 @@ Settings File | Environment Variable | Type | Description
 `send_stats` | `MALOJA_SEND_STATS` | Boolean | Send Statistics
 `proxy_images` | `MALOJA_PROXY_IMAGES` | Boolean | Whether third party images should be downloaded and served directly by Maloja (instead of just linking their URL)
 **Database**
+`album_information_trust` | `MALOJA_ALBUM_INFORMATION_TRUST` | Choice | Whether to trust the first album information that is sent with a track or update every time a different album is sent
 `invalid_artists` | `MALOJA_INVALID_ARTISTS` | Set | Artists that should be discarded immediately
 `remove_from_title` | `MALOJA_REMOVE_FROM_TITLE` | Set | Phrases that should be removed from song titles
 `delimiters_feat` | `MALOJA_DELIMITERS_FEAT` | Set | Delimiters used for extra artists, even when in the title field
@@ -62,14 +66,19 @@ Settings File | Environment Variable | Type | Description
 `delimiters_formal` | `MALOJA_DELIMITERS_FORMAL` | Set | Delimiters used to tag multiple artists when only one tag field is available
 `filters_remix` | `MALOJA_FILTERS_REMIX` | Set | Filters used to recognize the remix artists in the title
 `parse_remix_artists` | `MALOJA_PARSE_REMIX_ARTISTS` | Boolean | Parse Remix Artists
+`week_offset` | `MALOJA_WEEK_OFFSET` | Integer | Start of the week for the purpose of weekly statistics. 0 = Sunday, 6 = Saturday
+`timezone` | `MALOJA_TIMEZONE` | Integer | UTC Offset
 **Web Interface**
-`default_range_charts_artists` | `MALOJA_DEFAULT_RANGE_CHARTS_ARTISTS` | Choice | Default Range Artist Charts
-`default_range_charts_tracks` | `MALOJA_DEFAULT_RANGE_CHARTS_TRACKS` | Choice | Default Range Track Charts
+`default_range_startpage` | `MALOJA_DEFAULT_RANGE_STARTPAGE` | Choice | Default Range for Startpage Stats
 `default_step_pulse` | `MALOJA_DEFAULT_STEP_PULSE` | Choice | Default Pulse Step
 `charts_display_tiles` | `MALOJA_CHARTS_DISPLAY_TILES` | Boolean | Display Chart Tiles
+`album_showcase` | `MALOJA_ALBUM_SHOWCASE` | Boolean | Display a graphical album showcase for artist overview pages instead of a chart list
 `display_art_icons` | `MALOJA_DISPLAY_ART_ICONS` | Boolean | Display Album/Artist Icons
+`default_album_artist` | `MALOJA_DEFAULT_ALBUM_ARTIST` | String | Default Albumartist
+`use_album_artwork_for_tracks` | `MALOJA_USE_ALBUM_ARTWORK_FOR_TRACKS` | Boolean | Use Album Artwork for tracks
+`fancy_placeholder_art` | `MALOJA_FANCY_PLACEHOLDER_ART` | Boolean | Use fancy placeholder artwork
+`show_play_number_on_tiles` | `MALOJA_SHOW_PLAY_NUMBER_ON_TILES` | Boolean | Show amount of plays on tiles
 `discourage_cpu_heavy_stats` | `MALOJA_DISCOURAGE_CPU_HEAVY_STATS` | Boolean | Prevent visitors from mindlessly clicking on CPU-heavy options. Does not actually disable them for malicious actors!
 `use_local_images` | `MALOJA_USE_LOCAL_IMAGES` | Boolean | Use Local Images
-`timezone` | `MALOJA_TIMEZONE` | Integer | UTC Offset
 `time_format` | `MALOJA_TIME_FORMAT` | String | Time Format
 `theme` | `MALOJA_THEME` | String | Theme