mirror of https://github.com/krateng/maloja.git
Merge branch 'api_modularization'
commit 9d3ffae45c
@@ -129,14 +129,14 @@ You can use any third-party scrobbler that supports the audioscrobbler (GNUFM) or the ListenBrainz protocol.

 GNU FM |
 ------ | ---------
-Gnukebox URL | Your Maloja URL followed by `/api/s/audioscrobbler`
+Gnukebox URL | Your Maloja URL followed by `/apis/audioscrobbler`
-Username | Any name, doesn't matter
+Username | Any name, doesn't matter (don't leave empty)
 Password | Any of your API keys

 ListenBrainz |
 ------ | ---------
-API URL | Your Maloja URL followed by `/api/s/listenbrainz`
+API URL | Your Maloja URL followed by `/apis/listenbrainz`
-Username | Any name, doesn't matter
+Username | Any name, doesn't matter (don't leave empty)
 Auth Token | Any of your API keys

 These are tested with the Pano Scrobbler and the Simple Last.fm Scrobbler for Android. I'm thankful for any feedback whether other scrobblers work!
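For reference, a ListenBrainz-style client configured as in the table above ends up sending roughly the following request to the renamed endpoint. This is a sketch, not part of the commit: host, port and API key are placeholders for your own instance, and it assumes the server accepts the standard JSON listen payload shown here.

import json, time, urllib.request

MALOJA = "http://localhost:42010"          # assumed local instance and default port
APIKEY = "your-maloja-api-key"             # any of your API keys

payload = {
	"listen_type": "single",
	"payload": [{
		"listened_at": int(time.time()),
		"track_metadata": {"artist_name": "Artist", "track_name": "Title"}
	}]
}
req = urllib.request.Request(
	MALOJA + "/apis/listenbrainz/1/submit-listens",
	data=json.dumps(payload).encode("utf-8"),
	headers={"Authorization": "token " + APIKEY, "Content-Type": "application/json"},
	method="POST"
)
print(urllib.request.urlopen(req).read().decode())   # expect {"status": "ok"}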
@@ -16,7 +16,7 @@ requires = [
 	"bottle>=0.12.16",
 	"waitress>=1.3",
 	"doreah>=1.6.10",
-	"nimrodel>=0.6.3",
+	"nimrodel>=0.6.4",
 	"setproctitle>=1.1.10",
 	"wand>=0.5.4",
 	"lesscpy>=0.13",
maloja/apis/__init__.py — new file (40 lines)
@@ -0,0 +1,40 @@

from . import native_v1
from .audioscrobbler import Audioscrobbler
from .listenbrainz import Listenbrainz

import copy
from bottle import redirect, request
from urllib.parse import urlencode

native_apis = [
	native_v1.api
]
standardized_apis = [
	Listenbrainz(),
	Audioscrobbler()
]

def init_apis(server):
	for api in native_apis:
		api.mount(server=server,path="apis/"+api.__apipath__)

	for api in standardized_apis:
		aliases = api.__aliases__
		canonical = aliases[0]
		api.nimrodelapi.mount(server=server,path="apis/" + canonical)

		# redirects
		for alias in aliases[1:]:
			altpath = "/apis/" + alias + "/<pth:path>"
			altpath_empty = "/apis/" + alias
			altpath_empty_cl = "/apis/" + alias + "/"

			def alias_api(pth=""):
				redirect("/apis/" + canonical + "/" + pth + "?" + urlencode(request.query))

			server.get(altpath)(alias_api)
			server.post(altpath)(alias_api)
			server.get(altpath_empty)(alias_api)
			server.post(altpath_empty)(alias_api)
			server.get(altpath_empty_cl)(alias_api)
			server.post(altpath_empty_cl)(alias_api)
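The alias loop above makes every supported protocol reachable under several prefixes, with requests to an alias redirected to the canonical mount. Roughly, the routing that init_apis() produces looks like this (paths taken from __apipath__ and __aliases__ in the modules below; the dict is only an illustration):

routes = {
	"/apis/mlj_1/...":              [],                                                 # native API, mounted directly
	"/apis/listenbrainz/1/...":     ["/apis/lbrnz/1/..."],                              # canonical mount + redirecting alias
	"/apis/audioscrobbler/2.0/...": ["/apis/gnufm/2.0/...", "/apis/gnukebox/2.0/..."],
}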
maloja/apis/_base.py — new file (97 lines)
@@ -0,0 +1,97 @@

from nimrodel import EAPI as API
from nimrodel import Multi

from ._exceptions import *

from copy import deepcopy
from types import FunctionType
import sys
import datetime

from doreah.logging import log

from bottle import response

from ..cleanup import CleanerAgent
from .. import database

__logmodulename__ = "apis"


#def singleton(cls):
#	return cls()


cla = CleanerAgent()

class APIHandler:
	# make these classes singletons
	_instance = None
	def __new__(cls, *args, **kwargs):
		if not isinstance(cls._instance, cls):
			cls._instance = object.__new__(cls, *args, **kwargs)
		return cls._instance

	def __init_subclass__(cls):
		# Copy the handle function so we can have a unique docstring
		sf = cls.__base__.wrapper
		cls.wrapper = FunctionType(sf.__code__,sf.__globals__,sf.__name__,sf.__defaults__,sf.__closure__)
		cls.wrapper.__annotations__ = sf.__annotations__
		# need to copy annotations so nimrodel properly passes path argument

		# create docstring
		doc = "Accepts requests according to the <a href='{link}'>{name} Standard</a>"
		cls.wrapper.__doc__ = doc.format(name=cls.__apiname__,link=cls.__doclink__)

	def __init__(self):
		self.init()

		# creates a rump api object that exposes one generic endpoint
		# we don't want the api explorer to show the different structures of
		# third party apis, just mention they exist
		self.nimrodelapi = API(delay=True)
		self.nimrodelapi.get("{path}",pass_headers=True)(self.wrapper)
		self.nimrodelapi.post("{path}",pass_headers=True)(self.wrapper)
		self.nimrodelapi.get("",pass_headers=True)(self.wrapper)
		self.nimrodelapi.post("",pass_headers=True)(self.wrapper)


	def wrapper(self,path:Multi=[],**keys):
		log("API request: " + str(path))# + " | Keys: " + str({k:keys.get(k) for k in keys}))

		try:
			response.status,result = self.handle(path,keys)
		except:
			exceptiontype = sys.exc_info()[0]
			if exceptiontype in self.errors:
				response.status,result = self.errors[exceptiontype]
			else:
				response.status,result = 500,{"status":"Unknown error","code":500}

		return result
		#else:
		#	result = {"error":"Invalid scrobble protocol"}
		#	response.status = 500


	def handle(self,path,keys):

		try:
			methodname = self.get_method(path,keys)
			method = self.methods[methodname]
		except:
			raise InvalidMethodException()
		return method(path,keys)


	def scrobble(self,artiststr,titlestr,time=None,duration=None,album=None):
		logmsg = "Incoming scrobble (API: {api}): ARTISTS: {artiststr}, TRACK: {titlestr}"
		log(logmsg.format(api=self.__apiname__,artiststr=artiststr,titlestr=titlestr))
		if time is None: time = int(datetime.datetime.now(tz=datetime.timezone.utc).timestamp())
		try:
			(artists,title) = cla.fullclean(artiststr,titlestr)
			database.createScrobble(artists,title,time)
			database.sync()
		except:
			raise ScrobblingException()
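A minimal sketch (not part of the commit) of what the base class gives a subclass: the class body only has to provide the metadata, init(), get_method() and its method implementations, while __new__ keeps each handler a singleton and __init_subclass__ generates the per-protocol docstring. All names below are illustrative.

from maloja.apis._base import APIHandler

class ExampleAPI(APIHandler):
	__apiname__ = "Example"
	__doclink__ = "https://example.org/spec"
	__aliases__ = ["example/1"]

	def init(self):
		self.methods = {"ping": self.ping}
		self.errors = {}

	def get_method(self, pathnodes, keys):
		return pathnodes.pop(0)

	def ping(self, pathnodes, keys):
		return 200, {"status": "ok"}

assert ExampleAPI() is ExampleAPI()     # __new__ returns the same instance every time
print(ExampleAPI.wrapper.__doc__)       # docstring generated in __init_subclass__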
maloja/apis/_exceptions.py — new file (6 lines)
@@ -0,0 +1,6 @@

class BadAuthException(Exception): pass
class InvalidAuthException(Exception): pass
class InvalidMethodException(Exception): pass
class InvalidSessionKey(Exception): pass
class MalformedJSONException(Exception): pass
class ScrobblingException(Exception): pass
maloja/apis/audioscrobbler.py — new file (92 lines)
@@ -0,0 +1,92 @@

from ._base import APIHandler
from ._exceptions import *
from .. import database

class Audioscrobbler(APIHandler):
	__apiname__ = "Audioscrobbler"
	__doclink__ = "https://www.last.fm/api/scrobbling"
	__aliases__ = [
		"audioscrobbler/2.0",
		"gnufm/2.0",
		"gnukebox/2.0",
	]

	def init(self):

		# no need to save these on disk, clients can always request a new session
		self.mobile_sessions = []
		self.methods = {
			"auth.getMobileSession":self.authmobile,
			"track.scrobble":self.submit_scrobble
		}
		self.errors = {
			BadAuthException:(400,{"error":6,"message":"Requires authentication"}),
			InvalidAuthException:(401,{"error":4,"message":"Invalid credentials"}),
			InvalidMethodException:(200,{"error":3,"message":"Invalid method"}),
			InvalidSessionKey:(403,{"error":9,"message":"Invalid session key"}),
			ScrobblingException:(500,{"error":8,"message":"Operation failed"})
		}

	def get_method(self,pathnodes,keys):
		return keys.get("method")

	def authmobile(self,pathnodes,keys):
		token = keys.get("authToken")
		user = keys.get("username")
		password = keys.get("password")
		# either username and password
		if user is not None and password is not None:
			if password in database.allAPIkeys():
				sessionkey = generate_key(self.mobile_sessions)
				return 200,{"session":{"key":sessionkey}}
			else:
				raise InvalidAuthException()
		# or username and token (deprecated by lastfm)
		elif user is not None and token is not None:
			for key in database.allAPIkeys():
				if md5(user + md5(key)) == token:
					sessionkey = generate_key(self.mobile_sessions)
					return 200,{"session":{"key":sessionkey}}
			raise InvalidAuthException()
		else:
			raise BadAuthException()

	def submit_scrobble(self,pathnodes,keys):
		if keys.get("sk") is None or keys.get("sk") not in self.mobile_sessions:
			raise InvalidSessionKey()
		else:
			if "track" in keys and "artist" in keys:
				artiststr,titlestr = keys["artist"], keys["track"]
				#(artists,title) = cla.fullclean(artiststr,titlestr)
				try:
					timestamp = int(keys["timestamp"])
				except:
					timestamp = None
				#database.createScrobble(artists,title,timestamp)
				self.scrobble(artiststr,titlestr,time=timestamp)
				return 200,{"scrobbles":{"@attr":{"ignored":0}}}
			else:
				for num in range(50):
					if "track[" + str(num) + "]" in keys:
						artiststr,titlestr = keys["artist[" + str(num) + "]"], keys["track[" + str(num) + "]"]
						#(artists,title) = cla.fullclean(artiststr,titlestr)
						timestamp = int(keys["timestamp[" + str(num) + "]"])
						#database.createScrobble(artists,title,timestamp)
						self.scrobble(artiststr,titlestr,time=timestamp)
				return 200,{"scrobbles":{"@attr":{"ignored":0}}}


import hashlib
import random

def md5(input):
	m = hashlib.md5()
	m.update(bytes(input,encoding="utf-8"))
	return m.hexdigest()

def generate_key(ls):
	key = ""
	for i in range(64):
		key += str(random.choice(list(range(10)) + list("abcdefghijklmnopqrstuvwxyz") + list("ABCDEFGHIJKLMNOPQRSTUVWXYZ")))
	ls.append(key)
	return key
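The token branch of authmobile() accepts the deprecated Last.fm handshake where the client derives authToken from the username and the shared key. A sketch of the client side (username and key are placeholders):

import hashlib

def md5hex(s):
	return hashlib.md5(s.encode("utf-8")).hexdigest()

username = "anyname"                  # the name itself doesn't matter, see the README table
apikey = "your-maloja-api-key"        # one of your API keys
authToken = md5hex(username + md5hex(apikey))
# sent as ?method=auth.getMobileSession&username=<username>&authToken=<authToken>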
maloja/apis/listenbrainz.py — new file (66 lines)
@@ -0,0 +1,66 @@

from ._base import APIHandler
from ._exceptions import *
from .. import database
import datetime


class Listenbrainz(APIHandler):
	__apiname__ = "Listenbrainz"
	__doclink__ = "https://listenbrainz.readthedocs.io/en/production/"
	__aliases__ = [
		"listenbrainz/1",
		"lbrnz/1"
	]

	def init(self):
		self.methods = {
			"submit-listens":self.submit,
			"validate-token":self.validate_token
		}
		self.errors = {
			BadAuthException:(401,{"code":401,"error":"You need to provide an Authorization header."}),
			InvalidAuthException:(401,{"code":401,"error":"Incorrect Authorization"}),
			InvalidMethodException:(200,{"code":200,"error":"Invalid Method"}),
			MalformedJSONException:(400,{"code":400,"error":"Invalid JSON document submitted."}),
			ScrobblingException:(500,{"code":500,"error":"Unspecified server error."})
		}

	def get_method(self,pathnodes,keys):
		return pathnodes.pop(0)

	def submit(self,pathnodes,keys):
		try:
			token = keys.get("Authorization").replace("token ","").replace("Token ","").strip()
		except:
			raise BadAuthException()

		if token not in database.allAPIkeys():
			raise InvalidAuthException()

		try:
			listentype = keys["listen_type"]
			payload = keys["payload"]
			if listentype in ["single","import"]:
				for listen in payload:
					metadata = listen["track_metadata"]
					artiststr, titlestr = metadata["artist_name"], metadata["track_name"]
					#(artists,title) = cla.fullclean(artiststr,titlestr)
					try:
						timestamp = int(listen["listened_at"])
					except:
						timestamp = None
		except:
			raise MalformedJSONException()

		self.scrobble(artiststr,titlestr,timestamp)
		return 200,{"status":"ok"}

	def validate_token(self,pathnodes,keys):
		try:
			token = keys.get("token").strip()
		except:
			raise BadAuthException()
		if token not in database.allAPIkeys():
			raise InvalidAuthException()
		else:
			return 200,{"code":200,"message":"Token valid.","valid":True,"user_name":"n/a"}
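validate_token() reads the key from the token query parameter, so a quick manual check against a running instance could look like this (a sketch; host, port and key are placeholders):

import urllib.parse, urllib.request

params = urllib.parse.urlencode({"token": "your-maloja-api-key"})
url = "http://localhost:42010/apis/listenbrainz/1/validate-token?" + params
print(urllib.request.urlopen(url).read().decode())
# a known key returns {"code": 200, "message": "Token valid.", ...}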
maloja/apis/native_v1.py — new file (297 lines)
@@ -0,0 +1,297 @@

from ..database import *
from doreah import settings
from ..__pkginfo__ import version
from ..malojauri import uri_to_internal
from .. import utilities

from bottle import response

# nimrodel API
from nimrodel import EAPI as API
from nimrodel import Multi


api = API(delay=True)
api.__apipath__ = "mlj_1"


@api.get("test")
def test_server(key=None):
	response.set_header("Access-Control-Allow-Origin","*")
	if key is not None and not (checkAPIkey(key)):
		response.status = 403
		return {"error":"Wrong API key"}

	else:
		response.status = 200
		return {"status":"ok"}

	# 200 Database server is up and operational
	# 403 Database server is up, but provided API key is not valid

@api.get("serverinfo")
def server_info():

	response.set_header("Access-Control-Allow-Origin","*")
	response.set_header("Content-Type","application/json")

	return {
		"name":settings.get_settings("NAME"),
		"version":version,
		"versionstring":".".join(str(n) for n in version)
	}


## API ENDPOINTS THAT CLOSELY MATCH ONE DATABASE FUNCTION

@api.get("scrobbles")
def get_scrobbles_external(**keys):
	k_filter, k_time, _, k_amount = uri_to_internal(keys)
	ckeys = {**k_filter, **k_time, **k_amount}

	result = get_scrobbles(**ckeys)
	return {"list":result}


# info for comparison
@api.get("info")
def info_external(**keys):

	response.set_header("Access-Control-Allow-Origin","*")
	response.set_header("Content-Type","application/json")

	result = info()
	return result


@api.get("numscrobbles")
def get_scrobbles_num_external(**keys):
	k_filter, k_time, _, k_amount = uri_to_internal(keys)
	ckeys = {**k_filter, **k_time, **k_amount}

	result = get_scrobbles_num(**ckeys)
	return {"amount":result}


@api.get("tracks")
def get_tracks_external(**keys):
	k_filter, _, _, _ = uri_to_internal(keys,forceArtist=True)
	ckeys = {**k_filter}

	result = get_tracks(**ckeys)
	return {"list":result}


@api.get("artists")
def get_artists_external():
	result = get_artists()
	return {"list":result}


@api.get("charts/artists")
def get_charts_artists_external(**keys):
	_, k_time, _, _ = uri_to_internal(keys)
	ckeys = {**k_time}

	result = get_charts_artists(**ckeys)
	return {"list":result}


@api.get("charts/tracks")
def get_charts_tracks_external(**keys):
	k_filter, k_time, _, _ = uri_to_internal(keys,forceArtist=True)
	ckeys = {**k_filter, **k_time}

	result = get_charts_tracks(**ckeys)
	return {"list":result}


@api.get("pulse")
def get_pulse_external(**keys):
	k_filter, k_time, k_internal, k_amount = uri_to_internal(keys)
	ckeys = {**k_filter, **k_time, **k_internal, **k_amount}

	results = get_pulse(**ckeys)
	return {"list":results}


@api.get("performance")
def get_performance_external(**keys):
	k_filter, k_time, k_internal, k_amount = uri_to_internal(keys)
	ckeys = {**k_filter, **k_time, **k_internal, **k_amount}

	results = get_performance(**ckeys)
	return {"list":results}


@api.get("top/artists")
def get_top_artists_external(**keys):
	_, k_time, k_internal, _ = uri_to_internal(keys)
	ckeys = {**k_time, **k_internal}

	results = get_top_artists(**ckeys)
	return {"list":results}


@api.get("top/tracks")
def get_top_tracks_external(**keys):
	_, k_time, k_internal, _ = uri_to_internal(keys)
	ckeys = {**k_time, **k_internal}

	# IMPLEMENT THIS FOR TOP TRACKS OF ARTIST AS WELL?

	results = get_top_tracks(**ckeys)
	return {"list":results}


@api.get("artistinfo")
def artistInfo_external(**keys):
	k_filter, _, _, _ = uri_to_internal(keys,forceArtist=True)
	ckeys = {**k_filter}

	results = artistInfo(**ckeys)
	return results


@api.get("trackinfo")
def trackInfo_external(artist:Multi[str],**keys):
	# transform into a multidict so we can use our nomral uri_to_internal function
	keys = FormsDict(keys)
	for a in artist:
		keys.append("artist",a)
	k_filter, _, _, _ = uri_to_internal(keys,forceTrack=True)
	ckeys = {**k_filter}

	results = trackInfo(**ckeys)
	return results


@api.get("compare")
def compare_external(**keys):

	results = compare(keys["remote"])
	return results


@api.get("newscrobble")
@api.post("newscrobble")
@authenticated_api_with_alternate(api_key_correct)
def post_scrobble(artist:Multi,**keys):
	artists = "/".join(artist)
	title = keys.get("title")
	album = keys.get("album")
	duration = keys.get("seconds")
	time = keys.get("time")
	if time is not None: time = int(time)

	return incoming_scrobble(artists,title,album=album,duration=duration,time=time)


@api.post("importrules")
@authenticated_api
def import_rulemodule(**keys):
	filename = keys.get("filename")
	remove = keys.get("remove") is not None
	validchars = "-_abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
	filename = "".join(c for c in filename if c in validchars)

	if remove:
		log("Deactivating predefined rulefile " + filename)
		os.remove(datadir("rules/" + filename + ".tsv"))
	else:
		log("Importing predefined rulefile " + filename)
		os.symlink(datadir("rules/predefined/" + filename + ".tsv"),datadir("rules/" + filename + ".tsv"))


@api.post("rebuild")
@authenticated_api
def rebuild(**keys):
	log("Database rebuild initiated!")
	sync()
	from .proccontrol.tasks.fixexisting import fix
	fix()
	global cla, coa
	cla = CleanerAgent()
	coa = CollectorAgent()
	build_db()
	invalidate_caches()


@api.get("search")
def search(**keys):
	query = keys.get("query")
	max_ = keys.get("max")
	if max_ is not None: max_ = int(max_)
	query = query.lower()

	artists = db_search(query,type="ARTIST")
	tracks = db_search(query,type="TRACK")

	# if the string begins with the query it's a better match, if a word in it begins with it, still good
	# also, shorter is better (because longer titles would be easier to further specify)
	artists.sort(key=lambda x: ((0 if x.lower().startswith(query) else 1 if " " + query in x.lower() else 2),len(x)))
	tracks.sort(key=lambda x: ((0 if x["title"].lower().startswith(query) else 1 if " " + query in x["title"].lower() else 2),len(x["title"])))

	# add links
	artists_result = []
	for a in artists:
		result = {"name":a}
		result["link"] = "/artist?" + compose_querystring(internal_to_uri({"artist":a}))
		result["image"] = "/image?" + compose_querystring(internal_to_uri({"artist":a}))
		artists_result.append(result)

	tracks_result = []
	for t in tracks:
		result = t
		result["link"] = "/track?" + compose_querystring(internal_to_uri({"track":t}))
		result["image"] = "/image?" + compose_querystring(internal_to_uri({"track":t}))
		tracks_result.append(result)

	return {"artists":artists_result[:max_],"tracks":tracks_result[:max_]}


@api.post("addpicture")
@authenticated_api
def add_picture(b64,artist:Multi=[],title=None):
	keys = FormsDict()
	for a in artist:
		keys.append("artist",a)
	if title is not None: keys.append("title",title)
	k_filter, _, _, _, _ = uri_to_internal(keys)
	if "track" in k_filter: k_filter = k_filter["track"]
	utilities.set_image(b64,**k_filter)


@api.post("newrule")
@authenticated_api
def newrule(**keys):
	tsv.add_entry(datadir("rules/webmade.tsv"),[k for k in keys])
	#addEntry("rules/webmade.tsv",[k for k in keys])
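The same endpoints that used to live under /api/ are now served from /apis/mlj_1/. A sketch of two quick requests against a local instance (host, port and key are placeholders):

import urllib.request

base = "http://localhost:42010/apis/mlj_1"
print(urllib.request.urlopen(base + "/test?key=your-maloja-api-key").read().decode())   # {"status": "ok"}
print(urllib.request.urlopen(base + "/numscrobbles").read().decode())                   # {"amount": ...}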
Deleted file (221 lines) — the old compliant-API scrobbling module superseded by maloja/apis/:
@@ -1,221 +0,0 @@

from doreah.logging import log
import hashlib
import random
from . import database
import datetime
import itertools
import sys
from .cleanup import CleanerAgent
from bottle import response

## GNU-FM-compliant scrobbling


cla = CleanerAgent()

def md5(input):
	m = hashlib.md5()
	m.update(bytes(input,encoding="utf-8"))
	return m.hexdigest()

def generate_key(ls):
	key = ""
	for i in range(64):
		key += str(random.choice(list(range(10)) + list("abcdefghijklmnopqrstuvwxyz") + list("ABCDEFGHIJKLMNOPQRSTUVWXYZ")))
	ls.append(key)
	return key

#def check_sig(keys):
#	try:
#		sig = keys.pop("api_sig")
#		text = "".join([key + keys[key] for key in sorted(keys.keys())]) + # secret
#		assert sig == md5(text)
#		return True
#	except:
#		return False


handlers = {}

def handler(apiname,version):
	def deco(cls):
		handlers[(apiname,version)] = cls()
		return cls
	return deco

def handle(path,keys):
	log("API request: " + str(path))# + " | Keys: " + str({k:keys.get(k) for k in keys}))

	if len(path)>1 and (path[0],path[1]) in handlers:
		handler = handlers[(path[0],path[1])]
		path = path[2:]
		try:
			response.status,result = handler.handle(path,keys)
		except:
			type = sys.exc_info()[0]
			response.status,result = handler.errors[type]
	else:
		result = {"error":"Invalid scrobble protocol"}
		response.status = 500

	log("Response: " + str(result))
	return result

def scrobbletrack(artiststr,titlestr,timestamp):
	try:
		log("Incoming scrobble (compliant API): ARTISTS: " + artiststr + ", TRACK: " + titlestr,module="debug")
		(artists,title) = cla.fullclean(artiststr,titlestr)
		database.createScrobble(artists,title,timestamp)
		database.sync()
	except:
		raise ScrobblingException()


class BadAuthException(Exception): pass
class InvalidAuthException(Exception): pass
class InvalidMethodException(Exception): pass
class InvalidSessionKey(Exception): pass
class MalformedJSONException(Exception): pass
class ScrobblingException(Exception): pass


class APIHandler:
	# make these classes singletons
	_instance = None
	def __new__(cls, *args, **kwargs):
		if not isinstance(cls._instance, cls):
			cls._instance = object.__new__(cls, *args, **kwargs)
		return cls._instance


	def handle(self,pathnodes,keys):
		try:
			methodname = self.get_method(pathnodes,keys)
			method = self.methods[methodname]
		except:
			raise InvalidMethodException()
		return method(pathnodes,keys)


@handler("audioscrobbler","2.0")
@handler("gnufm","2.0")
@handler("gnukebox","2.0")
class GNUFM2(APIHandler):
	def __init__(self):
		# no need to save these on disk, clients can always request a new session
		self.mobile_sessions = []
		self.methods = {
			"auth.getMobileSession":self.authmobile,
			"track.scrobble":self.scrobble
		}
		self.errors = {
			BadAuthException:(400,{"error":6,"message":"Requires authentication"}),
			InvalidAuthException:(401,{"error":4,"message":"Invalid credentials"}),
			InvalidMethodException:(200,{"error":3,"message":"Invalid method"}),
			InvalidSessionKey:(403,{"error":9,"message":"Invalid session key"}),
			ScrobblingException:(500,{"error":8,"message":"Operation failed"})
		}

	def get_method(self,pathnodes,keys):
		return keys.get("method")

	def authmobile(self,pathnodes,keys):
		token = keys.get("authToken")
		user = keys.get("username")
		password = keys.get("password")
		# either username and password
		if user is not None and password is not None:
			if password in database.allAPIkeys():
				sessionkey = generate_key(self.mobile_sessions)
				return 200,{"session":{"key":sessionkey}}
			else:
				raise InvalidAuthException()
		# or username and token (deprecated by lastfm)
		elif user is not None and token is not None:
			for key in database.allAPIkeys():
				if md5(user + md5(key)) == token:
					sessionkey = generate_key(self.mobile_sessions)
					return 200,{"session":{"key":sessionkey}}
			raise InvalidAuthException()
		else:
			raise BadAuthException()

	def scrobble(self,pathnodes,keys):
		if keys.get("sk") is None or keys.get("sk") not in self.mobile_sessions:
			raise InvalidSessionKey()
		else:
			if "track" in keys and "artist" in keys:
				artiststr,titlestr = keys["artist"], keys["track"]
				#(artists,title) = cla.fullclean(artiststr,titlestr)
				timestamp = int(keys["timestamp"])
				#database.createScrobble(artists,title,timestamp)
				scrobbletrack(artiststr,titlestr,timestamp)
				return 200,{"scrobbles":{"@attr":{"ignored":0}}}
			else:
				for num in range(50):
					if "track[" + str(num) + "]" in keys:
						artiststr,titlestr = keys["artist[" + str(num) + "]"], keys["track[" + str(num) + "]"]
						#(artists,title) = cla.fullclean(artiststr,titlestr)
						timestamp = int(keys["timestamp[" + str(num) + "]"])
						#database.createScrobble(artists,title,timestamp)
						scrobbletrack(artiststr,titlestr,timestamp)
				return 200,{"scrobbles":{"@attr":{"ignored":0}}}


@handler("listenbrainz","1")
@handler("lbrnz","1")
class LBrnz1(APIHandler):
	def __init__(self):
		self.methods = {
			"submit-listens":self.submit,
			"validate-token":self.validate_token
		}
		self.errors = {
			BadAuthException:(401,{"code":401,"error":"You need to provide an Authorization header."}),
			InvalidAuthException:(401,{"code":401,"error":"Incorrect Authorization"}),
			InvalidMethodException:(200,{"code":200,"error":"Invalid Method"}),
			MalformedJSONException:(400,{"code":400,"error":"Invalid JSON document submitted."}),
			ScrobblingException:(500,{"code":500,"error":"Unspecified server error."})
		}

	def get_method(self,pathnodes,keys):
		return pathnodes.pop(0)

	def submit(self,pathnodes,keys):
		try:
			token = keys.get("Authorization").replace("token ","").replace("Token ","").strip()
		except:
			raise BadAuthException()

		if token not in database.allAPIkeys():
			raise InvalidAuthException()

		try:
			#log("scrobbling to listenbrainz, keys "+str(keys),module="debug")
			if keys["listen_type"] in ["single","import"]:
				payload = keys["payload"]
				for listen in payload:
					metadata = listen["track_metadata"]
					artiststr, titlestr = metadata["artist_name"], metadata["track_name"]
					#(artists,title) = cla.fullclean(artiststr,titlestr)
					try:
						timestamp = int(listen["listened_at"])
					except:
						timestamp = int(datetime.datetime.now(tz=datetime.timezone.utc).timestamp())
					#database.createScrobble(artists,title,timestamp)
					scrobbletrack(artiststr,titlestr,timestamp)
				return 200,{"status":"ok"}
			else:
				return 200,{"status":"ok"}
		except:
			raise MalformedJSONException()

	def validate_token(self,pathnodes,keys):
		try:
			token = keys.get("token").strip()
		except:
			raise BadAuthException()
		if token not in database.allAPIkeys():
			raise InvalidAuthException()
		else:
			return 200,{"code":200,"message":"Token valid.","valid":True,"user_name":"n/a"}
@@ -6,7 +6,6 @@ from .cleanup import CleanerAgent, CollectorAgent
 from . import utilities
 from .malojatime import register_scrobbletime, time_stamps, ranges
 from .malojauri import uri_to_internal, internal_to_uri, compose_querystring
-from . import compliant_api
 from .thirdparty import proxy_scrobble_all
@@ -24,9 +23,7 @@ try:
 except: pass
 import doreah

-# nimrodel API
-from nimrodel import EAPI as API
-from nimrodel import Multi
-
 # technical
 import os
@@ -260,45 +257,9 @@ def api_key_correct(request):
 	return checkAPIkey(apikey)

-
-dbserver = API(delay=True,path="api")
-
-
-@dbserver.get("test")
-def test_server(key=None):
-	response.set_header("Access-Control-Allow-Origin","*")
-	if key is not None and not (checkAPIkey(key)):
-		response.status = 403
-		return "Wrong API key"
-
-	else:
-		response.status = 200
-		return
-
-	# 200 Database server is up and operational
-	# 403 Database server is up, but provided API key is not valid
-
-@dbserver.get("serverinfo")
-def server_info():
-
-	response.set_header("Access-Control-Allow-Origin","*")
-	response.set_header("Content-Type","application/json")
-
-	return {
-		"name":settings.get_settings("NAME"),
-		"version":version,
-		"versionstring":".".join(str(n) for n in version)
-	}
-
-## All database functions are separated - the external wrapper only reads the request keys, converts them into lists and renames them where necessary, and puts the end result in a dict if not already so it can be returned as json
-
-@dbserver.get("scrobbles")
-def get_scrobbles_external(**keys):
-	k_filter, k_time, _, k_amount = uri_to_internal(keys)
-	ckeys = {**k_filter, **k_time, **k_amount}
-
-	result = get_scrobbles(**ckeys)
-	return {"list":result}
-
 def get_scrobbles(**keys):
 	r = db_query(**{k:keys[k] for k in keys if k in ["artist","artists","title","since","to","within","timerange","associated","track","max_"]})
@@ -308,15 +269,6 @@ def get_scrobbles(**keys):
 	# return r
 	return r

-# info for comparison
-@dbserver.get("info")
-def info_external(**keys):
-
-	response.set_header("Access-Control-Allow-Origin","*")
-	response.set_header("Content-Type","application/json")
-
-	result = info()
-	return result
-
 def info():
 	totalscrobbles = get_scrobbles_num()
@@ -333,23 +285,6 @@ def info():
-
-
-# UNUSED
-#@dbserver.route("/amounts")
-#def get_amounts_external():
-#	return get_amounts() #really now
-#
-#def get_amounts():
-#	return {"scrobbles":len(SCROBBLES),"tracks":len(TRACKS),"artists":len(ARTISTS)}
-
-
-@dbserver.get("numscrobbles")
-def get_scrobbles_num_external(**keys):
-	k_filter, k_time, _, k_amount = uri_to_internal(keys)
-	ckeys = {**k_filter, **k_time, **k_amount}
-
-	result = get_scrobbles_num(**ckeys)
-	return {"amount":result}
-
 def get_scrobbles_num(**keys):
 	r = db_query(**{k:keys[k] for k in keys if k in ["artist","track","artists","title","since","to","within","timerange","associated"]})
 	return len(r)
@@ -387,32 +322,6 @@ def get_scrobbles_num(**keys):
 	# return validtracks

-
-# UNUSED
-#@dbserver.route("/charts")
-#def get_charts_external():
-#	keys = FormsDict.decode(request.query)
-#	ckeys = {}
-#	ckeys["since"], ckeys["to"], ckeys["within"] = keys.get("since"), keys.get("to"), keys.get("in")
-#
-#	result = get_scrobbles_num(**ckeys)
-#	return {"number":result}
-
-#def get_charts(**keys):
-#	return db_aggregate(**{k:keys[k] for k in keys if k in ["since","to","within"]})
-
-
-
-@dbserver.get("tracks")
-def get_tracks_external(**keys):
-	k_filter, _, _, _ = uri_to_internal(keys,forceArtist=True)
-	ckeys = {**k_filter}
-
-	result = get_tracks(**ckeys)
-	return {"list":result}
-
 def get_tracks(artist=None):
@@ -429,11 +338,6 @@ def get_tracks(artist=None):
 	#ls = [t for t in tracklist if (artist in t["artists"]) or (artist==None)]

-
-@dbserver.get("artists")
-def get_artists_external():
-	result = get_artists()
-	return {"list":result}
-
 def get_artists():
 	return ARTISTS #well
@@ -442,15 +346,6 @@ def get_artists():
-
-
-
-@dbserver.get("charts/artists")
-def get_charts_artists_external(**keys):
-	_, k_time, _, _ = uri_to_internal(keys)
-	ckeys = {**k_time}
-
-	result = get_charts_artists(**ckeys)
-	return {"list":result}
-
 def get_charts_artists(**keys):
 	return db_aggregate(by="ARTIST",**{k:keys[k] for k in keys if k in ["since","to","within","timerange"]})
@@ -459,13 +354,6 @@ def get_charts_artists(**keys):
-
-
-@dbserver.get("charts/tracks")
-def get_charts_tracks_external(**keys):
-	k_filter, k_time, _, _ = uri_to_internal(keys,forceArtist=True)
-	ckeys = {**k_filter, **k_time}
-
-	result = get_charts_tracks(**ckeys)
-	return {"list":result}
-
 def get_charts_tracks(**keys):
 	return db_aggregate(by="TRACK",**{k:keys[k] for k in keys if k in ["since","to","within","timerange","artist"]})
@@ -478,14 +366,6 @@ def get_charts_tracks(**keys):
-
-
-@dbserver.get("pulse")
-def get_pulse_external(**keys):
-	k_filter, k_time, k_internal, k_amount = uri_to_internal(keys)
-	ckeys = {**k_filter, **k_time, **k_internal, **k_amount}
-
-	results = get_pulse(**ckeys)
-	return {"list":results}
-
 def get_pulse(**keys):

 	rngs = ranges(**{k:keys[k] for k in keys if k in ["since","to","within","timerange","step","stepn","trail"]})
@@ -500,14 +380,6 @@ def get_pulse(**keys):
-
-
-@dbserver.get("performance")
-def get_performance_external(**keys):
-	k_filter, k_time, k_internal, k_amount = uri_to_internal(keys)
-	ckeys = {**k_filter, **k_time, **k_internal, **k_amount}
-
-	results = get_performance(**ckeys)
-	return {"list":results}
-
 def get_performance(**keys):

 	rngs = ranges(**{k:keys[k] for k in keys if k in ["since","to","within","timerange","step","stepn","trail"]})
@@ -539,14 +411,6 @@ def get_performance(**keys):
-
-
-@dbserver.get("top/artists")
-def get_top_artists_external(**keys):
-	_, k_time, k_internal, _ = uri_to_internal(keys)
-	ckeys = {**k_time, **k_internal}
-
-	results = get_top_artists(**ckeys)
-	return {"list":results}
-
 def get_top_artists(**keys):

 	rngs = ranges(**{k:keys[k] for k in keys if k in ["since","to","within","timerange","step","stepn","trail"]})
@@ -569,17 +433,6 @@ def get_top_artists(**keys):
-
-
-
-@dbserver.get("top/tracks")
-def get_top_tracks_external(**keys):
-	_, k_time, k_internal, _ = uri_to_internal(keys)
-	ckeys = {**k_time, **k_internal}
-
-	# IMPLEMENT THIS FOR TOP TRACKS OF ARTIST AS WELL?
-
-	results = get_top_tracks(**ckeys)
-	return {"list":results}
-
 def get_top_tracks(**keys):

 	rngs = ranges(**{k:keys[k] for k in keys if k in ["since","to","within","timerange","step","stepn","trail"]})
@@ -604,14 +457,6 @@ def get_top_tracks(**keys):
-
-
-@dbserver.get("artistinfo")
-def artistInfo_external(**keys):
-	k_filter, _, _, _ = uri_to_internal(keys,forceArtist=True)
-	ckeys = {**k_filter}
-
-	results = artistInfo(**ckeys)
-	return results
-
 def artistInfo(artist):

 	charts = db_aggregate(by="ARTIST")
@@ -642,17 +487,6 @@ def artistInfo(artist):
-
-
-@dbserver.get("trackinfo")
-def trackInfo_external(artist:Multi[str],**keys):
-	# transform into a multidict so we can use our nomral uri_to_internal function
-	keys = FormsDict(keys)
-	for a in artist:
-		keys.append("artist",a)
-	k_filter, _, _, _ = uri_to_internal(keys,forceTrack=True)
-	ckeys = {**k_filter}
-
-	results = trackInfo(**ckeys)
-	return results
-
 def trackInfo(track):
 	charts = db_aggregate(by="TRACK")
@@ -679,11 +513,6 @@ def trackInfo(track):
-
-
-@dbserver.get("compare")
-def compare_external(**keys):
-
-	results = compare(keys["remote"])
-	return results
-
 def compare(remoteurl):
 	import json
@@ -737,58 +566,25 @@ def compare(remoteurl):
 	}

-@dbserver.get("newscrobble")
-@dbserver.post("newscrobble")
-@authenticated_api_with_alternate(api_key_correct)
-def post_scrobble(artist:Multi,**keys):
-	artists = "/".join(artist)
-	title = keys.get("title")
-	album = keys.get("album")
-	duration = keys.get("seconds")
-
-	try:
-		time = int(keys.get("time"))
-	except:
-		time = int(datetime.datetime.now(tz=datetime.timezone.utc).timestamp())
+def incoming_scrobble(artists,title,album=None,duration=None,time=None):
+	artists = "/".join(artists)
+	if time is None:
+		time = int(datetime.datetime.now(tz=datetime.timezone.utc).timestamp())

-	log("Incoming scrobble (native API): ARTISTS: " + str(artists) + ", TRACK: " + title,module="debug")
+	log("Incoming scrobble (): ARTISTS: " + str(artists) + ", TRACK: " + title,module="debug")
 	(artists,title) = cla.fullclean(artists,title)

-	## this is necessary for localhost testing
-	#response.set_header("Access-Control-Allow-Origin","*")
-
 	trackdict = createScrobble(artists,title,time,album,duration)

 	sync()
-	#always sync, one filesystem access every three minutes shouldn't matter

 	return {"status":"success","track":trackdict}

-
-# standard-compliant scrobbling methods
-
-@dbserver.post("s/{path}",pass_headers=True)
-@dbserver.get("s/{path}",pass_headers=True)
-def sapi(path:Multi,**keys):
-	"""Scrobbles according to a standardized protocol.
-
-	:param string path: Path according to the scrobble protocol
-	:param string keys: Query keys according to the scrobble protocol
-	"""
-	path = list(filter(None,path))
-	return compliant_api.handle(path,keys)
-
-
-@dbserver.post("newrule")
-@authenticated_api
-def newrule(**keys):
-	tsv.add_entry(datadir("rules/webmade.tsv"),[k for k in keys])
-	#addEntry("rules/webmade.tsv",[k for k in keys])
-

 def issues():
|
|||||||
|
|
||||||
return rulesets
|
return rulesets
|
||||||
|
|
||||||
@dbserver.post("importrules")
|
|
||||||
@authenticated_api
|
|
||||||
def import_rulemodule(**keys):
|
|
||||||
filename = keys.get("filename")
|
|
||||||
remove = keys.get("remove") is not None
|
|
||||||
validchars = "-_abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
|
|
||||||
filename = "".join(c for c in filename if c in validchars)
|
|
||||||
|
|
||||||
if remove:
|
|
||||||
log("Deactivating predefined rulefile " + filename)
|
|
||||||
os.remove(datadir("rules/" + filename + ".tsv"))
|
|
||||||
else:
|
|
||||||
log("Importing predefined rulefile " + filename)
|
|
||||||
os.symlink(datadir("rules/predefined/" + filename + ".tsv"),datadir("rules/" + filename + ".tsv"))
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
@dbserver.post("rebuild")
|
|
||||||
@authenticated_api
|
|
||||||
def rebuild(**keys):
|
|
||||||
log("Database rebuild initiated!")
|
|
||||||
sync()
|
|
||||||
from .proccontrol.tasks.fixexisting import fix
|
|
||||||
fix()
|
|
||||||
global cla, coa
|
|
||||||
cla = CleanerAgent()
|
|
||||||
coa = CollectorAgent()
|
|
||||||
build_db()
|
|
||||||
invalidate_caches()
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
@dbserver.get("search")
|
|
||||||
def search(**keys):
|
|
||||||
query = keys.get("query")
|
|
||||||
max_ = keys.get("max")
|
|
||||||
if max_ is not None: max_ = int(max_)
|
|
||||||
query = query.lower()
|
|
||||||
|
|
||||||
artists = db_search(query,type="ARTIST")
|
|
||||||
tracks = db_search(query,type="TRACK")
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# if the string begins with the query it's a better match, if a word in it begins with it, still good
|
|
||||||
# also, shorter is better (because longer titles would be easier to further specify)
|
|
||||||
artists.sort(key=lambda x: ((0 if x.lower().startswith(query) else 1 if " " + query in x.lower() else 2),len(x)))
|
|
||||||
tracks.sort(key=lambda x: ((0 if x["title"].lower().startswith(query) else 1 if " " + query in x["title"].lower() else 2),len(x["title"])))
|
|
||||||
|
|
||||||
# add links
|
|
||||||
artists_result = []
|
|
||||||
for a in artists:
|
|
||||||
result = {"name":a}
|
|
||||||
result["link"] = "/artist?" + compose_querystring(internal_to_uri({"artist":a}))
|
|
||||||
result["image"] = "/image?" + compose_querystring(internal_to_uri({"artist":a}))
|
|
||||||
artists_result.append(result)
|
|
||||||
|
|
||||||
tracks_result = []
|
|
||||||
for t in tracks:
|
|
||||||
result = t
|
|
||||||
result["link"] = "/track?" + compose_querystring(internal_to_uri({"track":t}))
|
|
||||||
result["image"] = "/image?" + compose_querystring(internal_to_uri({"track":t}))
|
|
||||||
tracks_result.append(result)
|
|
||||||
|
|
||||||
return {"artists":artists_result[:max_],"tracks":tracks_result[:max_]}
|
|
||||||
|
|
||||||
|
|
||||||
@dbserver.post("addpicture")
|
|
||||||
@authenticated_api
|
|
||||||
def add_picture(b64,artist:Multi=[],title=None):
|
|
||||||
keys = FormsDict()
|
|
||||||
for a in artist:
|
|
||||||
keys.append("artist",a)
|
|
||||||
if title is not None: keys.append("title",title)
|
|
||||||
k_filter, _, _, _, _ = uri_to_internal(keys)
|
|
||||||
if "track" in k_filter: k_filter = k_filter["track"]
|
|
||||||
utilities.set_image(b64,**k_filter)
|
|
||||||
|
|
||||||
####
|
####
|
||||||
## Server operation
|
## Server operation
|
||||||
|
@@ -15,9 +15,10 @@ from . import malojatime
 from . import utilities
 from . import malojauri
 from .utilities import resolveImage
-from .malojauri import uri_to_internal, remove_identical
+from .malojauri import uri_to_internal, remove_identical, compose_querystring
 from . import globalconf
 from .jinjaenv.context import jinja_environment
+from jinja2.exceptions import TemplateNotFound
 # doreah toolkit
 from doreah import settings
 from doreah.logging import log
|
|||||||
webserver = Bottle()
|
webserver = Bottle()
|
||||||
auth.authapi.mount(server=webserver)
|
auth.authapi.mount(server=webserver)
|
||||||
|
|
||||||
|
from .apis import init_apis
|
||||||
|
init_apis(webserver)
|
||||||
|
|
||||||
|
# redirects for backwards compatibility
|
||||||
|
@webserver.get("/api/s/<pth:path>")
|
||||||
|
@webserver.post("/api/s/<pth:path>")
|
||||||
|
def deprecated_api_s(pth):
|
||||||
|
redirect("/apis/" + pth + "?" + compose_querystring(request.query))
|
||||||
|
|
||||||
|
@webserver.get("/api/<pth:path>")
|
||||||
|
@webserver.post("/api/<pth:path>")
|
||||||
|
def deprecated_api(pth):
|
||||||
|
redirect("/apis/mlj_1/" + pth + "?" + compose_querystring(request.query))
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
pthjoin = os.path.join
|
pthjoin = os.path.join
|
||||||
|
|
||||||
def generate_css():
|
def generate_css():
|
||||||
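The two catch-all routes above keep old client configurations working: a request to an /api/... URL is answered with a redirect to the corresponding /apis/... path. A sketch (host and port are placeholders):

import urllib.request

# GET /api/numscrobbles is redirected to /apis/mlj_1/numscrobbles by deprecated_api()
old_url = "http://localhost:42010/api/numscrobbles"
print(urllib.request.urlopen(old_url).read().decode())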
@@ -91,8 +110,8 @@ def mainpage():
 def customerror(error):
 	errorcode = error.status_code
 	errordesc = error.status
-	traceback = error.traceback.strip()
+	traceback = error.traceback
+	traceback = traceback.strip() if traceback is not None else "No Traceback"
 	adminmode = request.cookies.get("adminmode") == "true" and auth.check(request)

 	template = jinja_environment.get_template('error.jinja')
@@ -246,7 +265,6 @@ setproctitle.setproctitle("Maloja")

 ## start database
 database.start_db()
-database.dbserver.mount(server=webserver)

 log("Starting up Maloja server...")
 #run(webserver, host=HOST, port=MAIN_PORT, server='waitress')