logging improvements (#2073)

* Scheduled tasks log to Debug, not Info

* Add LOG_LEVEL config to .env

* Update some other log levels and fix typos

* fix logger initializer

---------

Co-authored-by: Jakob Rubin <647846+Grygon@users.noreply.github.com>
This commit is contained in:
Hayden 2023-01-28 16:54:44 -09:00 committed by GitHub
parent 9ddb27b2e3
commit 530f7c9d6b
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
13 changed files with 28 additions and 24 deletions

View File

@@ -27,6 +27,8 @@ class LoggerConfig:
def get_logger_config(): def get_logger_config():
settings = get_app_settings() settings = get_app_settings()
log_level = logging._nameToLevel[settings.LOG_LEVEL]
if not settings.PRODUCTION: if not settings.PRODUCTION:
from rich.logging import RichHandler from rich.logging import RichHandler
@@ -35,6 +37,7 @@ def get_logger_config():
format=None, format=None,
date_format=None, date_format=None,
logger_file=None, logger_file=None,
level=log_level,
) )
output_file_handler = logging.FileHandler(LOGGER_FILE) output_file_handler = logging.FileHandler(LOGGER_FILE)
@@ -50,6 +53,7 @@ def get_logger_config():
format="%(levelname)s: %(asctime)s \t%(message)s", format="%(levelname)s: %(asctime)s \t%(message)s",
date_format="%d-%b-%y %H:%M:%S", date_format="%d-%b-%y %H:%M:%S",
logger_file=LOGGER_FILE, logger_file=LOGGER_FILE,
level=log_level,
) )

View File

@@ -30,6 +30,7 @@ class AppSettings(BaseSettings):
API_DOCS: bool = True API_DOCS: bool = True
TOKEN_TIME: int = 48 # Time in Hours TOKEN_TIME: int = 48 # Time in Hours
SECRET: str SECRET: str
LOG_LEVEL: str = "INFO" # Corresponds to standard Python log levels.
GIT_COMMIT_HASH: str = "unknown" GIT_COMMIT_HASH: str = "unknown"

View File

@@ -87,7 +87,7 @@ def main():
alembic_cfg = Config(str(PROJECT_DIR / "alembic.ini")) alembic_cfg = Config(str(PROJECT_DIR / "alembic.ini"))
if db_is_at_head(alembic_cfg): if db_is_at_head(alembic_cfg):
logger.info("Migration not needed.") logger.debug("Migration not needed.")
else: else:
logger.info("Migration needed. Performing migration...") logger.info("Migration needed. Performing migration...")
command.upgrade(alembic_cfg, "head") command.upgrade(alembic_cfg, "head")
@@ -95,7 +95,7 @@ def main():
db = get_repositories(session) db = get_repositories(session)
if db.users.get_all(): if db.users.get_all():
logger.info("Database exists") logger.debug("Database exists")
else: else:
logger.info("Database contains no users, initializing...") logger.info("Database contains no users, initializing...")
init_db(db) init_db(db)

View File

@@ -96,7 +96,7 @@ class DefaultEmailSender(ABCEmailSender, BaseService):
smtp_options.password = self.settings.SMTP_PASSWORD smtp_options.password = self.settings.SMTP_PASSWORD
response = message.send(to=email_to, smtp=smtp_options) response = message.send(to=email_to, smtp=smtp_options)
self.logger.info(f"send email result: {response}") self.logger.debug(f"send email result: {response}")
if not response.success: if not response.success:
self.logger.error(f"send email error: {response}") self.logger.error(f"send email error: {response}")

View File

@@ -156,7 +156,6 @@ class RecipeDataService(BaseService):
self.logger.error(f"Content-Type: {content_type} is not an image") self.logger.error(f"Content-Type: {content_type} is not an image")
raise NotAnImageError(f"Content-Type {content_type} is not an image") raise NotAnImageError(f"Content-Type {content_type} is not an image")
self.logger.info(f"File Name Suffix {file_path.suffix}") self.logger.debug(f"File Name Suffix {file_path.suffix}")
self.write_image(r.read(), file_path.suffix) self.write_image(r.read(), file_path.suffix)
file_path.unlink(missing_ok=True) file_path.unlink(missing_ok=True)

View File

@@ -63,7 +63,7 @@ class RecipeService(BaseService):
try: try:
copytree(current_dir, recipe.directory, dirs_exist_ok=True) copytree(current_dir, recipe.directory, dirs_exist_ok=True)
self.logger.info(f"Renaming Recipe Directory: {original_slug} -> {recipe.slug}") self.logger.debug(f"Renaming Recipe Directory: {original_slug} -> {recipe.slug}")
except FileNotFoundError: except FileNotFoundError:
self.logger.error(f"Recipe Directory not Found: {original_slug}") self.logger.error(f"Recipe Directory not Found: {original_slug}")

View File

@@ -17,7 +17,7 @@ class SchedulerRegistry:
@staticmethod @staticmethod
def _register(name: str, callbacks: list[Callable], callback: Iterable[Callable]): def _register(name: str, callbacks: list[Callable], callback: Iterable[Callable]):
for cb in callback: for cb in callback:
logger.info(f"Registering {name} callback: {cb.__name__}") logger.debug(f"Registering {name} callback: {cb.__name__}")
callbacks.append(cb) callbacks.append(cb)
@staticmethod @staticmethod
@@ -26,7 +26,7 @@ class SchedulerRegistry:
@staticmethod @staticmethod
def remove_daily(callback: Callable): def remove_daily(callback: Callable):
logger.info(f"Removing daily callback: {callback.__name__}") logger.debug(f"Removing daily callback: {callback.__name__}")
SchedulerRegistry._daily.remove(callback) SchedulerRegistry._daily.remove(callback)
@staticmethod @staticmethod
@@ -35,7 +35,7 @@ class SchedulerRegistry:
@staticmethod @staticmethod
def remove_hourly(callback: Callable): def remove_hourly(callback: Callable):
logger.info(f"Removing hourly callback: {callback.__name__}") logger.debug(f"Removing hourly callback: {callback.__name__}")
SchedulerRegistry._hourly.remove(callback) SchedulerRegistry._hourly.remove(callback)
@staticmethod @staticmethod
@@ -44,16 +44,16 @@ class SchedulerRegistry:
@staticmethod @staticmethod
def remove_minutely(callback: Callable): def remove_minutely(callback: Callable):
logger.info(f"Removing minutely callback: {callback.__name__}") logger.debug(f"Removing minutely callback: {callback.__name__}")
SchedulerRegistry._minutely.remove(callback) SchedulerRegistry._minutely.remove(callback)
@staticmethod @staticmethod
def print_jobs(): def print_jobs():
for job in SchedulerRegistry._daily: for job in SchedulerRegistry._daily:
logger.info(f"Daily job: {job.__name__}") logger.debug(f"Daily job: {job.__name__}")
for job in SchedulerRegistry._hourly: for job in SchedulerRegistry._hourly:
logger.info(f"Hourly job: {job.__name__}") logger.debug(f"Hourly job: {job.__name__}")
for job in SchedulerRegistry._minutely: for job in SchedulerRegistry._minutely:
logger.info(f"Minutely job: {job.__name__}") logger.debug(f"Minutely job: {job.__name__}")

View File

@@ -31,20 +31,20 @@ def _scheduled_task_wrapper(callable):
@repeat_every(minutes=MINUTES_DAY, wait_first=True, logger=logger) @repeat_every(minutes=MINUTES_DAY, wait_first=True, logger=logger)
def run_daily(): def run_daily():
logger.info("Running daily callbacks") logger.debug("Running daily callbacks")
for func in SchedulerRegistry._daily: for func in SchedulerRegistry._daily:
_scheduled_task_wrapper(func) _scheduled_task_wrapper(func)
@repeat_every(minutes=MINUTES_HOUR, wait_first=True, logger=logger) @repeat_every(minutes=MINUTES_HOUR, wait_first=True, logger=logger)
def run_hourly(): def run_hourly():
logger.info("Running hourly callbacks") logger.debug("Running hourly callbacks")
for func in SchedulerRegistry._hourly: for func in SchedulerRegistry._hourly:
_scheduled_task_wrapper(func) _scheduled_task_wrapper(func)
@repeat_every(minutes=MINUTES_5, wait_first=True, logger=logger) @repeat_every(minutes=MINUTES_5, wait_first=True, logger=logger)
def run_minutely(): def run_minutely():
logger.info("Running minutely callbacks") logger.debug("Running minutely callbacks")
for func in SchedulerRegistry._minutely: for func in SchedulerRegistry._minutely:
_scheduled_task_wrapper(func) _scheduled_task_wrapper(func)

View File

@@ -13,7 +13,7 @@ def purge_group_data_exports(max_minutes_old=ONE_DAY_AS_MINUTES):
"""Purges all group exports after x days""" """Purges all group exports after x days"""
logger = root_logger.get_logger() logger = root_logger.get_logger()
logger.info("purging group data exports") logger.debug("purging group data exports")
limit = datetime.datetime.now() - datetime.timedelta(minutes=max_minutes_old) limit = datetime.datetime.now() - datetime.timedelta(minutes=max_minutes_old)
with session_context() as session: with session_context() as session:
@@ -41,6 +41,6 @@ def purge_excess_files() -> None:
# TODO: fix comparison types # TODO: fix comparison types
if file.stat().st_mtime < limit: # type: ignore if file.stat().st_mtime < limit: # type: ignore
file.unlink() file.unlink()
logger.info(f"excess group file removed '{file}'") logger.debug(f"excess group file removed '{file}'")
logger.info("finished purging excess files") logger.info("finished purging excess files")

View File

@@ -11,11 +11,11 @@ MAX_DAYS_OLD = 2
def purge_password_reset_tokens(): def purge_password_reset_tokens():
"""Purges all events after x days""" """Purges all events after x days"""
logger.info("purging password reset tokens") logger.debug("purging password reset tokens")
limit = datetime.datetime.now() - datetime.timedelta(days=MAX_DAYS_OLD) limit = datetime.datetime.now() - datetime.timedelta(days=MAX_DAYS_OLD)
with session_context() as session: with session_context() as session:
session.query(PasswordResetModel).filter(PasswordResetModel.created_at <= limit).delete() session.query(PasswordResetModel).filter(PasswordResetModel.created_at <= limit).delete()
session.commit() session.commit()
session.close() session.close()
logger.info("password reset tokens purges") logger.info("password reset tokens purged")

View File

@@ -11,7 +11,7 @@ MAX_DAYS_OLD = 4
def purge_group_registration(): def purge_group_registration():
"""Purges all events after x days""" """Purges all events after x days"""
logger.info("purging expired registration tokens") logger.debug("purging expired registration tokens")
limit = datetime.datetime.now() - datetime.timedelta(days=MAX_DAYS_OLD) limit = datetime.datetime.now() - datetime.timedelta(days=MAX_DAYS_OLD)
with session_context() as session: with session_context() as session:

View File

@@ -6,12 +6,12 @@ from mealie.services.user_services.user_service import UserService
def locked_user_reset(): def locked_user_reset():
logger = root_logger.get_logger() logger = root_logger.get_logger()
logger.info("resetting locked users") logger.debug("resetting locked users")
with session_context() as session: with session_context() as session:
repos = AllRepositories(session) repos = AllRepositories(session)
user_service = UserService(repos) user_service = UserService(repos)
unlocked = user_service.reset_locked_users() unlocked = user_service.reset_locked_users()
logger.info(f"scheduled task unlocked {unlocked} users in the database") logger.debug(f"scheduled task unlocked {unlocked} users in the database")
logger.info("locked users reset") logger.info("locked users reset")

View File

@@ -37,7 +37,7 @@ async def create_from_url(url: str) -> tuple[Recipe, ScrapedExtras | None]:
new_recipe.id = uuid4() new_recipe.id = uuid4()
logger = get_logger() logger = get_logger()
logger.info(f"Image {new_recipe.image}") logger.debug(f"Image {new_recipe.image}")
recipe_data_service = RecipeDataService(new_recipe.id) recipe_data_service = RecipeDataService(new_recipe.id)