Merge pull request #35 from tiqi-group/34-remove-loguru-dependency-and-use-std-logging

34 remove loguru dependency and use std logging
Mose Müller 2023-10-16 17:16:33 +02:00 committed by GitHub
commit 7a78713388
18 changed files with 841 additions and 607 deletions

View File

@@ -479,7 +479,7 @@ For more information about what you can do with the units, please consult the do
## Changing the Log Level
You can change the log level of loguru by either
You can change the log level of the logger by either
1. (RECOMMENDED) setting the `ENVIRONMENT` environment variable to "production" or "development" (see the sketch after the code example below)
@@ -493,10 +493,14 @@ You can change the log level of loguru by either
```python
# <your_script.py>
import logging
from pydase.utils.logging import setup_logging
setup_logging("INFO")
setup_logging("INFO") # or setup_logging(logging.INFO)
logger = logging.getLogger(__name__)
# ... and log your messages
logger.info("My info message.")
```
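For the environment-variable option, here is a minimal sketch (setting the variable from Python before importing `pydase` is only an illustration; exporting `ENVIRONMENT` in the shell that starts the service works just as well, and the exact moment `pydase` reads the variable is an assumption here, not something this diff states):

```python
# <your_script.py>
import os

# Must be set before pydase determines its operation mode.
os.environ["ENVIRONMENT"] = "development"  # or "production"

import pydase  # noqa: E402  deliberately imported after setting the variable
```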
## Documentation

poetry.lock (generated, 1166 changed lines)

File diff suppressed because it is too large.

View File

@@ -10,7 +10,6 @@ packages = [{ include = "pydase", from = "src" }]
[tool.poetry.dependencies]
python = "^3.10"
rpyc = "^5.3.1"
loguru = "^0.7.0"
fastapi = "^0.100.0"
uvicorn = "^0.22.0"
toml = "^0.10.2"

View File

@@ -1,17 +1,19 @@
import base64
import io
import logging
from pathlib import Path
from typing import TYPE_CHECKING, Optional
from urllib.request import urlopen
import PIL.Image
from loguru import logger
from pydase.data_service.data_service import DataService
if TYPE_CHECKING:
from matplotlib.figure import Figure
logger = logging.getLogger(__name__)
class Image(DataService):
def __init__(
@@ -54,7 +56,7 @@ class Image(DataService):
self._load_from_base64(value_, format_)
def _load_from_base64(self, value_: bytes, format_: str) -> None:
value = value_.decode("utf-8") if isinstance(value_, bytes) else value_
value = value_.decode("utf-8")
self._value = value
self._format = format_

View File

@@ -1,9 +1,10 @@
import logging
from typing import Any, Literal
from loguru import logger
from pydase.data_service.data_service import DataService
logger = logging.getLogger(__name__)
class NumberSlider(DataService):
"""

View File

@@ -1,11 +1,10 @@
from __future__ import annotations
import inspect
import logging
from collections.abc import Callable
from typing import TYPE_CHECKING, Any, cast
from loguru import logger
from pydase.data_service.abstract_data_service import AbstractDataService
from pydase.utils.helpers import get_class_and_instance_attributes
@@ -14,6 +13,8 @@ from .data_service_list import DataServiceList
if TYPE_CHECKING:
from .data_service import DataService
logger = logging.getLogger(__name__)
class CallbackManager:
_notification_callbacks: list[Callable[[str, str, Any], Any]] = []

View File

@@ -1,12 +1,12 @@
import asyncio
import inspect
import json
import logging
import os
from enum import Enum
from typing import Any, Optional, cast, get_type_hints
import rpyc
from loguru import logger
import pydase.units as u
from pydase.data_service.abstract_data_service import AbstractDataService
@@ -27,6 +27,8 @@ from pydase.utils.warnings import (
warn_if_instance_class_does_not_inherit_from_DataService,
)
logger = logging.getLogger(__name__)
def process_callable_attribute(attr: Any, args: dict[str, Any]) -> Any:
converted_args_or_error_msg = convert_arguments_to_hinted_types(

View File

@@ -2,15 +2,16 @@ from __future__ import annotations
import asyncio
import inspect
import logging
from collections.abc import Callable
from functools import wraps
from typing import TYPE_CHECKING, Any, TypedDict
from loguru import logger
if TYPE_CHECKING:
from .data_service import DataService
logger = logging.getLogger(__name__)
class TaskDict(TypedDict):
task: asyncio.Task[None]

View File

@@ -1,4 +1,5 @@
import asyncio
import logging
import os
import signal
import threading
@@ -8,7 +9,6 @@ from types import FrameType
from typing import Any, Optional, Protocol, TypedDict
import uvicorn
from loguru import logger
from rpyc import (
ForkingServer, # can be used for multiprocessing, e.g. a database interface server
)
@@ -21,6 +21,8 @@ from pydase.version import __version__
from .web_server import WebAPI
logger = logging.getLogger(__name__)
class AdditionalServerProtocol(Protocol):
"""

View File

@@ -1,3 +1,4 @@
import logging
from pathlib import Path
from typing import Any, TypedDict
@@ -5,11 +6,12 @@ import socketio
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from fastapi.staticfiles import StaticFiles
from loguru import logger
from pydase import DataService
from pydase.version import __version__
logger = logging.getLogger(__name__)
class UpdateDict(TypedDict):
"""

View File

@@ -1,8 +1,9 @@
import logging
import re
from itertools import chain
from typing import Any, Optional, cast
from loguru import logger
logger = logging.getLogger(__name__)
STANDARD_TYPES = ("int", "float", "bool", "str", "Enum", "NoneType", "Quantity")

View File

@@ -1,82 +1,111 @@
import logging
import sys
from types import FrameType
from copy import copy
from typing import Optional
import loguru
import rpyc
import uvicorn.logging
from uvicorn.config import LOGGING_CONFIG
import pydase.config
ALLOWED_LOG_LEVELS = ["DEBUG", "INFO", "ERROR"]
class DefaultFormatter(uvicorn.logging.ColourizedFormatter):
"""
A custom log formatter class that:
* Outputs the LOG_LEVEL with an appropriate color.
* If a log call includes an `extras={"color_message": ...}` it will be used
for formatting the output, instead of the plain text message.
"""
def formatMessage(self, record: logging.LogRecord) -> str:
recordcopy = copy(record)
levelname = recordcopy.levelname
seperator = " " * (8 - len(recordcopy.levelname))
if self.use_colors:
levelname = self.color_level_name(levelname, recordcopy.levelno)
if "color_message" in recordcopy.__dict__:
recordcopy.msg = recordcopy.__dict__["color_message"]
recordcopy.__dict__["message"] = recordcopy.getMessage()
recordcopy.__dict__["levelprefix"] = levelname + seperator
return logging.Formatter.formatMessage(self, recordcopy)
def should_use_colors(self) -> bool:
return sys.stderr.isatty() # pragma: no cover
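A usage sketch for the formatter above (the handler wiring mirrors what the new `setup_logging` does later in this file; passing `use_colors` straight to uvicorn's `ColourizedFormatter` constructor and supplying `color_message` through the standard `extra` dict are assumptions of this sketch, not something the diff shows verbatim):

```python
import logging
import sys

handler = logging.StreamHandler(sys.stderr)
handler.setFormatter(
    DefaultFormatter(
        fmt="%(levelprefix)s %(name)s - %(message)s",
        use_colors=True,  # force colours regardless of TTY detection
    )
)

logger = logging.getLogger(__name__)
logger.addHandler(handler)
logger.setLevel(logging.INFO)

# Plain message for non-colour sinks; formatMessage() swaps in the colourised
# variant when it is provided under the "color_message" key via `extra`.
logger.info(
    "Server started.",
    extra={"color_message": "Server \033[32mstarted\033[0m."},
)
```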
# from: https://github.com/Delgan/loguru section
# "Entirely compatible with standard logging"
class InterceptHandler(logging.Handler):
def emit(self, record: logging.LogRecord) -> None:
# Ignore "asyncio.CancelledError" raised by uvicorn
if record.name == "uvicorn.error" and "CancelledError" in record.msg:
return
def setup_logging(level: Optional[str | int] = None) -> None:
"""
Configures the logging settings for the application.
# Get corresponding Loguru level if it exists.
level: int | str
try:
level = loguru.logger.level(record.levelname).name
except ValueError:
level = record.levelno
This function sets up logging with specific formatting and colorization of log
messages. The log level is determined based on the application's operation mode,
with an option to override the level. By default, in a development environment, the
log level is set to DEBUG, whereas in other environments, it is set to INFO.
# Find caller from where originated the logged message.
frame: Optional[FrameType] = sys._getframe(6)
depth = 6
while frame and frame.f_code.co_filename == logging.__file__:
frame = frame.f_back
depth += 1
Parameters:
level (Optional[str | int]):
A specific log level to set for the application. If None, the log level is
determined based on the application's operation mode. Accepts standard log
level names ('DEBUG', 'INFO', etc.) and corresponding numerical values.
try:
msg = record.getMessage()
except TypeError:
# A `TypeError` is raised when the `msg` string expects more arguments
# than are provided by `args`. This can happen when intercepting log
# messages with a certain format, like
# > logger.debug("call: %s%r", method_name, *args) # in tiqi_rpc
# where `*args` unpacks a sequence of values that should replace
# placeholders in the string.
msg = record.msg % (record.args[0], record.args[2:]) # type: ignore
Example:
loguru.logger.opt(depth=depth, exception=record.exc_info).log(level, msg)
```python
>>> import logging
>>> setup_logging(logging.DEBUG)
>>> setup_logging("INFO")
```
"""
def setup_logging(level: Optional[str] = None) -> None:
loguru.logger.debug("Configuring service logging.")
logger = logging.getLogger()
if pydase.config.OperationMode().environment == "development":
log_level = "DEBUG"
log_level = logging.DEBUG
else:
log_level = "INFO"
log_level = logging.INFO
if level is not None and level in ALLOWED_LOG_LEVELS:
log_level = level
# If a level is specified, check whether it's a string or an integer.
if level is not None:
if isinstance(level, str):
# Convert known log level strings directly to their corresponding logging
# module constants.
level_name = level.upper() # Ensure level names are uppercase
if hasattr(logging, level_name):
log_level = getattr(logging, level_name)
else:
raise ValueError(
f"Invalid log level: {level}. Must be one of 'DEBUG', 'INFO', "
"'WARNING', 'ERROR', etc."
)
elif isinstance(level, int):
log_level = level # Directly use integer levels
else:
raise ValueError("Log level must be a string or an integer.")
loguru.logger.remove()
loguru.logger.add(sys.stderr, level=log_level)
# Set the logger's level.
logger.setLevel(log_level)
# set up the rpyc logger *before* adding the InterceptHandler to the logging module
rpyc.setup_logger(quiet=True) # type: ignore
# create console handler and set level to debug
ch = logging.StreamHandler()
logging.basicConfig(handlers=[InterceptHandler()], level=0)
# add formatter to ch
ch.setFormatter(
DefaultFormatter(
fmt="%(asctime)s.%(msecs)03d | %(levelprefix)s | %(name)s:%(funcName)s:%(lineno)d - %(message)s",
datefmt="%Y-%m-%d %H:%M:%S",
)
)
# add ch to logger
logger.addHandler(ch)
logger.debug("Configuring service logging.")
logging.getLogger("asyncio").setLevel(logging.INFO)
logging.getLogger("urllib3").setLevel(logging.INFO)
# overwriting the uvicorn logging config to use the loguru intercept handler
LOGGING_CONFIG["handlers"] = {
"default": {
"()": InterceptHandler,
"formatter": "default",
},
"access": {
"()": InterceptHandler,
"formatter": "access",
},
}
# configuring uvicorn logger
LOGGING_CONFIG["formatters"]["default"][
"fmt"
] = "%(asctime)s.%(msecs)03d | %(levelprefix)s %(message)s"
LOGGING_CONFIG["formatters"]["default"]["datefmt"] = "%Y-%m-%d %H:%M:%S"

View File

@@ -1,7 +1,9 @@
from loguru import logger
import logging
from pydase.utils.helpers import get_component_class_names
logger = logging.getLogger(__name__)
def warn_if_instance_class_does_not_inherit_from_DataService(__value: object) -> None:
base_class_name = __value.__class__.__base__.__name__

View File

@@ -1,21 +1,10 @@
from collections.abc import Generator
from typing import Any
import pytest
from loguru import logger
from pytest import LogCaptureFixture
from pydase import DataService
from pydase.data_service.callback_manager import CallbackManager
@pytest.fixture
def caplog(caplog: LogCaptureFixture) -> Generator[LogCaptureFixture, Any, None]:
handler_id = logger.add(caplog.handler, format="{message}")
yield caplog
logger.remove(handler_id)
def emit(self: Any, parent_path: str, name: str, value: Any) -> None:
if isinstance(value, DataService):
value = value.serialize()

View File

@@ -3,8 +3,6 @@ from pytest import CaptureFixture, LogCaptureFixture
from pydase.components.coloured_enum import ColouredEnum
from pydase.data_service.data_service import DataService
from .. import caplog # noqa
def test_ColouredEnum(capsys: CaptureFixture) -> None:
class MyStatus(ColouredEnum):

View File

@@ -3,8 +3,6 @@ from pytest import CaptureFixture, LogCaptureFixture
from pydase.components.number_slider import NumberSlider
from pydase.data_service.data_service import DataService
from .. import caplog # noqa
def test_NumberSlider(capsys: CaptureFixture) -> None:
class ServiceClass(DataService):

View File

@@ -0,0 +1,71 @@
import logging
from pytest import LogCaptureFixture
from pydase.utils.logging import setup_logging
def test_log_error(caplog: LogCaptureFixture):
setup_logging("ERROR")
logger = logging.getLogger()
logger.debug("This is a debug message")
logger.info("This is an info message")
logger.warning("This is a warning message")
logger.error("This is an error message")
# Check the log records as well as the level.
assert "This is a debug message" not in caplog.text
assert "This is an info message" not in caplog.text
assert "This is a warning message" not in caplog.text
assert "This is an error message" in caplog.text
assert any(record.levelname == "ERROR" for record in caplog.records)
def test_log_warning(caplog: LogCaptureFixture):
setup_logging("WARNING")
logger = logging.getLogger()
logger.debug("This is a debug message")
logger.info("This is an info message")
logger.warning("This is a warning message")
logger.error("This is an error message")
# Check the log records as well as the level.
assert "This is a debug message" not in caplog.text
assert "This is an info message" not in caplog.text
assert "This is a warning message" in caplog.text
assert "This is an error message" in caplog.text
assert any(record.levelname == "ERROR" for record in caplog.records)
def test_log_debug(caplog: LogCaptureFixture):
setup_logging("DEBUG")
logger = (
logging.getLogger()
) # Get the root logger or replace with the appropriate logger.
logger.debug("This is a debug message")
logger.info("This is an info message")
logger.warning("This is a warning message")
logger.error("This is an error message")
# Now, check that the message is in the log records.
assert "This is a debug message" in caplog.text
assert "This is an info message" in caplog.text
assert "This is a warning message" in caplog.text
assert "This is an error message" in caplog.text
def test_log_info(caplog: LogCaptureFixture):
setup_logging("INFO")
logger = (
logging.getLogger()
) # Get the root logger or replace with the appropriate logger.
logger.debug("This is a debug message")
logger.info("This is an info message")
logger.warning("This is a warning message")
logger.error("This is an error message")
# Now, check that the message is in the log records.
assert "This is a debug message" not in caplog.text
assert "This is an info message" in caplog.text
assert "This is a warning message" in caplog.text
assert "This is an error message" in caplog.text

View File

@@ -2,8 +2,6 @@ from pytest import LogCaptureFixture
from pydase import DataService
from .. import caplog # noqa
def test_setattr_warnings(caplog: LogCaptureFixture) -> None: # noqa
# def test_setattr_warnings(capsys: CaptureFixture) -> None: