Merge pull request #35 from tiqi-group/34-remove-loguru-dependency-and-use-std-logging

34 remove loguru dependency and use std logging
This commit is contained in:
Mose Müller 2023-10-16 17:16:33 +02:00 committed by GitHub
commit 7a78713388
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
18 changed files with 841 additions and 607 deletions

View File

@ -479,7 +479,7 @@ For more information about what you can do with the units, please consult the do
## Changing the Log Level ## Changing the Log Level
You can change the log level of loguru by either You can change the log level of the logger by either
1. (RECOMMENDED) setting the `ENVIRONMENT` environment variable to "production" or "development" 1. (RECOMMENDED) setting the `ENVIRONMENT` environment variable to "production" or "development"
@ -493,10 +493,14 @@ You can change the log level of loguru by either
```python ```python
# <your_script.py> # <your_script.py>
import logging
from pydase.utils.logging import setup_logging from pydase.utils.logging import setup_logging
setup_logging("INFO") setup_logging("INFO") # or setup_logging(logging.INFO)
logger = logging.getLogger(__name__)
# ... and log your messages
logger.info("My info message.")
``` ```
## Documentation ## Documentation

1166
poetry.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -10,7 +10,6 @@ packages = [{ include = "pydase", from = "src" }]
[tool.poetry.dependencies] [tool.poetry.dependencies]
python = "^3.10" python = "^3.10"
rpyc = "^5.3.1" rpyc = "^5.3.1"
loguru = "^0.7.0"
fastapi = "^0.100.0" fastapi = "^0.100.0"
uvicorn = "^0.22.0" uvicorn = "^0.22.0"
toml = "^0.10.2" toml = "^0.10.2"

View File

@ -1,17 +1,19 @@
import base64 import base64
import io import io
import logging
from pathlib import Path from pathlib import Path
from typing import TYPE_CHECKING, Optional from typing import TYPE_CHECKING, Optional
from urllib.request import urlopen from urllib.request import urlopen
import PIL.Image import PIL.Image
from loguru import logger
from pydase.data_service.data_service import DataService from pydase.data_service.data_service import DataService
if TYPE_CHECKING: if TYPE_CHECKING:
from matplotlib.figure import Figure from matplotlib.figure import Figure
logger = logging.getLogger(__name__)
class Image(DataService): class Image(DataService):
def __init__( def __init__(
@ -54,7 +56,7 @@ class Image(DataService):
self._load_from_base64(value_, format_) self._load_from_base64(value_, format_)
def _load_from_base64(self, value_: bytes, format_: str) -> None: def _load_from_base64(self, value_: bytes, format_: str) -> None:
value = value_.decode("utf-8") if isinstance(value_, bytes) else value_ value = value_.decode("utf-8")
self._value = value self._value = value
self._format = format_ self._format = format_

View File

@ -1,9 +1,10 @@
import logging
from typing import Any, Literal from typing import Any, Literal
from loguru import logger
from pydase.data_service.data_service import DataService from pydase.data_service.data_service import DataService
logger = logging.getLogger(__name__)
class NumberSlider(DataService): class NumberSlider(DataService):
""" """

View File

@ -1,11 +1,10 @@
from __future__ import annotations from __future__ import annotations
import inspect import inspect
import logging
from collections.abc import Callable from collections.abc import Callable
from typing import TYPE_CHECKING, Any, cast from typing import TYPE_CHECKING, Any, cast
from loguru import logger
from pydase.data_service.abstract_data_service import AbstractDataService from pydase.data_service.abstract_data_service import AbstractDataService
from pydase.utils.helpers import get_class_and_instance_attributes from pydase.utils.helpers import get_class_and_instance_attributes
@ -14,6 +13,8 @@ from .data_service_list import DataServiceList
if TYPE_CHECKING: if TYPE_CHECKING:
from .data_service import DataService from .data_service import DataService
logger = logging.getLogger(__name__)
class CallbackManager: class CallbackManager:
_notification_callbacks: list[Callable[[str, str, Any], Any]] = [] _notification_callbacks: list[Callable[[str, str, Any], Any]] = []

View File

@ -1,12 +1,12 @@
import asyncio import asyncio
import inspect import inspect
import json import json
import logging
import os import os
from enum import Enum from enum import Enum
from typing import Any, Optional, cast, get_type_hints from typing import Any, Optional, cast, get_type_hints
import rpyc import rpyc
from loguru import logger
import pydase.units as u import pydase.units as u
from pydase.data_service.abstract_data_service import AbstractDataService from pydase.data_service.abstract_data_service import AbstractDataService
@ -27,6 +27,8 @@ from pydase.utils.warnings import (
warn_if_instance_class_does_not_inherit_from_DataService, warn_if_instance_class_does_not_inherit_from_DataService,
) )
logger = logging.getLogger(__name__)
def process_callable_attribute(attr: Any, args: dict[str, Any]) -> Any: def process_callable_attribute(attr: Any, args: dict[str, Any]) -> Any:
converted_args_or_error_msg = convert_arguments_to_hinted_types( converted_args_or_error_msg = convert_arguments_to_hinted_types(

View File

@ -2,15 +2,16 @@ from __future__ import annotations
import asyncio import asyncio
import inspect import inspect
import logging
from collections.abc import Callable from collections.abc import Callable
from functools import wraps from functools import wraps
from typing import TYPE_CHECKING, Any, TypedDict from typing import TYPE_CHECKING, Any, TypedDict
from loguru import logger
if TYPE_CHECKING: if TYPE_CHECKING:
from .data_service import DataService from .data_service import DataService
logger = logging.getLogger(__name__)
class TaskDict(TypedDict): class TaskDict(TypedDict):
task: asyncio.Task[None] task: asyncio.Task[None]

View File

@ -1,4 +1,5 @@
import asyncio import asyncio
import logging
import os import os
import signal import signal
import threading import threading
@ -8,7 +9,6 @@ from types import FrameType
from typing import Any, Optional, Protocol, TypedDict from typing import Any, Optional, Protocol, TypedDict
import uvicorn import uvicorn
from loguru import logger
from rpyc import ( from rpyc import (
ForkingServer, # can be used for multiprocessing, e.g. a database interface server ForkingServer, # can be used for multiprocessing, e.g. a database interface server
) )
@ -21,6 +21,8 @@ from pydase.version import __version__
from .web_server import WebAPI from .web_server import WebAPI
logger = logging.getLogger(__name__)
class AdditionalServerProtocol(Protocol): class AdditionalServerProtocol(Protocol):
""" """

View File

@ -1,3 +1,4 @@
import logging
from pathlib import Path from pathlib import Path
from typing import Any, TypedDict from typing import Any, TypedDict
@ -5,11 +6,12 @@ import socketio
from fastapi import FastAPI from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware from fastapi.middleware.cors import CORSMiddleware
from fastapi.staticfiles import StaticFiles from fastapi.staticfiles import StaticFiles
from loguru import logger
from pydase import DataService from pydase import DataService
from pydase.version import __version__ from pydase.version import __version__
logger = logging.getLogger(__name__)
class UpdateDict(TypedDict): class UpdateDict(TypedDict):
""" """

View File

@ -1,8 +1,9 @@
import logging
import re import re
from itertools import chain from itertools import chain
from typing import Any, Optional, cast from typing import Any, Optional, cast
from loguru import logger logger = logging.getLogger(__name__)
STANDARD_TYPES = ("int", "float", "bool", "str", "Enum", "NoneType", "Quantity") STANDARD_TYPES = ("int", "float", "bool", "str", "Enum", "NoneType", "Quantity")

View File

@ -1,82 +1,111 @@
import logging import logging
import sys import sys
from types import FrameType from copy import copy
from typing import Optional from typing import Optional
import loguru import uvicorn.logging
import rpyc
from uvicorn.config import LOGGING_CONFIG from uvicorn.config import LOGGING_CONFIG
import pydase.config import pydase.config
ALLOWED_LOG_LEVELS = ["DEBUG", "INFO", "ERROR"]
class DefaultFormatter(uvicorn.logging.ColourizedFormatter):
"""
A custom log formatter class that:
* Outputs the LOG_LEVEL with an appropriate color.
* If a log call includes an `extras={"color_message": ...}` it will be used
for formatting the output, instead of the plain text message.
"""
def formatMessage(self, record: logging.LogRecord) -> str:
recordcopy = copy(record)
levelname = recordcopy.levelname
seperator = " " * (8 - len(recordcopy.levelname))
if self.use_colors:
levelname = self.color_level_name(levelname, recordcopy.levelno)
if "color_message" in recordcopy.__dict__:
recordcopy.msg = recordcopy.__dict__["color_message"]
recordcopy.__dict__["message"] = recordcopy.getMessage()
recordcopy.__dict__["levelprefix"] = levelname + seperator
return logging.Formatter.formatMessage(self, recordcopy)
def should_use_colors(self) -> bool:
return sys.stderr.isatty() # pragma: no cover
# from: https://github.com/Delgan/loguru section def setup_logging(level: Optional[str | int] = None) -> None:
# "Entirely compatible with standard logging" """
class InterceptHandler(logging.Handler): Configures the logging settings for the application.
def emit(self, record: logging.LogRecord) -> None:
# Ignore "asyncio.CancelledError" raised by uvicorn
if record.name == "uvicorn.error" and "CancelledError" in record.msg:
return
# Get corresponding Loguru level if it exists. This function sets up logging with specific formatting and colorization of log
level: int | str messages. The log level is determined based on the application's operation mode,
try: with an option to override the level. By default, in a development environment, the
level = loguru.logger.level(record.levelname).name log level is set to DEBUG, whereas in other environments, it is set to INFO.
except ValueError:
level = record.levelno
# Find caller from where originated the logged message. Parameters:
frame: Optional[FrameType] = sys._getframe(6) level (Optional[str | int]):
depth = 6 A specific log level to set for the application. If None, the log level is
while frame and frame.f_code.co_filename == logging.__file__: determined based on the application's operation mode. Accepts standard log
frame = frame.f_back level names ('DEBUG', 'INFO', etc.) and corresponding numerical values.
depth += 1
try: Example:
msg = record.getMessage()
except TypeError:
# A `TypeError` is raised when the `msg` string expects more arguments
# than are provided by `args`. This can happen when intercepting log
# messages with a certain format, like
# > logger.debug("call: %s%r", method_name, *args) # in tiqi_rpc
# where `*args` unpacks a sequence of values that should replace
# placeholders in the string.
msg = record.msg % (record.args[0], record.args[2:]) # type: ignore
loguru.logger.opt(depth=depth, exception=record.exc_info).log(level, msg) ```python
>>> import logging
>>> setup_logging(logging.DEBUG)
>>> setup_logging("INFO")
```
"""
logger = logging.getLogger()
def setup_logging(level: Optional[str] = None) -> None:
loguru.logger.debug("Configuring service logging.")
if pydase.config.OperationMode().environment == "development": if pydase.config.OperationMode().environment == "development":
log_level = "DEBUG" log_level = logging.DEBUG
else: else:
log_level = "INFO" log_level = logging.INFO
if level is not None and level in ALLOWED_LOG_LEVELS: # If a level is specified, check whether it's a string or an integer.
log_level = level if level is not None:
if isinstance(level, str):
# Convert known log level strings directly to their corresponding logging
# module constants.
level_name = level.upper() # Ensure level names are uppercase
if hasattr(logging, level_name):
log_level = getattr(logging, level_name)
else:
raise ValueError(
f"Invalid log level: {level}. Must be one of 'DEBUG', 'INFO', "
"'WARNING', 'ERROR', etc."
)
elif isinstance(level, int):
log_level = level # Directly use integer levels
else:
raise ValueError("Log level must be a string or an integer.")
loguru.logger.remove() # Set the logger's level.
loguru.logger.add(sys.stderr, level=log_level) logger.setLevel(log_level)
# set up the rpyc logger *before* adding the InterceptHandler to the logging module # create console handler and set level to debug
rpyc.setup_logger(quiet=True) # type: ignore ch = logging.StreamHandler()
logging.basicConfig(handlers=[InterceptHandler()], level=0) # add formatter to ch
ch.setFormatter(
DefaultFormatter(
fmt="%(asctime)s.%(msecs)03d | %(levelprefix)s | %(name)s:%(funcName)s:%(lineno)d - %(message)s",
datefmt="%Y-%m-%d %H:%M:%S",
)
)
# add ch to logger
logger.addHandler(ch)
logger.debug("Configuring service logging.")
logging.getLogger("asyncio").setLevel(logging.INFO) logging.getLogger("asyncio").setLevel(logging.INFO)
logging.getLogger("urllib3").setLevel(logging.INFO) logging.getLogger("urllib3").setLevel(logging.INFO)
# overwriting the uvicorn logging config to use the loguru intercept handler # configuring uvicorn logger
LOGGING_CONFIG["handlers"] = { LOGGING_CONFIG["formatters"]["default"][
"default": { "fmt"
"()": InterceptHandler, ] = "%(asctime)s.%(msecs)03d | %(levelprefix)s %(message)s"
"formatter": "default", LOGGING_CONFIG["formatters"]["default"]["datefmt"] = "%Y-%m-%d %H:%M:%S"
},
"access": {
"()": InterceptHandler,
"formatter": "access",
},
}

View File

@ -1,7 +1,9 @@
from loguru import logger import logging
from pydase.utils.helpers import get_component_class_names from pydase.utils.helpers import get_component_class_names
logger = logging.getLogger(__name__)
def warn_if_instance_class_does_not_inherit_from_DataService(__value: object) -> None: def warn_if_instance_class_does_not_inherit_from_DataService(__value: object) -> None:
base_class_name = __value.__class__.__base__.__name__ base_class_name = __value.__class__.__base__.__name__

View File

@ -1,21 +1,10 @@
from collections.abc import Generator from collections.abc import Generator
from typing import Any from typing import Any
import pytest
from loguru import logger
from pytest import LogCaptureFixture
from pydase import DataService from pydase import DataService
from pydase.data_service.callback_manager import CallbackManager from pydase.data_service.callback_manager import CallbackManager
@pytest.fixture
def caplog(caplog: LogCaptureFixture) -> Generator[LogCaptureFixture, Any, None]:
handler_id = logger.add(caplog.handler, format="{message}")
yield caplog
logger.remove(handler_id)
def emit(self: Any, parent_path: str, name: str, value: Any) -> None: def emit(self: Any, parent_path: str, name: str, value: Any) -> None:
if isinstance(value, DataService): if isinstance(value, DataService):
value = value.serialize() value = value.serialize()

View File

@ -3,8 +3,6 @@ from pytest import CaptureFixture, LogCaptureFixture
from pydase.components.coloured_enum import ColouredEnum from pydase.components.coloured_enum import ColouredEnum
from pydase.data_service.data_service import DataService from pydase.data_service.data_service import DataService
from .. import caplog # noqa
def test_ColouredEnum(capsys: CaptureFixture) -> None: def test_ColouredEnum(capsys: CaptureFixture) -> None:
class MyStatus(ColouredEnum): class MyStatus(ColouredEnum):

View File

@ -3,8 +3,6 @@ from pytest import CaptureFixture, LogCaptureFixture
from pydase.components.number_slider import NumberSlider from pydase.components.number_slider import NumberSlider
from pydase.data_service.data_service import DataService from pydase.data_service.data_service import DataService
from .. import caplog # noqa
def test_NumberSlider(capsys: CaptureFixture) -> None: def test_NumberSlider(capsys: CaptureFixture) -> None:
class ServiceClass(DataService): class ServiceClass(DataService):

View File

@ -0,0 +1,71 @@
import logging
from pytest import LogCaptureFixture
from pydase.utils.logging import setup_logging
def test_log_error(caplog: LogCaptureFixture):
    """With the level set to ERROR, only ERROR output reaches the handlers."""
    setup_logging("ERROR")
    root = logging.getLogger()

    root.debug("This is a debug message")
    root.info("This is an info message")
    root.warning("This is a warning message")
    root.error("This is an error message")

    # Everything below ERROR must be filtered out ...
    suppressed = (
        "This is a debug message",
        "This is an info message",
        "This is a warning message",
    )
    assert all(message not in caplog.text for message in suppressed)
    # ... while the ERROR message is captured, with a matching record level.
    assert "This is an error message" in caplog.text
    assert any(record.levelname == "ERROR" for record in caplog.records)
def test_log_warning(caplog: LogCaptureFixture):
    """With the level set to WARNING, WARNING and ERROR output gets through.

    DEBUG and INFO messages must be suppressed, while both a WARNING and an
    ERROR record are captured.
    """
    setup_logging("WARNING")
    logger = logging.getLogger()

    logger.debug("This is a debug message")
    logger.info("This is an info message")
    logger.warning("This is a warning message")
    logger.error("This is an error message")

    # Messages below WARNING are filtered out.
    assert "This is a debug message" not in caplog.text
    assert "This is an info message" not in caplog.text
    # WARNING and above pass through.
    assert "This is a warning message" in caplog.text
    assert "This is an error message" in caplog.text
    # Check the record levels, not just the rendered text: both the WARNING
    # and the ERROR record must be present.
    assert any(record.levelname == "WARNING" for record in caplog.records)
    assert any(record.levelname == "ERROR" for record in caplog.records)
def test_log_debug(caplog: LogCaptureFixture):
    """With the level set to DEBUG, every message is captured."""
    setup_logging("DEBUG")
    # Root logger: setup_logging configures the root level/handlers.
    root = logging.getLogger()

    root.debug("This is a debug message")
    root.info("This is an info message")
    root.warning("This is a warning message")
    root.error("This is an error message")

    # DEBUG is the lowest standard level, so nothing is filtered out.
    expected = (
        "This is a debug message",
        "This is an info message",
        "This is a warning message",
        "This is an error message",
    )
    assert all(message in caplog.text for message in expected)
def test_log_info(caplog: LogCaptureFixture):
    """With the level set to INFO, only DEBUG output is suppressed."""
    setup_logging("INFO")
    # Root logger: setup_logging configures the root level/handlers.
    root = logging.getLogger()

    root.debug("This is a debug message")
    root.info("This is an info message")
    root.warning("This is a warning message")
    root.error("This is an error message")

    # DEBUG sits below INFO and must be filtered out ...
    assert "This is a debug message" not in caplog.text
    # ... while INFO and above are captured.
    expected = (
        "This is an info message",
        "This is a warning message",
        "This is an error message",
    )
    assert all(message in caplog.text for message in expected)

View File

@ -2,8 +2,6 @@ from pytest import LogCaptureFixture
from pydase import DataService from pydase import DataService
from .. import caplog # noqa
def test_setattr_warnings(caplog: LogCaptureFixture) -> None: # noqa def test_setattr_warnings(caplog: LogCaptureFixture) -> None: # noqa
# def test_setattr_warnings(capsys: CaptureFixture) -> None: # def test_setattr_warnings(capsys: CaptureFixture) -> None: