diff --git a/Logging.md b/Logging.md index c8f778a801..0bdd483701 100644 --- a/Logging.md +++ b/Logging.md @@ -12,14 +12,15 @@ Other log levels fall back to `WARNING`. * `--log-level` - The log level to log on std out. * `--log-file` - Path to a file to log to. -* `--log-handler-config` - Specifies `when`, `interval`, `backupCount` for the `TimedRotatingFileMultiProcessHandler`. These 3 attributes are passed as a `;` seperated string. For example, `when` of d (days), `interval` of 7 and `backupCount` of 1 will be passed as `D;7;1`. Note: `backupCount` of 0 will mean all backup log files will be retained and not deleted at all. More details about these attributes can be found [here](https://docs.python.org/3/library/logging.handlers.html#timedrotatingfilehandler). `TimedRotatingFileMultiProcessHandler` supports the ability to cleanup logs by time and mantain backup logs and a custom JSON formatter for logs. -* `--log-fmt-pattern` - Specifies logging.Formatter pattern to override default patterns. -* `--log-json-fmt` - Specifies whether to use JSON logging formatter or text logging formatter. Defaults to `False`. +* `--log-config` - Specifies a custom logging configuration file + Example: ```sh -./bin/aca-py start --log-level debug --log-file acapy.log --log-handler-config "d;7;1" --log-fmt-pattern "%(asctime)s [%(did)s] %(filename)s %(lineno)d %(message)s" --log-json-fmt +./bin/aca-py start --log-level debug --log-file acapy.log --log-config aries_cloudagent.config:default_per_tenant_logging_config.ini + +./bin/aca-py start --log-level debug --log-file acapy.log --log-config ./aries_cloudagent/config/default_per_tenant_logging_config.yml ``` ## Environment Variables @@ -27,14 +28,11 @@ Example: The log level can be configured using the environment variable `ACAPY_LOG_LEVEL`. The log file can be set by `ACAPY_LOG_FILE`. The log config can be set by `ACAPY_LOG_CONFIG`. -The log rotating file handler config can be set by `ACAPY_LOG_HANDLER_CONFIG`. 
-The log formatter pattern can be set by `ACAPY_LOG_FMT_PATTERN`. -The log json formatter flag can be set by `ACAPY_LOG_JSON_FMT`. Example: ```sh -ACAPY_LOG_LEVEL=info ACAPY_LOG_FILE=./acapy.log ACAPY_LOG_CONFIG=./acapy_log.ini ACAPY_LOG_HANDLER_CONFIG="d;7;1" ./bin/aca-py start +ACAPY_LOG_LEVEL=info ACAPY_LOG_FILE=./acapy.log ACAPY_LOG_CONFIG=./acapy_log.ini ./bin/aca-py start ``` ## Acapy Config File @@ -53,6 +51,68 @@ Also if log-level is set to WARNING, connections and presentations will be logge ## Log config file +The path to the config file is provided via `--log-config`. + Find an example in [default_logging_config.ini](aries_cloudagent/config/default_logging_config.ini). You can find more detail description in the [logging documentation](https://docs.python.org/3/howto/logging.html#configuring-logging). + +For per-tenant logging, find an example in [default_per_tenant_logging_config.ini](aries_cloudagent/config/default_per_tenant_logging_config.ini), which sets up `TimedRotatingFileMultiProcessHandler` and `StreamHandler` handlers. The custom `TimedRotatingFileMultiProcessHandler` handler supports the ability to clean up logs by time and maintain backup logs and a custom JSON formatter for logs. The arguments for it, such as `file name`, `when`, `interval` and `backupCount`, can be passed as `args=('acapy.log', 'd', 7, 1,)` [also shown below]. Note: `backupCount` of 0 will mean all backup log files will be retained and not deleted at all. 
More details about these attributes can be found [here](https://docs.python.org/3/library/logging.handlers.html#timedrotatingfilehandler) + +``` +[loggers] +keys=root + +[handlers] +keys=stream_handler, timed_file_handler + +[formatters] +keys=formatter + +[logger_root] +level=ERROR +handlers=stream_handler, timed_file_handler + +[handler_stream_handler] +class=StreamHandler +level=DEBUG +formatter=formatter +args=(sys.stderr,) + +[handler_timed_file_handler] +class=logging.handlers.TimedRotatingFileMultiProcessHandler +level=DEBUG +formatter=formatter +args=('acapy.log', 'd', 7, 1,) + +[formatter_formatter] +format=%(asctime)s %(wallet_id)s %(levelname)s %(pathname)s:%(lineno)d %(message)s +``` + +For `DictConfig` [`dict` logging config file], find an example in [default_per_tenant_logging_config.yml](aries_cloudagent/config/default_per_tenant_logging_config.yml) with same attributes as `default_per_tenant_logging_config.ini` file. + +``` +version: 1 +formatters: + default: + format: '%(asctime)s %(wallet_id)s %(levelname)s %(pathname)s:%(lineno)d %(message)s' +handlers: + console: + class: logging.StreamHandler + level: DEBUG + formatter: default + stream: ext://sys.stderr + rotating_file: + class: logging.handlers.TimedRotatingFileMultiProcessHandler + level: DEBUG + filename: 'acapy.log' + when: 'd' + interval: 7 + backupCount: 1 + formatter: default +root: + level: INFO + handlers: + - console + - rotating_file +``` diff --git a/aries_cloudagent/admin/server.py b/aries_cloudagent/admin/server.py index 61b3b61630..53f8890546 100644 --- a/aries_cloudagent/admin/server.py +++ b/aries_cloudagent/admin/server.py @@ -22,6 +22,7 @@ from marshmallow import fields from ..config.injection_context import InjectionContext +from ..config.logging import context_wallet_id from ..core.event_bus import Event, EventBus from ..core.plugin_registry import PluginRegistry from ..core.profile import Profile @@ -407,6 +408,8 @@ async def setup_context(request: web.Request, handler): 
) = self.multitenant_manager.get_wallet_details_from_token( token=token ) + wallet_id = profile.settings.get("wallet.id") + context_wallet_id.set(wallet_id) meta_data = { "wallet_id": walletid, "wallet_key": walletkey, diff --git a/aries_cloudagent/config/argparse.py b/aries_cloudagent/config/argparse.py index 4247ffa725..2f9463d73b 100644 --- a/aries_cloudagent/config/argparse.py +++ b/aries_cloudagent/config/argparse.py @@ -1013,46 +1013,6 @@ def add_arguments(self, parser: ArgumentParser): "('debug', 'info', 'warning', 'error', 'critical')" ), ) - parser.add_argument( - "--log-handler-config", - dest="log_handler_config", - type=str, - metavar="", - default=None, - env_var="ACAPY_LOG_HANDLER_CONFIG", - help=( - "Specifies when, interval, backupCount for the " - "TimedRotatingFileHandler. These attributes are " - "passed as a ; seperated string. For example, " - "when of D (days), interval of 7 and backupCount " - "of 1 will be passed as 'D;7;1'. Note: " - "backupCount of 0 will mean all backup log files " - "will be retained and not deleted at all." - ), - ) - parser.add_argument( - "--log-fmt-pattern", - dest="log_fmt_pattern", - type=str, - metavar="", - default=None, - env_var="ACAPY_LOG_FMT_PATTERN", - help=( - "Specifies logging formatter pattern as string. Examples are included " - "in 'Logging.md'. For information regarding different attributes " - "supported in the pattern, please look at " - "https://docs.python.org/3/library/logging.html#logrecord-attributes." - ), - ) - parser.add_argument( - "--log-json-fmt", - action="store_true", - env_var="ACAPY_LOG_JSON_FMT", - help=( - "Specifies whether to use JSON logging formatter or " - "text logging formatter." 
- ), - ) def get_settings(self, args: Namespace) -> dict: """Extract logging settings.""" @@ -1063,29 +1023,6 @@ def get_settings(self, args: Namespace) -> dict: settings["log.file"] = args.log_file if args.log_level: settings["log.level"] = args.log_level - if args.log_handler_config: - try: - handler_config_attribs = (args.log_handler_config).split(";") - settings["log.handler_when"] = handler_config_attribs[0] - settings["log.handler_interval"] = int(handler_config_attribs[1]) - settings["log.handler_bakcount"] = int(handler_config_attribs[2]) - except IndexError: - raise ArgsParseError( - "With --log-handler-config, the provided argument must be " - "in 'when;interval;backupCount' format. Each of the 3 " - "attributes for TimedRotatingFileHandler must be specified." - ) - except ValueError: - raise ArgsParseError( - "With --log-handler-config, 'interval' and 'backupCount' " - "should be a number [int]" - ) - if args.log_fmt_pattern: - settings["log.fmt_pattern"] = args.log_fmt_pattern - if args.log_json_fmt: - settings["log.json_fmt"] = True - else: - settings["log.json_fmt"] = False return settings diff --git a/aries_cloudagent/config/default_per_tenant_logging_config.ini b/aries_cloudagent/config/default_per_tenant_logging_config.ini new file mode 100644 index 0000000000..bb763dd56f --- /dev/null +++ b/aries_cloudagent/config/default_per_tenant_logging_config.ini @@ -0,0 +1,27 @@ +[loggers] +keys=root + +[handlers] +keys=stream_handler, timed_file_handler + +[formatters] +keys=formatter + +[logger_root] +level=ERROR +handlers=stream_handler, timed_file_handler + +[handler_stream_handler] +class=StreamHandler +level=DEBUG +formatter=formatter +args=(sys.stderr,) + +[handler_timed_file_handler] +class=logging.handlers.TimedRotatingFileMultiProcessHandler +level=DEBUG +formatter=formatter +args=('/home/aries/log/acapy-agent.log', 'd', 7, 1,) + +[formatter_formatter] +format=%(asctime)s %(wallet_id)s %(levelname)s %(pathname)s:%(lineno)d %(message)s \ No newline 
at end of file diff --git a/aries_cloudagent/config/default_per_tenant_logging_config.yml b/aries_cloudagent/config/default_per_tenant_logging_config.yml new file mode 100644 index 0000000000..dc7c0e53d0 --- /dev/null +++ b/aries_cloudagent/config/default_per_tenant_logging_config.yml @@ -0,0 +1,23 @@ +version: 1 +formatters: + default: + format: '%(asctime)s %(wallet_id)s %(levelname)s %(pathname)s:%(lineno)d %(message)s' +handlers: + console: + class: logging.StreamHandler + level: DEBUG + formatter: default + stream: ext://sys.stderr + rotating_file: + class: logging.handlers.TimedRotatingFileMultiProcessHandler + level: DEBUG + filename: '/home/aries/log/acapy-agent.log' + when: 'd' + interval: 7 + backupCount: 1 + formatter: default +root: + level: INFO + handlers: + - console + - rotating_file \ No newline at end of file diff --git a/aries_cloudagent/config/logging.py b/aries_cloudagent/config/logging.py index f3bd59217d..8fcc845b1f 100644 --- a/aries_cloudagent/config/logging.py +++ b/aries_cloudagent/config/logging.py @@ -1,30 +1,55 @@ """Utilities related to logging.""" -import asyncio + +from configparser import ConfigParser +from contextvars import ContextVar from datetime import datetime, timedelta from io import TextIOWrapper import logging -from logging.config import fileConfig +from logging.config import dictConfig, fileConfig from logging.handlers import BaseRotatingHandler import os from random import randint import re import sys +import yaml import time as mod_time -from typing import Optional, TextIO +from typing import TextIO import pkg_resources from portalocker import LOCK_EX, lock, unlock from pythonjsonlogger import jsonlogger from ..config.settings import Settings -from ..core.profile import Profile from ..version import __version__ -from ..wallet.base import BaseWallet, DIDInfo from .banner import Banner -from .base import BaseSettings - DEFAULT_LOGGING_CONFIG_PATH = "aries_cloudagent.config:default_logging_config.ini" 
+DEFAULT_PER_TENANT_LOGGING_CONFIG_PATH_INI = ( + "aries_cloudagent.config:default_per_tenant_logging_config.ini" +) +LOG_FORMAT_FILE_ALIAS_PATTERN = ( + "%(asctime)s %(wallet_id)s %(levelname)s %(pathname)s:%(lineno)d %(message)s" +) + +context_wallet_id: ContextVar[str] = ContextVar("context_wallet_id") + + +class ContextFilter(logging.Filter): + """Custom logging Filter to adapt logs with contextual wallet_id.""" + + def __init__(self): + """Initialize an instance of Custom logging.Filter.""" + super(ContextFilter, self).__init__() + + def filter(self, record): + """Filter LogRecords and add wallet id to them.""" + try: + wallet_id = context_wallet_id.get() + record.wallet_id = wallet_id + return True + except LookupError: + record.wallet_id = None + return True def load_resource(path: str, encoding: str = None) -> TextIO: @@ -57,6 +82,7 @@ def configure( logging_config_path: str = None, log_level: str = None, log_file: str = None, + multitenant: bool = False, ): """Configure logger. 
@@ -65,28 +91,100 @@ def configure( :param log_level: str: (Default value = None) """ + is_dict_config = False if logging_config_path is not None: config_path = logging_config_path else: - config_path = DEFAULT_LOGGING_CONFIG_PATH - - log_config = load_resource(config_path, "utf-8") + if multitenant: + config_path = DEFAULT_PER_TENANT_LOGGING_CONFIG_PATH_INI + if log_file: + parsed = ConfigParser() + config_file_path = os.path.join( + os.path.dirname(os.path.realpath(__file__)), + config_path.split(":")[1], + ) + parsed.read(config_file_path) + parsed.set( + "handler_timed_file_handler", + "args", + str( + ( + log_file, + "d", + 7, + 1, + ) + ), + ) + with open(config_file_path, "w") as configfile: + parsed.write(configfile) + else: + config_path = DEFAULT_LOGGING_CONFIG_PATH + if ".yml" in config_path or ".yaml" in config_path: + is_dict_config = True + with open(config_path, "r") as stream: + log_config = yaml.safe_load(stream) + if log_file: + log_config["handlers"]["rotating_file"]["filename"] = log_file + with open(config_path, "w") as fp: + yaml.dump(log_config, fp) + else: + log_config = load_resource(config_path, "utf-8") if log_config: - with log_config: - fileConfig(log_config, disable_existing_loggers=False) + if is_dict_config: + dictConfig(log_config) + else: + with log_config: + fileConfig(log_config, disable_existing_loggers=False) else: logging.basicConfig(level=logging.WARNING) logging.root.warning(f"Logging config file not found: {config_path}") - - if log_file: - logging.root.handlers.clear() + if multitenant: + file_handler_set = False + handler_pattern = None + # Create context filter to adapt wallet_id in logger messages + _cf = ContextFilter() + for _handler in logging.root.handlers: + if isinstance(_handler, TimedRotatingFileMultiProcessHandler): + file_handler_set = True + handler_pattern = _handler.formatter._fmt + # Set Json formatter for rotated file handler which + # cannot be set with config file. By default this will + # be set up. 
+ _handler.setFormatter(jsonlogger.JsonFormatter(handler_pattern)) + # Add context filter to handlers + _handler.addFilter(_cf) + if log_level: + _handler.setLevel(log_level.upper()) + if not file_handler_set and log_file: + file_path = os.path.join( + os.path.dirname(os.path.realpath(__file__)).replace( + "aries_cloudagent/config", "" + ), + log_file, + ) + # If configuration is not provided within .ini or dict config file + # then by default the rotated file handler will have interval=7, + # when=d and backupCount=1 configuration + timed_file_handler = TimedRotatingFileMultiProcessHandler( + filename=file_path, + interval=7, + when="d", + backupCount=1, + ) + timed_file_handler.addFilter(_cf) + # By default this will be set up. + timed_file_handler.setFormatter( + jsonlogger.JsonFormatter(LOG_FORMAT_FILE_ALIAS_PATTERN) + ) + logging.root.handlers.append(timed_file_handler) + elif log_file and not multitenant: + # Don't go with rotated file handler when not in multitenant mode. logging.root.handlers.append( logging.FileHandler(log_file, encoding="utf-8") ) - if log_level: - log_level = log_level.upper() - logging.root.setLevel(log_level) + logging.root.setLevel(log_level.upper()) @classmethod def print_banner( @@ -498,128 +596,6 @@ def doRollover(self): self.release() -LOG_FORMAT_FILE_ALIAS_PATTERN = ( - "%(asctime)s [%(did)s] %(levelname)s %(filename)s %(lineno)d %(message)s" -) - -LOG_FORMAT_FILE_NO_ALIAS_PATTERN = ( - "%(asctime)s %(levelname)s %(filename)s %(lineno)d %(message)s" +logging.handlers.TimedRotatingFileMultiProcessHandler = ( + TimedRotatingFileMultiProcessHandler ) - -LOG_FORMAT_STREAM_PATTERN = ( - "%(asctime)s %(levelname)s %(filename)s %(lineno)d %(message)s" -) - - -def clear_prev_handlers(logger: logging.Logger) -> logging.Logger: - """Remove all handler classes associated with logger instance.""" - iter_count = 0 - num_handlers = len(logger.handlers) - while iter_count < num_handlers: - logger.removeHandler(logger.handlers[0]) - iter_count = 
iter_count + 1 - return logger - - -def get_logger_inst(profile: Profile, logger_name) -> logging.Logger: - """Return a logger instance with provided name and handlers.""" - did_ident = get_did_ident(profile) - if did_ident: - logger_name = f"{logger_name}_{did_ident}" - return get_logger_with_handlers( - settings=profile.settings, - logger=logging.getLogger(logger_name), - did_ident=did_ident, - interval=profile.settings.get("log.handler_interval") or 7, - backup_count=profile.settings.get("log.handler_bakcount") or 1, - at_when=profile.settings.get("log.handler_when") or "d", - ) - - -def get_did_ident(profile: Profile) -> Optional[str]: - """Get public did identifier for logging, if applicable.""" - did_ident = None - if profile.settings.get("log.file"): - - async def _fetch_did() -> Optional[str]: - async with profile.session() as session: - wallet = session.inject(BaseWallet) - req_did_info: DIDInfo = await wallet.get_public_did() - if not req_did_info: - req_did_info: DIDInfo = (await wallet.get_local_dids())[0] - if req_did_info: - did_ident = req_did_info.did - return did_ident - - loop = asyncio.get_event_loop() - did_ident = loop.run_until_complete(_fetch_did()) - return did_ident - - -def get_logger_with_handlers( - settings: BaseSettings, - logger: logging.Logger, - at_when: str = None, - interval: int = None, - backup_count: int = None, - did_ident: str = None, -) -> logging.Logger: - """Return logger instance with necessary handlers if required.""" - if settings.get("log.file"): - # Clear handlers set previously for this logger instance - logger = clear_prev_handlers(logger) - # log file handler - file_path = settings.get("log.file") - file_handler = TimedRotatingFileMultiProcessHandler( - filename=file_path, - interval=interval, - when=at_when, - backupCount=backup_count, - ) - if did_ident: - if settings.get("log.json_fmt"): - file_handler.setFormatter( - jsonlogger.JsonFormatter( - settings.get("log.fmt_pattern") or LOG_FORMAT_FILE_ALIAS_PATTERN - 
) - ) - else: - file_handler.setFormatter( - logging.Formatter( - settings.get("log.fmt_pattern") or LOG_FORMAT_FILE_ALIAS_PATTERN - ) - ) - else: - if settings.get("log.json_fmt"): - file_handler.setFormatter( - jsonlogger.JsonFormatter( - settings.get("log.fmt_pattern") - or LOG_FORMAT_FILE_NO_ALIAS_PATTERN - ) - ) - else: - file_handler.setFormatter( - logging.Formatter( - settings.get("log.fmt_pattern") - or LOG_FORMAT_FILE_NO_ALIAS_PATTERN - ) - ) - logger.addHandler(file_handler) - # stream console handler - std_out_handler = logging.StreamHandler(sys.stdout) - std_out_handler.setFormatter( - logging.Formatter( - settings.get("log.fmt_pattern") or LOG_FORMAT_STREAM_PATTERN - ) - ) - logger.addHandler(std_out_handler) - if did_ident: - logger = logging.LoggerAdapter(logger, {"did": did_ident}) - # set log level - logger_level = ( - (settings.get("log.level")).upper() - if settings.get("log.level") - else logging.INFO - ) - logger.setLevel(logger_level) - return logger diff --git a/aries_cloudagent/config/tests/test_argparse.py b/aries_cloudagent/config/tests/test_argparse.py index c110666606..5003975170 100644 --- a/aries_cloudagent/config/tests/test_argparse.py +++ b/aries_cloudagent/config/tests/test_argparse.py @@ -402,10 +402,6 @@ async def test_logging(self): "test_file.log", "--log-level", "INFO", - "--log-handler-config", - "d;7;1", - "--log-fmt-pattern", - "%(asctime)s %(levelname)s %(filename)s %(lineno)d %(message)s", ] ) @@ -413,35 +409,6 @@ async def test_logging(self): assert settings.get("log.file") == "test_file.log" assert settings.get("log.level") == "INFO" - assert settings.get("log.handler_when") == "d" - assert settings.get("log.handler_interval") == 7 - assert settings.get("log.handler_bakcount") == 1 - assert ( - settings.get("log.fmt_pattern") - == "%(asctime)s %(levelname)s %(filename)s %(lineno)d %(message)s" - ) - assert not settings.get("log.json_fmt") - - result = parser.parse_args( - [ - "--log-file", - "test_file.log", - 
"--log-level", - "INFO", - "--log-handler-config", - "d;7;1", - "--log-json-fmt", - ] - ) - - settings = group.get_settings(result) - - assert settings.get("log.file") == "test_file.log" - assert settings.get("log.level") == "INFO" - assert settings.get("log.handler_when") == "d" - assert settings.get("log.handler_interval") == 7 - assert settings.get("log.handler_bakcount") == 1 - assert settings.get("log.json_fmt") async def test_error_raised_when_multitenancy_used_and_no_jwt_provided(self): """Test that error is raised if no jwt_secret is provided with multitenancy.""" diff --git a/aries_cloudagent/config/tests/test_logging.py b/aries_cloudagent/config/tests/test_logging.py index 13ceff1f8d..0bd6d7c514 100644 --- a/aries_cloudagent/config/tests/test_logging.py +++ b/aries_cloudagent/config/tests/test_logging.py @@ -1,5 +1,4 @@ import contextlib -import logging from io import StringIO @@ -9,11 +8,6 @@ from .. import logging as test_module -from ...core.in_memory import InMemoryProfile -from ...wallet.base import BaseWallet -from ...wallet.did_method import SOV, DIDMethods -from ...wallet.key_type import ED25519 - class TestLoggingConfigurator(IsolatedAsyncioTestCase): agent_label_arg_value = "Aries Cloud Agent" @@ -33,6 +27,61 @@ def test_configure_default(self, mock_file_config, mock_load_resource): mock_load_resource.return_value, disable_existing_loggers=False ) + def test_configure_with_per_tenant_config_file(self): + with mock.patch.object( + test_module, + "logging", + mock.MagicMock( + basicConfig=mock.MagicMock(), + FileHandler=mock.MagicMock(), + root=mock.MagicMock( + warning=mock.MagicMock(), + handlers=[], + ), + ), + ): + test_module.LoggingConfigurator.configure( + log_file="test.log", + multitenant=True, + ) + + def test_configure_with_per_tenant_yml_file(self): + with mock.patch.object( + test_module, + "logging", + mock.MagicMock( + basicConfig=mock.MagicMock(), + FileHandler=mock.MagicMock(), + root=mock.MagicMock( + warning=mock.MagicMock(), + 
handlers=[], + ), + ), + ): + test_module.LoggingConfigurator.configure( + logging_config_path="aries_cloudagent/config/default_per_tenant_logging_config.yml", + log_file="test.log", + multitenant=True, + ) + + def test_configure_with_default_config(self): + with mock.patch.object( + test_module, + "logging", + mock.MagicMock( + basicConfig=mock.MagicMock(), + FileHandler=mock.MagicMock(), + root=mock.MagicMock( + warning=mock.MagicMock(), + handlers=[], + ), + ), + ): + test_module.LoggingConfigurator.configure( + log_file="test.log", + multitenant=True, + ) + def test_configure_default_no_resource(self): with mock.patch.object( test_module, "load_resource", mock.MagicMock() @@ -99,85 +148,3 @@ def test_load_resource(self): test_module.pkg_resources, "resource_stream", mock.MagicMock() ) as mock_res_stream: test_module.load_resource("abc:def", encoding=None) - - def test_get_logger_with_handlers(self): - profile = InMemoryProfile.test_profile() - profile.settings["log.file"] = "test_file.log" - logger = logging.getLogger(__name__) - logger = test_module.get_logger_with_handlers( - settings=profile.settings, - logger=logger, - at_when="m", - interval=1, - backup_count=1, - ) - assert logger - logger = test_module.get_logger_with_handlers( - settings=profile.settings, - logger=logger, - did_ident="tenant_did_123", - at_when="m", - interval=1, - backup_count=1, - ) - assert logger - - async def test_get_logger_inst(self): - profile = InMemoryProfile.test_profile() - logger = test_module.get_logger_inst( - profile=profile, - logger_name=__name__, - ) - assert logger - # public did - profile.settings["log.file"] = "test_file.log" - profile.context.injector.bind_instance(DIDMethods, DIDMethods()) - async with profile.session() as session: - wallet: BaseWallet = session.inject_or(BaseWallet) - await wallet.create_local_did( - SOV, - ED25519, - did="DJGEjaMunDtFtBVrn1qJMT", - ) - await wallet.set_public_did("DJGEjaMunDtFtBVrn1qJMT") - logger = test_module.get_logger_inst( 
- profile=profile, - logger_name=__name__, - ) - # public did, json_fmt, pattern - profile.settings["log.file"] = "test_file.log" - profile.settings["log.json_fmt"] = True - profile.settings[ - "log.fmt_pattern" - ] = "%(asctime)s [%(did)s] %(lineno)d %(message)s" - logger = test_module.get_logger_inst( - profile=profile, - logger_name=__name__, - ) - assert logger - # not public did - profile = InMemoryProfile.test_profile() - profile.settings["log.file"] = "test_file.log" - profile.settings["log.json_fmt"] = False - profile.context.injector.bind_instance(DIDMethods, DIDMethods()) - async with profile.session() as session: - wallet: BaseWallet = session.inject_or(BaseWallet) - await wallet.create_local_did( - SOV, - ED25519, - did="DJGEjaMunDtFtBVrn1qJMT", - ) - logger = test_module.get_logger_inst( - profile=profile, - logger_name=__name__, - ) - assert logger - # not public did, json_fmt, pattern - profile.settings["log.file"] = "test_file.log" - profile.settings["log.json_fmt"] = True - profile.settings["log.fmt_pattern"] = "%(asctime)s %(lineno)d %(message)s" - logger = test_module.get_logger_inst( - profile=profile, - logger_name=__name__, - ) - assert logger diff --git a/aries_cloudagent/config/util.py b/aries_cloudagent/config/util.py index 00a7d5c534..00f9af8654 100644 --- a/aries_cloudagent/config/util.py +++ b/aries_cloudagent/config/util.py @@ -15,7 +15,13 @@ def common_config(settings: Mapping[str, Any]): log_config = settings.get("log.config") log_level = settings.get("log.level") or os.getenv("LOG_LEVEL") log_file = settings.get("log.file") - LoggingConfigurator.configure(log_config, log_level, log_file) + multitenant_enabled = settings.get("multitenant.enabled") or False + LoggingConfigurator.configure( + logging_config_path=log_config, + log_level=log_level, + log_file=log_file, + multitenant=multitenant_enabled, + ) class BoundedInt: diff --git a/aries_cloudagent/connections/base_manager.py b/aries_cloudagent/connections/base_manager.py index 
7cc63cd691..f40a670ae8 100644 --- a/aries_cloudagent/connections/base_manager.py +++ b/aries_cloudagent/connections/base_manager.py @@ -20,7 +20,6 @@ from ..cache.base import BaseCache from ..config.base import InjectionError -from ..config.logging import get_logger_inst from ..core.error import BaseError from ..core.profile import Profile from ..did.did_key import DIDKey @@ -72,10 +71,7 @@ def __init__(self, profile: Profile): """ self._profile = profile self._route_manager = profile.inject(RouteManager) - self._logger: logging.Logger = get_logger_inst( - profile=self._profile, - logger_name=__name__, - ) + self._logger = logging.getLogger(__name__) async def create_did_document( self, diff --git a/aries_cloudagent/core/dispatcher.py b/aries_cloudagent/core/dispatcher.py index 66664474fe..b8660ab568 100644 --- a/aries_cloudagent/core/dispatcher.py +++ b/aries_cloudagent/core/dispatcher.py @@ -13,7 +13,6 @@ from aiohttp.web import HTTPException -from ..config.logging import get_logger_inst from ..connections.base_manager import BaseConnectionManager from ..connections.models.conn_record import ConnRecord from ..core.profile import Profile @@ -52,10 +51,7 @@ def __init__(self, profile: Profile): self.collector: Collector = None self.profile = profile self.task_queue: TaskQueue = None - self.logger: logging.Logger = get_logger_inst( - profile=self.profile, - logger_name=__name__, - ) + self.logger: logging.Logger = logging.getLogger(__name__) async def setup(self): """Perform async instance setup.""" diff --git a/aries_cloudagent/core/oob_processor.py b/aries_cloudagent/core/oob_processor.py index ccecc1a606..d1305aca3a 100644 --- a/aries_cloudagent/core/oob_processor.py +++ b/aries_cloudagent/core/oob_processor.py @@ -5,7 +5,6 @@ from typing import Any, Callable, Dict, List, Optional, cast from ..messaging.agent_message import AgentMessage -from ..config.logging import get_logger_inst from ..connections.models.conn_record import ConnRecord from 
..connections.models.connection_target import ConnectionTarget from ..messaging.decorators.service_decorator import ServiceDecorator @@ -23,6 +22,8 @@ from .error import BaseError from .profile import Profile +LOGGER = logging.getLogger(__name__) + class OobMessageProcessorError(BaseError): """Base error for OobMessageProcessor.""" @@ -70,10 +71,6 @@ async def find_oob_target_for_outbound_message( self, profile: Profile, outbound_message: OutboundMessage ) -> Optional[ConnectionTarget]: """Find connection target for the outbound message.""" - logger: logging.Logger = get_logger_inst( - profile=profile, - logger_name=__name__, - ) try: async with profile.session() as session: # Try to find the oob record for the outbound message: @@ -81,7 +78,7 @@ async def find_oob_target_for_outbound_message( session, {"attach_thread_id": outbound_message.reply_thread_id} ) - logger.debug( + LOGGER.debug( "extracting their service from oob record %s", oob_record.their_service, ) @@ -91,7 +88,7 @@ async def find_oob_target_for_outbound_message( # Attach ~service decorator so other message can respond message = json.loads(outbound_message.payload) if not message.get("~service"): - logger.debug( + LOGGER.debug( "Setting our service on the message ~service %s", oob_record.our_service, ) @@ -104,7 +101,7 @@ async def find_oob_target_for_outbound_message( outbound_message.payload = json.dumps(message) - logger.debug("Sending oob message payload %s", outbound_message.payload) + LOGGER.debug("Sending oob message payload %s", outbound_message.payload) return ConnectionTarget( endpoint=their_service.endpoint, @@ -121,15 +118,11 @@ async def find_oob_record_for_inbound_message( """Find oob record for inbound message.""" message_type = context.message._type oob_record = None - logger: logging.Logger = get_logger_inst( - profile=context.profile, - logger_name=__name__, - ) async with context.profile.session() as session: # First try to find the oob record based on the associated pthid if 
context.message_receipt.parent_thread_id: try: - logger.debug( + LOGGER.debug( "Retrieving OOB record using pthid " f"{context.message_receipt.parent_thread_id} " f"for message type {message_type}" @@ -153,7 +146,7 @@ async def find_oob_record_for_inbound_message( and context.message_receipt.recipient_verkey ): try: - logger.debug( + LOGGER.debug( "Retrieving OOB record using thid " f"{context.message_receipt.thread_id} and recipient verkey" f" {context.message_receipt.recipient_verkey} for " @@ -174,7 +167,7 @@ async def find_oob_record_for_inbound_message( if not oob_record: return None - logger.debug( + LOGGER.debug( f"Found out of band record for inbound message with type {message_type}" f": {oob_record.oob_id}" ) @@ -190,14 +183,14 @@ async def find_oob_record_for_inbound_message( and context.connection_record and context.connection_record.connection_id != oob_record.connection_id ): - logger.debug( + LOGGER.debug( f"Oob record connection id {oob_record.connection_id} is different from" f" inbound message connection {context.connection_record.connection_id}", ) # Mismatch in connection id's in only allowed in state await response # (connection id can change bc of reuse) if oob_record.state != OobRecord.STATE_AWAIT_RESPONSE: - logger.debug( + LOGGER.debug( "Inbound message has incorrect connection_id " f"{context.connection_record.connection_id}. Oob record " f"{oob_record.oob_id} associated with connection id " @@ -212,7 +205,7 @@ async def find_oob_record_for_inbound_message( oob_record.invitation.requests_attach and oob_record.state == OobRecord.STATE_AWAIT_RESPONSE ): - logger.debug( + LOGGER.debug( f"Removing stale connection {oob_record.connection_id} due " "to connection reuse" ) @@ -237,7 +230,7 @@ async def find_oob_record_for_inbound_message( ] if context.message_receipt.thread_id not in allowed_thread_ids: - logger.debug( + LOGGER.debug( "Inbound message is for not allowed thread " f"{context.message_receipt.thread_id}. 
Allowed " f"threads are {allowed_thread_ids}" @@ -249,7 +242,7 @@ async def find_oob_record_for_inbound_message( oob_record.attach_thread_id and context.message_receipt.thread_id != oob_record.attach_thread_id ): - logger.debug( + LOGGER.debug( f"Inbound message thread id {context.message_receipt.thread_id} does not" f" match oob record thread id {oob_record.attach_thread_id}" ) @@ -276,7 +269,7 @@ async def find_oob_record_for_inbound_message( ) ) ): - logger.debug( + LOGGER.debug( "Inbound message sender verkey does not match stored service on oob" " record" ) @@ -285,7 +278,7 @@ async def find_oob_record_for_inbound_message( # If the message has a ~service decorator we save it in the oob record so we # can reply to this message if context._message._service: - logger.debug( + LOGGER.debug( "Storing service decorator in oob record %s", context.message._service.serialize(), ) @@ -313,10 +306,6 @@ async def handle_message( their_service: Optional[ServiceDecorator] = None, ): """Message handler for inbound messages.""" - logger: logging.Logger = get_logger_inst( - profile=profile, - logger_name=__name__, - ) supported_types = [ CREDENTIAL_OFFER, @@ -357,7 +346,7 @@ async def handle_message( if not oob_record.connection_id: oob_record.attach_thread_id = self.get_thread_id(message) if their_service: - logger.debug( + LOGGER.debug( "Storing their service in oob record %s", their_service ) oob_record.their_service = their_service.serialize() diff --git a/aries_cloudagent/indy/sdk/tests/test_profile.py b/aries_cloudagent/indy/sdk/tests/test_profile.py index 11f7edc6bf..660641e4d7 100644 --- a/aries_cloudagent/indy/sdk/tests/test_profile.py +++ b/aries_cloudagent/indy/sdk/tests/test_profile.py @@ -1,7 +1,6 @@ -import logging +import pytest from aries_cloudagent.tests import mock -import pytest from ....config.injection_context import InjectionContext from ....core.error import ProfileError @@ -11,6 +10,8 @@ from ..profile import IndySdkProfile from ..wallet_setup import 
IndyOpenWallet, IndyWalletConfig +from .. import profile as test_module + @pytest.fixture async def open_wallet(): @@ -118,12 +119,12 @@ def test_read_only(open_wallet): ro_profile = IndySdkProfile(open_wallet, context) -def test_finalizer(open_wallet, caplog): - def _smaller_scope(): - profile = IndySdkProfile(open_wallet) - assert profile - - with caplog.at_level(logging.DEBUG): - _smaller_scope() - - assert "finalizer called" in caplog.text +def test_finalizer(open_wallet): + profile = IndySdkProfile(open_wallet) + assert profile + with mock.patch.object(test_module, "LOGGER", autospec=True) as mock_logger: + profile._finalizer() + assert mock_logger.debug.call_count == 1 + mock_logger.debug.assert_called_once_with( + "Profile finalizer called; closing wallet" + ) diff --git a/aries_cloudagent/protocols/coordinate_mediation/v1_0/handlers/keylist_handler.py b/aries_cloudagent/protocols/coordinate_mediation/v1_0/handlers/keylist_handler.py index de00566498..6d7225a64d 100644 --- a/aries_cloudagent/protocols/coordinate_mediation/v1_0/handlers/keylist_handler.py +++ b/aries_cloudagent/protocols/coordinate_mediation/v1_0/handlers/keylist_handler.py @@ -32,11 +32,11 @@ async def handle(self, context: RequestContext, responder: BaseResponder): session, context.connection_record.connection_id ) except StorageNotFoundError as err: - LOG.warning( + self._logger.warning( "Received keylist from connection that is not acting as mediator: %s", err, ) return # TODO verify our keylist matches? 
- LOG.info("Keylist received: %s", context.message) + self._logger.info("Keylist received: %s", context.message) diff --git a/aries_cloudagent/protocols/coordinate_mediation/v1_0/handlers/tests/test_keylist_handler.py b/aries_cloudagent/protocols/coordinate_mediation/v1_0/handlers/tests/test_keylist_handler.py index 695629cf91..8c2b837aac 100644 --- a/aries_cloudagent/protocols/coordinate_mediation/v1_0/handlers/tests/test_keylist_handler.py +++ b/aries_cloudagent/protocols/coordinate_mediation/v1_0/handlers/tests/test_keylist_handler.py @@ -1,6 +1,5 @@ """Test keylist handler.""" import logging - import pytest from ......connections.models.conn_record import ConnRecord @@ -11,6 +10,7 @@ from ...models.mediation_record import MediationRecord from ..keylist_handler import KeylistHandler + TEST_CONN_ID = "conn-id" pytestmark = pytest.mark.asyncio @@ -45,15 +45,17 @@ async def test_handler_no_active_connection(self, context): assert "inactive connection" in exc.value async def test_handler_no_record(self, context, caplog): - caplog.set_level(logging.INFO) handler, responder = KeylistHandler(), MockResponder() + logging.propagate = True + caplog.set_level(logging.INFO) await handler.handle(context, responder) assert "not acting as mediator" in caplog.text assert "Keylist received: " not in caplog.text async def test_handler(self, context, session, caplog): - caplog.set_level(logging.INFO) handler, responder = KeylistHandler(), MockResponder() await MediationRecord(connection_id=TEST_CONN_ID).save(session) + logging.propagate = True + caplog.set_level(logging.INFO) await handler.handle(context, responder) assert "Keylist received: " in caplog.text diff --git a/aries_cloudagent/protocols/coordinate_mediation/v1_0/messages/tests/test_problem_report.py b/aries_cloudagent/protocols/coordinate_mediation/v1_0/messages/tests/test_problem_report.py index 322ae27f5a..fe0b5a8722 100644 --- 
a/aries_cloudagent/protocols/coordinate_mediation/v1_0/messages/tests/test_problem_report.py +++ b/aries_cloudagent/protocols/coordinate_mediation/v1_0/messages/tests/test_problem_report.py @@ -1,7 +1,7 @@ """Test Problem Report Message.""" -import logging import pytest +from unittest import mock from unittest import TestCase from ......messaging.models.base import BaseModelError @@ -13,25 +13,20 @@ ValidationError, ) +from .. import problem_report as test_module + class TestCMProblemReportMessage(TestCase): """Test problem report.""" - def setUp(self): - self.problem_report = CMProblemReport( + def test_make_model(self): + """Make problem report model.""" + data = CMProblemReport( description={ "en": "Insufficient credit", "code": ProblemReportReason.MEDIATION_NOT_GRANTED.value, } - ) - - @pytest.fixture(autouse=True) - def inject_fixtures(self, caplog): - self._caplog = caplog - - def test_make_model(self): - """Make problem report model.""" - data = self.problem_report.serialize() + ).serialize() model_instance = CMProblemReport.deserialize(data) assert isinstance(model_instance, CMProblemReport) @@ -53,6 +48,6 @@ def test_validate_and_logger(self): "code": "invalid_code", }, ).serialize() - self._caplog.set_level(logging.WARNING) - CMProblemReportSchema().validate_fields(data) - assert "Unexpected error code received" in self._caplog.text + with mock.patch.object(test_module, "LOGGER", autospec=True) as mock_logger: + CMProblemReportSchema().validate_fields(data) + mock_logger.warning.assert_called_once() diff --git a/aries_cloudagent/protocols/coordinate_mediation/v1_0/tests/test_mediation_manager.py b/aries_cloudagent/protocols/coordinate_mediation/v1_0/tests/test_mediation_manager.py index 8f013fe177..05e39d6686 100644 --- a/aries_cloudagent/protocols/coordinate_mediation/v1_0/tests/test_mediation_manager.py +++ b/aries_cloudagent/protocols/coordinate_mediation/v1_0/tests/test_mediation_manager.py @@ -1,9 +1,8 @@ """Test MediationManager.""" -import logging 
-from typing import AsyncIterable, Iterable +import pytest from aries_cloudagent.tests import mock -import pytest +from typing import AsyncIterable, Iterable from .. import manager as test_module from .....core.event_bus import EventBus, MockEventBus @@ -449,9 +448,8 @@ async def test_store_update_results_exists_relay(self, session, manager): assert route.wallet_id == "test_wallet" assert route.connection_id == TEST_CONN_ID - async def test_store_update_results_errors(self, caplog, manager): + async def test_store_update_results_errors(self, manager): """test_store_update_results with errors.""" - caplog.set_level(logging.WARNING) results = [ KeylistUpdated( recipient_key=TEST_VERKEY, @@ -469,11 +467,9 @@ async def test_store_update_results_errors(self, caplog, manager): result=KeylistUpdated.RESULT_CLIENT_ERROR, ), ] - await manager.store_update_results(TEST_CONN_ID, results) - assert "no_change" in caplog.text - assert "client_error" in caplog.text - assert "server_error" in caplog.text - print(caplog.text) + with mock.patch.object(test_module, "LOGGER", autospec=True) as mock_logger: + await manager.store_update_results(TEST_CONN_ID, results) + assert mock_logger.warning.call_count == 3 async def test_get_my_keylist(self, session, manager): """test_get_my_keylist.""" diff --git a/aries_cloudagent/protocols/didexchange/v1_0/messages/tests/test_problem_report.py b/aries_cloudagent/protocols/didexchange/v1_0/messages/tests/test_problem_report.py index 03af260b1a..6d4a700ae7 100644 --- a/aries_cloudagent/protocols/didexchange/v1_0/messages/tests/test_problem_report.py +++ b/aries_cloudagent/protocols/didexchange/v1_0/messages/tests/test_problem_report.py @@ -1,10 +1,13 @@ import pytest +from unittest import mock + from ......messaging.models.base import BaseModelError from .....didcomm_prefix import DIDCommPrefix from ...message_types import PROBLEM_REPORT from ..problem_report import DIDXProblemReport +from .. 
import problem_report as test_module THID = "dummy-thid" PTHID = "dummy-pthid" @@ -31,9 +34,9 @@ def test_missing_code(): DIDXProblemReport.deserialize({"description": {"en": "test"}}) -def test_unrecognized_code(caplog): - with caplog.at_level("DEBUG"): +def test_unrecognized_code(): + with mock.patch.object(test_module, "LOGGER", autospec=True) as mock_logger: DIDXProblemReport.deserialize( {"description": {"code": "unknown", "en": "test"}} ) - assert "Unexpected error code received" in caplog.text + mock_logger.warning.assert_called_once() diff --git a/aries_cloudagent/protocols/issue_credential/v1_0/messages/tests/test_credential_problem_report.py b/aries_cloudagent/protocols/issue_credential/v1_0/messages/tests/test_credential_problem_report.py index d4e76821e6..86a8ef4c2c 100644 --- a/aries_cloudagent/protocols/issue_credential/v1_0/messages/tests/test_credential_problem_report.py +++ b/aries_cloudagent/protocols/issue_credential/v1_0/messages/tests/test_credential_problem_report.py @@ -1,4 +1,3 @@ -import logging import pytest from unittest import mock @@ -17,6 +16,8 @@ ValidationError, ) +from .. import credential_problem_report as test_module + class TestCredentialProblemReport(TestCase): """Problem report tests.""" @@ -32,10 +33,6 @@ def test_init_type(self): ) assert prob._type == DIDCommPrefix.qualify_current(CREDENTIAL_PROBLEM_REPORT) - @pytest.fixture(autouse=True) - def inject_fixtures(self, caplog): - self._caplog = caplog - @mock.patch( f"{PROTOCOL_PACKAGE}.messages.credential_problem_report." 
"CredentialProblemReportSchema.load" @@ -106,6 +103,6 @@ def test_validate_and_logger(self): "code": "invalid_code", }, ).serialize() - self._caplog.set_level(logging.WARNING) - CredentialProblemReportSchema().validate_fields(data) - assert "Unexpected error code received" in self._caplog.text + with mock.patch.object(test_module, "LOGGER", autospec=True) as mock_logger: + CredentialProblemReportSchema().validate_fields(data) + assert mock_logger.warning.call_count == 1 diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/messages/tests/test_cred_problem_report.py b/aries_cloudagent/protocols/issue_credential/v2_0/messages/tests/test_cred_problem_report.py index 0d302f5a77..4459ec4bc9 100644 --- a/aries_cloudagent/protocols/issue_credential/v2_0/messages/tests/test_cred_problem_report.py +++ b/aries_cloudagent/protocols/issue_credential/v2_0/messages/tests/test_cred_problem_report.py @@ -1,4 +1,3 @@ -import logging import pytest from unittest import mock @@ -17,14 +16,12 @@ ValidationError, ) +from .. 
import cred_problem_report as test_module + class TestCredProblemReport(TestCase): """Problem report tests.""" - @pytest.fixture(autouse=True) - def inject_fixtures(self, caplog): - self._caplog = caplog - def test_init_type(self): """Test initializer.""" @@ -100,9 +97,9 @@ def test_validate_and_logger(self): "code": "invalid_code", }, ).serialize() - self._caplog.set_level(logging.WARNING) - V20CredProblemReportSchema().validate_fields(data) - assert "Unexpected error code received" in self._caplog.text + with mock.patch.object(test_module, "LOGGER", autospec=True) as mock_logger: + V20CredProblemReportSchema().validate_fields(data) + assert mock_logger.warning.call_count == 1 def test_validate_x(self): """Exercise validation requirements.""" diff --git a/aries_cloudagent/protocols/out_of_band/v1_0/messages/tests/test_problem_report.py b/aries_cloudagent/protocols/out_of_band/v1_0/messages/tests/test_problem_report.py index 831d109b9f..4adfd23602 100644 --- a/aries_cloudagent/protocols/out_of_band/v1_0/messages/tests/test_problem_report.py +++ b/aries_cloudagent/protocols/out_of_band/v1_0/messages/tests/test_problem_report.py @@ -1,7 +1,7 @@ """Test Problem Report Message.""" -import logging import pytest +from unittest import mock from unittest import TestCase from ......messaging.models.base import BaseModelError @@ -13,14 +13,12 @@ ValidationError, ) +from .. 
import problem_report as test_module + class TestOOBProblemReportMessage(TestCase): """Test problem report.""" - @pytest.fixture(autouse=True) - def inject_fixtures(self, caplog): - self._caplog = caplog - def setUp(self): self.problem_report = OOBProblemReport( description={ @@ -70,9 +68,9 @@ def test_validate_and_logger(self): ) data.assign_thread_id(thid="test_thid", pthid="test_pthid") data = data.serialize() - self._caplog.set_level(logging.WARNING) - OOBProblemReportSchema().validate_fields(data) - assert "Unexpected error code received" in self._caplog.text + with mock.patch.object(test_module, "LOGGER", autospec=True) as mock_logger: + OOBProblemReportSchema().validate_fields(data) + assert mock_logger.warning.call_count == 1 def test_assign_msg_type_version_to_model_inst(self): test_msg = OOBProblemReport() diff --git a/demo/runners/agent_container.py b/demo/runners/agent_container.py index d9941a856b..30e6ab129f 100644 --- a/demo/runners/agent_container.py +++ b/demo/runners/agent_container.py @@ -56,6 +56,9 @@ def __init__( endorser_role: str = None, revocation: bool = False, anoncreds_legacy_revocation: str = None, + log_file: str = None, + log_config: str = None, + log_level: str = None, **kwargs, ): extra_args = [] @@ -81,6 +84,9 @@ def __init__( endorser_role=endorser_role, revocation=revocation, extra_args=extra_args, + log_file=log_file, + log_config=log_config, + log_level=log_level, **kwargs, ) self.connection_id = None @@ -680,6 +686,9 @@ def __init__( reuse_connections: bool = False, taa_accept: bool = False, anoncreds_legacy_revocation: str = None, + log_file: str = None, + log_config: str = None, + log_level: str = None, ): # configuration parameters self.genesis_txns = genesis_txns @@ -703,6 +712,9 @@ def __init__( self.endorser_agent = None self.endorser_role = endorser_role self.anoncreds_legacy_revocation = anoncreds_legacy_revocation + self.log_file = log_file + self.log_config = log_config + self.log_level = log_level if endorser_role: 
# endorsers and authors need public DIDs (assume cred_type is Indy) if endorser_role == "author" or endorser_role == "endorser": @@ -749,6 +761,9 @@ async def initialize( aip=self.aip, arg_file=self.arg_file, endorser_role=self.endorser_role, + log_file=self.log_file, + log_config=self.log_config, + log_level=self.log_level, ) else: self.agent = the_agent @@ -1293,6 +1308,28 @@ def arg_parser(ident: str = None, port: int = 8020): action="store_true", help="Accept the ledger's TAA, if required", ) + parser.add_argument( + "--log-file", + type=str, + metavar="", + help="Output destination for the root logger.", + ) + parser.add_argument( + "--log-config", + type=str, + metavar="", + help="File path for logging configuration.", + ) + parser.add_argument( + "--log-level", + type=str, + metavar="", + default=None, + help=( + "Specifies a custom logging level as one of: " + "('debug', 'info', 'warning', 'error', 'critical')" + ), + ) return parser @@ -1324,6 +1361,9 @@ async def create_agent_with_args(args, ident: str = None): with open(arg_file) as f: arg_file_dict = yaml.safe_load(f) + log_file = args.log_file or os.getenv("ACAPY_LOG_FILE") + log_config = args.log_config or os.getenv("ACAPY_LOG_CONFIG") + log_level = args.log_level # if we don't have a tails server url then guess it if ("revocation" in args and args.revocation) and not tails_server_base_url: # assume we're running in docker @@ -1401,6 +1441,9 @@ async def create_agent_with_args(args, ident: str = None): reuse_connections=reuse_connections, taa_accept=args.taa_accept, anoncreds_legacy_revocation=anoncreds_legacy_revocation, + log_file=log_file, + log_config=log_config, + log_level=log_level, ) return agent diff --git a/demo/runners/alice.py b/demo/runners/alice.py index 8bea579d29..31f85d0bea 100644 --- a/demo/runners/alice.py +++ b/demo/runners/alice.py @@ -36,6 +36,9 @@ def __init__( no_auto: bool = False, aip: int = 20, endorser_role: str = None, + log_file: str = None, + log_config: str = None, + 
log_level: str = None, **kwargs, ): super().__init__( @@ -47,6 +50,9 @@ def __init__( seed=None, aip=aip, endorser_role=endorser_role, + log_file=log_file, + log_config=log_config, + log_level=log_level, **kwargs, ) self.connection_id = None @@ -130,6 +136,9 @@ async def main(args): wallet_type=alice_agent.wallet_type, aip=alice_agent.aip, endorser_role=alice_agent.endorser_role, + log_file=alice_agent.log_file, + log_config=alice_agent.log_config, + log_level=alice_agent.log_level, ) await alice_agent.initialize(the_agent=agent) diff --git a/demo/runners/faber.py b/demo/runners/faber.py index 5a26eb2669..4d05c939ad 100644 --- a/demo/runners/faber.py +++ b/demo/runners/faber.py @@ -47,6 +47,9 @@ def __init__( endorser_role: str = None, revocation: bool = False, anoncreds_legacy_revocation: str = None, + log_file: str = None, + log_config: str = None, + log_level: str = None, **kwargs, ): super().__init__( @@ -58,6 +61,9 @@ def __init__( endorser_role=endorser_role, revocation=revocation, anoncreds_legacy_revocation=anoncreds_legacy_revocation, + log_file=log_file, + log_config=log_config, + log_level=log_level, **kwargs, ) self.connection_id = None @@ -403,6 +409,9 @@ async def main(args): aip=faber_agent.aip, endorser_role=faber_agent.endorser_role, anoncreds_legacy_revocation=faber_agent.anoncreds_legacy_revocation, + log_file=faber_agent.log_file, + log_config=faber_agent.log_config, + log_level=faber_agent.log_level, ) faber_schema_name = "degree schema" diff --git a/demo/runners/performance.py b/demo/runners/performance.py index 4c79bf6437..26daf4a87d 100644 --- a/demo/runners/performance.py +++ b/demo/runners/performance.py @@ -32,11 +32,23 @@ def __init__( ident: str, port: int, prefix: str = None, + log_file: str = None, + log_config: str = None, + log_level: str = None, **kwargs, ): if prefix is None: prefix = ident - super().__init__(ident, port, port + 1, prefix=prefix, **kwargs) + super().__init__( + ident, + port, + port + 1, + prefix=prefix, + 
log_file=log_file, + log_config=log_config, + log_level=log_level, + **kwargs, + ) self._connection_id = None self._connection_ready = None self.credential_state = {} @@ -123,7 +135,10 @@ async def check_received_creds(self) -> Tuple[int, int]: return pending, total async def update_creds(self): - await self.credential_event.wait() + try: + await self.credential_event.wait() + except asyncio.exceptions.CancelledError: + pass async def check_received_pings(self) -> Tuple[int, int]: while True: @@ -158,8 +173,23 @@ def check_task_exception(self, fut: asyncio.Task): class AliceAgent(BaseAgent): - def __init__(self, port: int, **kwargs): - super().__init__("Alice", port, seed=None, **kwargs) + def __init__( + self, + port: int, + log_file: str = None, + log_config: str = None, + log_level: str = None, + **kwargs, + ): + super().__init__( + "Alice", + port, + seed=None, + log_file=log_file, + log_config=log_config, + log_level=log_level, + **kwargs, + ) self.extra_args = [ "--auto-accept-invites", "--auto-accept-requests", @@ -195,8 +225,23 @@ async def propose_credential( class FaberAgent(BaseAgent): - def __init__(self, port: int, **kwargs): - super().__init__("Faber", port, seed="random", **kwargs) + def __init__( + self, + port: int, + log_file: str = None, + log_config: str = None, + log_level: str = None, + **kwargs, + ): + super().__init__( + "Faber", + port, + seed="random", + log_file=log_file, + log_config=log_config, + log_level=log_level, + **kwargs, + ) self.extra_args = [ "--auto-accept-invites", "--auto-accept-requests", @@ -282,6 +327,9 @@ async def main( batch_size: int = 30, wallet_type: str = None, arg_file: str = None, + log_file: str = None, + log_config: str = None, + log_level: str = None, ): if multi_ledger: genesis = None @@ -310,6 +358,9 @@ async def main( mediation=mediation, wallet_type=wallet_type, arg_file=arg_file, + log_file=log_file, + log_config=log_config, + log_level=log_level, ) await alice.listen_webhooks(start_port + 2) @@ -323,6 
+374,9 @@ async def main( mediation=mediation, wallet_type=wallet_type, arg_file=arg_file, + log_file=log_file, + log_config=log_config, + log_level=log_level, ) await faber.listen_webhooks(start_port + 5) await faber.register_did() @@ -682,6 +736,28 @@ async def check_received_pings(agent, issue_count, pb): metavar="", help="Specify a file containing additional aca-py parameters", ) + parser.add_argument( + "--log-file", + type=str, + metavar="", + help=("Output destination for the root logger."), + ) + parser.add_argument( + "--log-config", + type=str, + metavar="", + help=("File path for logging configuration."), + ) + parser.add_argument( + "--log-level", + type=str, + metavar="", + default=None, + help=( + "Specifies a custom logging level as one of: " + "('debug', 'info', 'warning', 'error', 'critical')" + ), + ) args = parser.parse_args() if args.did_exchange and args.mediation: @@ -720,6 +796,9 @@ async def check_received_pings(agent, issue_count, pb): args.batch, args.wallet_type, args.arg_file, + args.log_file, + args.log_config, + args.log_level, ) ) except KeyboardInterrupt: diff --git a/demo/runners/support/agent.py b/demo/runners/support/agent.py index 223dc4a445..aae52165f9 100644 --- a/demo/runners/support/agent.py +++ b/demo/runners/support/agent.py @@ -138,6 +138,9 @@ def __init__( arg_file: str = None, endorser_role: str = None, extra_args=None, + log_file: str = None, + log_config: str = None, + log_level: str = None, **params, ): self.ident = ident @@ -169,6 +172,9 @@ def __init__( self.mediator_request_id = None self.aip = aip self.arg_file = arg_file + self.log_file = log_file + self.log_config = log_config + self.log_level = log_level self.admin_url = f"http://{self.internal_host}:{admin_port}" if AGENT_ENDPOINT: @@ -345,6 +351,24 @@ def get_agent_args(self): "--public-invites", # ("--log-level", "debug"), ] + if self.log_file: + result.extend( + [ + ("--log-file", self.log_file), + ] + ) + if self.log_config: + result.extend( + [ + 
("--log-config", self.log_config), + ] + ) + if self.log_level: + result.extend( + [ + ("--log-level", self.log_level), + ] + ) if self.aip == 20: result.append("--emit-new-didcomm-prefix") if self.multitenant: diff --git a/demo/runners/support/utils.py b/demo/runners/support/utils.py index 0056686fbf..77e5d7792f 100644 --- a/demo/runners/support/utils.py +++ b/demo/runners/support/utils.py @@ -122,7 +122,10 @@ def output_reader(handle, callback, *args, **kwargs): def log_msg(*msg, color="fg:ansimagenta", **kwargs): - run_in_terminal(lambda: print_ext(*msg, color=color, **kwargs)) + try: + run_in_terminal(lambda: print_ext(*msg, color=color, **kwargs)) + except AssertionError: + pass def log_json(data, **kwargs):