
logging overhaul

Commit: d2f734ba1c (parent d2e1486b5a)
Author: thatmattlove
Date: 2024-04-01 23:28:46 -04:00
47 changed files with 412 additions and 458 deletions

.gitignore (vendored)

@@ -6,7 +6,7 @@ TODO*
 test.py
 .DS_Store
 .idea
-old_*.py
+*_old.py
 *.rdb
 #
 # Github Default from https://github.com/martinohanlon/flightlight/issues/1


@@ -1,4 +1,6 @@
 """hyperglass API."""
+# Standard Library
+import logging
 
 # Third Party
 from litestar import Litestar
@@ -33,21 +35,28 @@ OPEN_API = OpenAPIConfig(
     root_schema_site="elements",
 )
 
-app = Litestar(
-    route_handlers=[
-        device,
-        devices,
-        queries,
-        info,
-        query,
+HANDLERS = [
+    device,
+    devices,
+    queries,
+    info,
+    query,
+]
+
+if not STATE.settings.disable_ui:
+    HANDLERS = [
+        *HANDLERS,
         create_static_files_router(
             path="/images", directories=[IMAGES_DIR], name="images", include_in_schema=False
         ),
        create_static_files_router(
            path="/", directories=[UI_DIR], name="ui", html_mode=True, include_in_schema=False
        ),
-    ],
+    ]
+
+app = Litestar(
+    route_handlers=HANDLERS,
     exception_handlers={
         HTTPException: http_handler,
         HyperglassError: app_handler,


@@ -2,6 +2,7 @@
 # Third Party
 from litestar import Request, Response
+from litestar.exceptions import ValidationException
 
 # Project
 from hyperglass.log import log
@@ -18,9 +19,7 @@ __all__ = (
 def default_handler(request: Request, exc: BaseException) -> Response:
     """Handle uncaught errors."""
     state = use_state()
-    log.critical(
-        "{method} {path} {detail!s}", method=request.method, path=request.url.path, detail=exc
-    )
+    log.bind(method=request.method, path=request.url.path, detail=str(exc)).critical("Error")
     return Response(
         {"output": state.params.messages.general, "level": "danger", "keywords": []},
         status_code=500,
@@ -29,9 +28,7 @@ def default_handler(request: Request, exc: BaseException) -> Response:
 def http_handler(request: Request, exc: BaseException) -> Response:
     """Handle web server errors."""
-    log.critical(
-        "{method} {path} {detail}", method=request.method, path=request.url.path, detail=exc.detail
-    )
+    log.bind(method=request.method, path=request.url.path, detail=exc.detail).critical("HTTP Error")
     return Response(
         {"output": exc.detail, "level": "danger", "keywords": []},
         status_code=exc.status_code,
@@ -40,8 +37,8 @@ def http_handler(request: Request, exc: BaseException) -> Response:
 def app_handler(request: Request, exc: BaseException) -> Response:
     """Handle application errors."""
-    log.critical(
-        "{method} {path} {detail}", method=request.method, path=request.url.path, detail=exc.message
+    log.bind(method=request.method, path=request.url.path, detail=exc.message).critical(
+        "hyperglass Error"
     )
     return Response(
         {"output": exc.message, "level": exc.level, "keywords": exc.keywords},
@@ -49,14 +46,11 @@ def app_handler(request: Request, exc: BaseException) -> Response:
     )
 
-def validation_handler(request: Request, exc: BaseException) -> Response:
+def validation_handler(request: Request, exc: ValidationException) -> Response:
     """Handle Pydantic validation errors raised by FastAPI."""
     error = exc.errors()[0]
-    log.critical(
-        "{method} {path} {detail}",
-        method=request.method,
-        path=request.url.path,
-        detail=error["msg"],
+    log.bind(method=request.method, path=request.url.path, detail=error["msg"]).critical(
+        "Validation Error"
     )
     return Response(
         {"output": error["msg"], "level": "error", "keywords": error["loc"]},

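For context on the pattern used in these handlers: Loguru's bind() attaches structured key/value context to the record's extra dict instead of interpolating values into the message. A minimal, self-contained sketch of that Loguru behavior (the handler and field names below are illustrative, not hyperglass code):

    import sys

    from loguru import logger

    logger.remove()
    logger.add(sys.stdout, format="{level} | {message} {extra}")


    def handle_error(method: str, path: str, exc: Exception) -> None:
        # bind() returns a child logger; the bound pairs land in record["extra"]
        # and are rendered by the "{extra}" field of the sink format.
        logger.bind(method=method, path=path, detail=str(exc)).critical("Error")


    handle_error("GET", "/api/query", ValueError("bad input"))
    # Prints approximately:
    # CRITICAL | Error {'method': 'GET', 'path': '/api/query', 'detail': 'bad input'}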

@@ -73,7 +73,9 @@ async def query(_state: HyperglassState, request: Request, data: Query) -> Query
     # each command output value is unique.
     cache_key = f"hyperglass.query.{data.digest()}"
 
-    log.info("{!r} starting query execution", data)
+    _log = log.bind(query=data.summary())
+
+    _log.info("Starting query execution")
 
     cache_response = cache.get_map(cache_key, "output")
     json_output = False
@@ -81,7 +83,7 @@ async def query(_state: HyperglassState, request: Request, data: Query) -> Query
     runtime = 65535
 
     if cache_response:
-        log.debug("{!r} cache hit (cache key {!r})", data, cache_key)
+        _log.bind(cache_key=cache_key).debug("Cache hit")
 
         # If a cached response exists, reset the expiration time.
         cache.expire(cache_key, expire_in=_state.params.cache.timeout)
@@ -91,7 +93,7 @@ async def query(_state: HyperglassState, request: Request, data: Query) -> Query
         timestamp = cache.get_map(cache_key, "timestamp")
 
     elif not cache_response:
-        log.debug("{!r} cache miss (cache key {!r})", data, cache_key)
+        _log.bind(cache_key=cache_key).debug("Cache miss")
 
         timestamp = data.timestamp
@@ -109,7 +111,7 @@ async def query(_state: HyperglassState, request: Request, data: Query) -> Query
         endtime = time.time()
         elapsedtime = round(endtime - starttime, 4)
-        log.debug("{!r} runtime: {!s} seconds", data, elapsedtime)
+        _log.debug("Runtime: {!s} seconds", elapsedtime)
 
         if output is None:
             raise HyperglassError(message=_state.params.messages.general, alert="danger")
@@ -125,7 +127,7 @@ async def query(_state: HyperglassState, request: Request, data: Query) -> Query
         cache.set_map_item(cache_key, "timestamp", timestamp)
         cache.expire(cache_key, expire_in=_state.params.cache.timeout)
 
-        log.debug("{!r} cached for {!s} seconds", data, _state.params.cache.timeout)
+        _log.bind(cache_timeout=_state.params.cache.timeout).debug("Response cached")
 
         runtime = int(round(elapsedtime, 0))
@@ -137,8 +139,7 @@ async def query(_state: HyperglassState, request: Request, data: Query) -> Query
     if json_output:
         response_format = "application/json"
 
-    log.success("{!r} execution completed", data)
+    _log.info("Execution completed")
 
     response = {
         "output": cache_response,

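As background for the route above: binding once per request produces a child logger that carries the query context through every later message, and bind() can be layered to add per-event fields such as the cache key. A small illustrative sketch, with placeholder names rather than hyperglass's actual objects:

    from loguru import logger


    def handle_request(query_summary: str, cache_key: str, cache_hit: bool) -> None:
        # Bound once per request; every message below carries query=...
        _log = logger.bind(query=query_summary)

        _log.info("Starting query execution")
        if cache_hit:
            # Layer an extra field for this event only.
            _log.bind(cache_key=cache_key).debug("Cache hit")
        else:
            _log.bind(cache_key=cache_key).debug("Cache miss")
        _log.info("Execution completed")


    handle_request("bgp_route 192.0.2.0/24", "hyperglass.query.abc123", cache_hit=False)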

@@ -65,4 +65,6 @@ async def send_webhook(
             }
         )
     except Exception as err:
-        log.error("Error sending webhook to {}: {!s}", params.logging.http.provider, err)
+        log.bind(destination=params.logging.http.provider, error=str(err)).error(
+            "Failed to send webhook"
+        )


@@ -217,19 +217,11 @@ class _ForwardHandler(socketserver.BaseRequestHandler):
                 data = self.request.recv(1024)
                 if not data:
                     break
-                self.logger.trace(
-                    ">>> OUT {0} send to {1}: {2} >>>".format(
-                        self.info, self.remote_address, hexlify(data)
-                    ),
-                )
                 chan.sendall(data)
             if chan in rqst:  # else
                 if not chan.recv_ready():
                     break
                 data = chan.recv(1024)
-                self.logger.trace(
-                    "<<< IN {0} recv: {1} <<<".format(self.info, hexlify(data)),
-                )
                 self.request.sendall(data)
 
     def handle(self):
@@ -249,10 +241,8 @@ class _ForwardHandler(socketserver.BaseRequestHandler):
            chan = None
        if chan is None:
            msg = "{0} to {1} was rejected by the SSH server".format(self.info, self.remote_address)
-            self.logger.trace(msg)
            raise HandlerSSHTunnelForwarderError(msg)
-        self.logger.trace("{0} connected".format(self.info))
        try:
            self._redirect(chan)
        except socket.error:
@@ -260,13 +250,12 @@ class _ForwardHandler(socketserver.BaseRequestHandler):
            # exception. It was seen that a 3way FIN is processed later on, so
            # no need to make an ordered close of the connection here or raise
            # the exception beyond this point...
-            self.logger.trace("{0} sending RST".format(self.info))
-        except Exception as e:
-            self.logger.trace("{0} error: {1}".format(self.info, repr(e)))
+            pass
+        except Exception:
+            pass
        finally:
            chan.close()
            self.request.close()
-            self.logger.trace("{0} connection closed.".format(self.info))
 
 
 class _ForwardServer(socketserver.TCPServer):  # Not Threading
@@ -283,10 +272,7 @@ class _ForwardServer(socketserver.TCPServer):  # Not Threading
     def handle_error(self, request, client_address):
         (exc_class, exc, tb) = sys.exc_info()
-        self.logger.error(
-            "Could not establish connection from {0} to remote " "side of the tunnel",
-            request.getsockname(),
-        )
+        self.logger.bind(source=request.getsockname()).error("Could not establish connection to remote side of the tunnel")
         self.tunnel_ok.put(False)
 
     @property
@@ -814,16 +800,13 @@ class SSHTunnelForwarder:
         check_host(self.ssh_host)
         check_port(self.ssh_port)
-        self.logger.info(
-            "Connecting to gateway: {h}:{p} as user '{u}', timeout {t}",
-            h=self.ssh_host,
-            p=self.ssh_port,
-            u=self.ssh_username,
-            t=self.gateway_timeout,
-        )
-        self.logger.debug("Concurrent connections allowed: {0}", self._threaded)
+        self.logger.bind(
+            host=self.ssh_host,
+            port=self.ssh_port,
+            username=self.ssh_username,
+            timeout=self.gateway_timeout,
+        ).info("Connecting to gateway")
+        self.logger.bind(count=self._threaded).debug("Concurrent connections allowed")
 
     @staticmethod
     def _read_ssh_config(
@@ -1154,8 +1137,7 @@ class SSHTunnelForwarder:
         if self.skip_tunnel_checkup:
             self.tunnel_is_up[_srv.local_address] = True
             return
-        self.logger.info("Checking tunnel to: {a}", a=_srv.remote_address)
+        self.logger.debug("Checking tunnel", address=_srv.remote_address)
         if isinstance(_srv.local_address, str):  # UNIX stream
             s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
@@ -1171,13 +1153,13 @@ class SSHTunnelForwarder:
             )
             s.connect(connect_to)
             self.tunnel_is_up[_srv.local_address] = _srv.tunnel_ok.get(timeout=TUNNEL_TIMEOUT * 1.1)
-            self.logger.debug("Tunnel to {0} is DOWN".format(_srv.remote_address))
+            self.logger.bind(status="DOWN", address=_srv.remote_address).debug("Tunnel Status")
         except socket.error:
-            self.logger.debug("Tunnel to {0} is DOWN".format(_srv.remote_address))
+            self.logger.bind(status="DOWN", address=_srv.remote_address).debug("Tunnel Status")
             self.tunnel_is_up[_srv.local_address] = False
         except queue.Empty:
-            self.logger.debug("Tunnel to {0} is UP".format(_srv.remote_address))
+            self.logger.bind(status="UP", address=_srv.remote_address).debug("Tunnel Status")
             self.tunnel_is_up[_srv.local_address] = True
         finally:
             s.close()


@@ -63,7 +63,7 @@ def load_dsl(path: Path, *, empty_allowed: bool) -> LoadedConfig:
         raise ConfigError(
             "'{!s}' exists, but it is empty and is required to start hyperglass.".format(path),
         )
-    log.debug("Loaded configuration from {!s}", path)
+    log.bind(path=path).debug("Loaded configuration")
     return data or {}
@@ -101,7 +101,7 @@ def load_python(path: Path, *, empty_allowed: bool) -> LoadedConfig:
     if data is None and empty_allowed is False:
         raise ConfigError(f"'{path!s} exists', but variable or function 'main' is an invalid type")
 
-    log.debug("Loaded configuration from {!s}", path)
+    log.bind(path=path).debug("Loaded configuration")
     return data or {}


@@ -4,7 +4,7 @@
 from pydantic import ValidationError
 
 # Project
-from hyperglass.log import log, enable_file_logging, enable_syslog_logging
+from hyperglass.log import log
 from hyperglass.settings import Settings
 from hyperglass.models.ui import UIParameters
 from hyperglass.models.directive import Directive, Directives
@@ -31,7 +31,7 @@ def init_files() -> None:
         path = Settings.app_path / directory
         if not path.exists():
             path.mkdir(parents=True)
-            log.debug("Created directory {!s}", path)
+            log.debug("Created directory", path=path)
 
 
 def init_params() -> "Params":
@@ -40,20 +40,20 @@ def init_params() -> "Params":
     # Map imported user configuration to expected schema.
     params = Params(**user_config)
 
-    # Set up file logging once configuration parameters are initialized.
-    enable_file_logging(
-        log_directory=params.logging.directory,
-        log_format=params.logging.format,
-        log_max_size=params.logging.max_size,
-        debug=Settings.debug,
-    )
+    # # Set up file logging once configuration parameters are initialized.
+    # enable_file_logging(
+    #     log_directory=params.logging.directory,
+    #     log_format=params.logging.format,
+    #     log_max_size=params.logging.max_size,
+    #     debug=Settings.debug,
+    # )
 
     # Set up syslog logging if enabled.
-    if params.logging.syslog is not None and params.logging.syslog.enable:
-        enable_syslog_logging(
-            syslog_host=params.logging.syslog.host,
-            syslog_port=params.logging.syslog.port,
-        )
+    # if params.logging.syslog is not None and params.logging.syslog.enable:
+    #     enable_syslog_logging(
+    #         syslog_host=params.logging.syslog.host,
+    #         syslog_port=params.logging.syslog.port,
+    #     )
 
     if params.logging.http is not None and params.logging.http.enable:
         log.debug("HTTP logging is enabled")
@@ -103,7 +103,7 @@ def init_devices() -> "Devices":
         raise ConfigError("No devices are defined in devices file")
 
     devices = Devices(*items)
-    log.debug("Initialized devices {!r}", devices)
+    log.debug("Initialized devices", devices=devices)
 
     return devices


@@ -107,6 +107,7 @@ class PublicHyperglassError(HyperglassError):
     _level = "warning"
     _message_template = "Something went wrong."
+    _original_template_name: str = ""
 
     def __init_subclass__(
         cls, *, template: Optional[str] = None, level: Optional[ErrorLevel] = None
@@ -115,6 +116,7 @@ class PublicHyperglassError(HyperglassError):
         if template is not None:
             cls._message_template = template
+            cls._original_template_name = template
         if level is not None:
             cls._level = level
@@ -128,19 +130,17 @@ class PublicHyperglassError(HyperglassError):
             error = self._safe_format(str(error), **kwargs)
             kwargs["error"] = error
 
+        template = self._message_template
         (messages := use_state("params").messages)
-        if messages.has(self._message_template):
-            self._message_template = messages[self._message_template]
+        if messages.has(self._original_template_name):
+            template = messages[self._original_template_name]
 
-        self._message = self._safe_format(self._message_template, **kwargs)
+        if "error" in kwargs and "({error})" not in template:
+            template += " ({error})"
+
+        self._message = self._safe_format(template, **kwargs)
         self._keywords = list(kwargs.values())
         super().__init__(message=self._message, level=self._level, keywords=self._keywords)
 
-    def handle_error(self, error: Any) -> None:
-        """Add details to the error template, if provided."""
-        if error is not None:
-            self._message_template = self._message_template + " ({error})"
-
 
 class PrivateHyperglassError(HyperglassError):
     """Base exception class for internal system errors.
@@ -156,7 +156,7 @@ class PrivateHyperglassError(HyperglassError):
             k: ", ".join(str(loc) for t in errors for loc in t["loc"] if t["type"] == k)
             for k in {e["type"] for e in errors}
         }
-        return ", ".join([f"{k} ({v})" for k, v in parsed.items()])
+        return ", ".join(parsed.values())
 
     def __init_subclass__(cls, *, level: Optional[ErrorLevel] = None) -> None:
         """Override error attributes from subclass."""


@@ -59,11 +59,9 @@ class InvalidQuery(PublicHyperglassError, template="request_timeout"):
         kwargs = {
             "query_type": query.query_type,
             "target": query.query_target,
+            "error": str(error),
             **kwargs,
         }
-        if error is not None:
-            self.handle_error(error)
-            kwargs["error"] = str(error)
         super().__init__(**kwargs)
@@ -109,10 +107,7 @@ class InputInvalid(PublicHyperglassError, template="invalid_input"):
     ) -> None:
         """Initialize parent error."""
-        kwargs = {"target": target, **kwargs}
-        if error is not None:
-            self.handle_error(error)
-            kwargs["error"] = str(error)
+        kwargs = {"target": target, "error": str(error), **kwargs}
         super().__init__(**kwargs)
@@ -128,11 +123,9 @@ class InputNotAllowed(PublicHyperglassError, template="target_not_allowed"):
         kwargs = {
             "query_type": query.query_type,
             "target": query.query_target,
+            "error": str(error),
             **kwargs,
         }
-        if error is not None:
-            self.handle_error(error)
-            kwargs["error"] = str(error)
         super().__init__(**kwargs)
@@ -148,10 +141,8 @@ class ResponseEmpty(PublicHyperglassError, template="no_output"):
         kwargs = {
             "query_type": query.query_type,
             "target": query.query_target,
+            "error": str(error),
             **kwargs,
         }
-        if error is not None:
-            self.handle_error(error)
-            kwargs["error"] = str(error)
         super().__init__(**kwargs)


@@ -19,6 +19,9 @@ from hyperglass.exceptions.public import InputInvalid
 from hyperglass.exceptions.private import ConfigError
 
 if t.TYPE_CHECKING:
+    # Third Party
+    from loguru import Logger
+
     # Project
     from hyperglass.models.api.query import Query, QueryTarget
     from hyperglass.models.directive import Directive
@@ -35,14 +38,12 @@ class Construct:
     query: "Query"
     transport: str
     target: str
+    _log: "Logger"
 
     def __init__(self, device: "Device", query: "Query"):
         """Initialize command construction."""
-        log.debug(
-            "Constructing '{}' query for '{}'",
-            query.query_type,
-            str(query.query_target),
-        )
+        self._log = log.bind(type=query.query_type, target=query.query_target)
+        self._log.debug("Constructing query")
         self.query = query
         self.device = device
         self.target = self.query.query_target
@@ -75,7 +76,7 @@ class Construct:
     def json(self, afi):
         """Return JSON version of validated query for REST devices."""
-        log.debug("Building JSON query for {q}", q=repr(self.query))
+        self._log.debug("Building JSON query")
         return _json.dumps(
             {
                 "query_type": self.query.query_type,
@@ -125,8 +126,7 @@ class Construct:
         for rule in [r for r in self.directive.rules if r._passed is True]:
             for command in rule.commands:
                 query.append(self.format(command))
-
-        log.debug("Constructed query: {}", query)
+        self._log.bind(constructed_query=query).debug("Constructed query")
         return query
@@ -189,7 +189,7 @@ class Formatter:
         if was_modified:
             modified = " ".join(asns)
-            log.debug("Modified target '{}' to '{}'", target, modified)
+            log.bind(original=target, modified=modified).debug("Modified target")
             return modified
         return query
@@ -217,7 +217,7 @@ class Formatter:
         result = " ".join(asns)
         if was_modified:
-            log.debug("Modified target '{}' to '{}'", target, result)
+            log.bind(original=target, modified=result).debug("Modified target")
         return result


@@ -51,8 +51,8 @@ class SSHConnection(Connection):
             return open_tunnel(proxy._target, proxy.port, **tunnel_kwargs)
 
         except BaseSSHTunnelForwarderError as scrape_proxy_error:
-            log.error(
-                f"Error connecting to device {self.device.name} via " f"proxy {proxy.name}"
+            log.bind(device=self.device.name, proxy=proxy.name).error(
+                "Failed to connect to device via proxy"
             )
             raise ScrapeError(
                 error=scrape_proxy_error, device=self.device


@@ -47,15 +47,13 @@ class NetmikoConnection(SSHConnection):
         command output.
         """
         params = use_state("params")
-        if host is not None:
-            log.debug(
-                "Connecting to {} via proxy {} [{}]",
-                self.device.name,
-                self.device.proxy.address,
-                f"{host}:{port}",
-            )
-        else:
-            log.debug("Connecting directly to {}", self.device.name)
+        _log = log.bind(
+            device=self.device.name,
+            address=f"{host}:{port}",
+            proxy=str(self.device.proxy.address) if self.device.proxy is not None else None,
+        )
+
+        _log.debug("Connecting to device")
 
         global_args = netmiko_device_globals.get(self.device.platform, {})


@@ -47,9 +47,8 @@ async def execute(query: "Query") -> Union["OutputDataModel", str]:
     """Initiate query validation and execution."""
     params = use_state("params")
     output = params.messages.general
+    _log = log.bind(query=query, device=query.device)
 
-    log.debug("Received query {!r}", query)
-    log.debug("Matched device config: {!r}", query.device)
+    _log.debug("")
 
     mapped_driver = map_driver(query.device.driver)
     driver: "Connection" = mapped_driver(query.device, query)
@@ -68,8 +67,7 @@ async def execute(query: "Query") -> Union["OutputDataModel", str]:
         response = await driver.collect()
 
         output = await driver.response(response)
-
-        log.debug("Response for {!r}\n{response}", query, response=response)
+        _log.bind(response=response).debug("Query response")
 
         if is_series(output):
             if len(output) == 0:


@@ -85,7 +85,7 @@ class BaseExternal:
         available = await self._atest()
 
         if available:
-            log.debug("Initialized session with {}", self.base_url)
+            log.bind(url=self.base_url).debug("Initialized session")
             return self
 
         raise self._exception(f"Unable to create session to {self.name}")
@@ -96,7 +96,7 @@ class BaseExternal:
         traceback: t.Optional["TracebackType"] = None,
     ) -> True:
         """Close connection on exit."""
-        log.debug("Closing session with {}", self.base_url)
+        log.bind(url=self.base_url).debug("Closing session")
 
         if exc_type is not None:
             log.error(str(exc_value))
@@ -111,7 +111,7 @@ class BaseExternal:
         available = self._test()
 
         if available:
-            log.debug("Initialized session with {}", self.base_url)
+            log.bind(url=self.base_url).debug("Initialized session")
             return self
 
         raise self._exception(f"Unable to create session to {self.name}")
@@ -162,7 +162,7 @@ class BaseExternal:
     def _test(self: "BaseExternal") -> bool:
         """Open a low-level connection to the base URL to ensure its port is open."""
-        log.debug("Testing connection to {}", self.base_url)
+        log.bind(url=self.base_url).debug("Testing connection")
 
         try:
             # Parse out just the hostname from a URL string.
@@ -199,9 +199,16 @@ class BaseExternal:
         supported_methods = ("GET", "POST", "PUT", "DELETE", "HEAD", "PATCH")
 
-        (method, endpoint, item, headers, params, data, timeout, response_required,) = itemgetter(
-            *kwargs.keys()
-        )(kwargs)
+        (
+            method,
+            endpoint,
+            item,
+            headers,
+            params,
+            data,
+            timeout,
+            response_required,
+        ) = itemgetter(*kwargs.keys())(kwargs)
 
         if method.upper() not in supported_methods:
             raise self._exception(
@@ -245,8 +252,7 @@ class BaseExternal:
         except TypeError as err:
             raise self._exception(f"Timeout must be an int, got: {str(timeout)}") from err
         request["timeout"] = timeout
-
-        log.debug("Constructed request parameters {}", request)
+        log.bind(request=request).debug("Constructed request parameters")
         return request
 
     async def _arequest(  # noqa: C901


@@ -82,7 +82,6 @@ def parse_whois(output: str, targets: t.List[str]) -> TargetDetail:
     data = {}
-
     for line in lines(output):
         # Unpack each line's parsed values.
         asn, ip, prefix, country, rir, allocated, org = line
@@ -98,8 +97,7 @@ def parse_whois(output: str, targets: t.List[str]) -> TargetDetail:
             "allocated": allocated,
             "org": org,
         }
-
-    log.debug("Parsed bgp.tools data: {}", data)
+    log.bind(data=data).debug("Parsed bgp.tools data")
     return data
@@ -151,7 +149,7 @@ async def network_info(*targets: str) -> TargetData:
         for target in (target for target in query_targets if target in cached):
             # Reassign the cached network info to the matching resource.
             query_data[target] = cached[target]
-            log.debug("Using cached network info for {}", target)
+            log.bind(target=target).debug("Using cached network info")
 
         # Remove cached items from the resource list so they're not queried.
         targets = [t for t in query_targets if t not in cached]
@@ -167,10 +165,10 @@ async def network_info(*targets: str) -> TargetData:
             # Cache the response
             for target in targets:
                 cache.set_map_item(CACHE_KEY, target, query_data[target])
-                log.debug("Cached network info for {}", t)
+                log.bind(target=t).debug("Cached network info")
 
     except Exception as err:
-        log.error(str(err))
+        log.error(err)
 
     return {**default_data, **query_data}


@@ -27,8 +27,7 @@ class GenericHook(BaseExternal, name="Generic"):
         """Send an incoming webhook to http endpoint."""
         payload = Webhook(**query)
-
-        log.debug("Sending query data to {}:\n{}", self.config.host.host, payload)
+        log.bind(host=self.config.host.host, payload=payload).debug("Sending request")
 
         return await self._apost(
             endpoint=self.config.host.path,


@@ -25,7 +25,6 @@ class MSTeams(BaseExternal, name="MSTeams"):
         """Send an incoming webhook to Microsoft Teams."""
         payload = Webhook(**query)
-
-        log.debug("Sending query data to Microsoft Teams:\n{}", payload)
+        log.bind(destination="MS Teams", payload=payload).debug("Sending request")
 
         return await self._apost(endpoint=self.config.host.path, data=payload.msteams())


@@ -19,7 +19,8 @@ CACHE_KEY = "hyperglass.external.rpki"
 def rpki_state(prefix: t.Union["IPv4Address", "IPv6Address", str], asn: t.Union[int, str]) -> int:
     """Get RPKI state and map to expected integer."""
-    log.debug("Validating RPKI State for {p} via AS{a}", p=prefix, a=asn)
+    _log = log.bind(prefix=prefix, asn=asn)
+    _log.debug("Validating RPKI State")
 
     cache = use_state("cache")
@@ -31,23 +32,22 @@ def rpki_state(prefix: t.Union["IPv4Address", "IPv6Address", str], asn: t.Union[
     if cached is not None:
         state = cached
     else:
         ql = 'query GetValidation {{ validation(prefix: "{}", asn: {}) {{ state }} }}'
         query = ql.format(prefix, asn)
 
-        log.debug("Cloudflare RPKI GraphQL Query: {!r}", query)
+        _log.bind(query=query).debug("Cloudflare RPKI GraphQL Query")
 
         try:
             with BaseExternal(base_url="https://rpki.cloudflare.com") as client:
                 response = client._post("/api/graphql", data={"query": query})
             try:
                 validation_state = response["data"]["validation"]["state"]
             except KeyError as missing:
-                log.error("Response from Cloudflare missing key '{}': {!r}", missing, response)
+                _log.error("Response from Cloudflare missing key '{}': {!r}", missing, response)
                 validation_state = 3
             state = RPKI_STATE_MAP[validation_state]
             cache.set_map_item(CACHE_KEY, ro, state)
         except Exception as err:
-            log.error(str(err))
+            log.error(err)
             # Don't cache the state when an error produced it.
             state = 3


@@ -25,7 +25,6 @@ class SlackHook(BaseExternal, name="Slack"):
         """Send an incoming webhook to Slack."""
         payload = Webhook(**query)
-
-        log.debug("Sending query data to Slack:\n{}", payload)
+        log.bind(destination="Slack", payload=payload).debug("Sending request")
 
         return await self._apost(endpoint=self.config.host.path, data=payload.slack())


@@ -12,7 +12,6 @@ from pathlib import Path
 # Project
 from hyperglass.log import log
 from hyperglass.util import copyfiles, check_path, dotenv_to_dict
-from hyperglass.state import use_state
 
 if t.TYPE_CHECKING:
     # Project
@@ -25,11 +24,7 @@ def get_ui_build_timeout() -> t.Optional[int]:
     if "HYPERGLASS_UI_BUILD_TIMEOUT" in os.environ:
         timeout = int(os.environ["HYPERGLASS_UI_BUILD_TIMEOUT"])
-        log.info("Found UI build timeout environment variable: {}", timeout)
-    elif "POETRY_HYPERGLASS_UI_BUILD_TIMEOUT" in os.environ:
-        timeout = int(os.environ["POETRY_HYPERGLASS_UI_BUILD_TIMEOUT"])
-        log.info("Found UI build timeout environment variable: {}", timeout)
+        log.bind(timeout=timeout).debug("Found UI build timeout environment variable")
 
     return timeout
@@ -61,7 +56,7 @@ async def read_package_json() -> t.Dict[str, t.Any]:
     except Exception as err:
         raise RuntimeError(f"Error reading package.json: {str(err)}") from err
 
-    log.debug("package.json:\n{p}", p=package_json)
+    log.bind(package_json=package_json).debug("package.json value")
 
     return package_json
@@ -173,7 +168,7 @@ def generate_opengraph(
     # Copy the original image to the target path
     copied = shutil.copy2(image_path, target_path)
-    log.debug("Copied {} to {}", str(image_path), str(target_path))
+    log.bind(source=str(image_path), destination=str(target_path)).debug("Copied OpenGraph image")
 
     with Image.open(copied) as src:
         # Only resize the image if it needs to be resized
@@ -200,8 +195,7 @@ def generate_opengraph(
     if not dst_path.exists():
         raise RuntimeError(f"Unable to save resized image to {str(dst_path)}")
-
-    log.debug("Opengraph image ready at {}", str(dst_path))
+    log.bind(path=str(dst_path)).debug("OpenGraph image ready")
 
     return True
@@ -327,7 +321,7 @@ async def build_frontend(  # noqa: C901
             base_url="/images/favicons/",
         ) as favicons:
             await favicons.generate()
-            log.debug("Generated {} favicons", favicons.completed)
+            log.bind(count=favicons.completed).debug("Generated favicons")
             write_favicon_formats(favicons.formats())
 
     build_data = {
@@ -337,7 +331,7 @@ async def build_frontend(  # noqa: C901
     }
 
     build_json = json.dumps(build_data, default=str)
-    log.debug("UI Build Data:\n{}", build_json)
+    log.bind(data=build_json).debug("UI Build Data")
 
     # Create SHA256 hash from all parameters passed to UI, use as
     # build identifier.
@@ -348,7 +342,7 @@ async def build_frontend(  # noqa: C901
     if dot_env_file.exists() and not force:
         env_data = dotenv_to_dict(dot_env_file)
         env_build_id = env_data.get("HYPERGLASS_BUILD_ID", "None")
-        log.debug("Previous Build ID: {!r}", env_build_id)
+        log.bind(id=env_build_id).debug("Previous build detected")
 
         if env_build_id == build_id:
             log.debug("UI parameters unchanged since last build, skipping UI build...")
@@ -357,7 +351,7 @@ async def build_frontend(  # noqa: C901
     env_config.update({"HYPERGLASS_BUILD_ID": build_id})
     dot_env_file.write_text("\n".join(f"{k}={v}" for k, v in env_config.items()))
-    log.debug("Wrote UI environment file {!r}", str(dot_env_file))
+    log.bind(path=str(dot_env_file)).debug("Wrote UI environment file")
 
     # Initiate Next.JS export process.
     if any((not dev_mode, force, full)):
@@ -371,7 +365,7 @@ async def build_frontend(  # noqa: C901
         log.debug("Re-initialized node_modules")
 
     if build_result:
-        log.success("Completed UI build")
+        log.info("Completed UI build")
     elif dev_mode and not force:
         log.debug("Running in developer mode, did not build new UI files")


@@ -11,9 +11,9 @@ from loguru import logger as _loguru_logger
 from rich.theme import Theme
 from rich.console import Console
 from rich.logging import RichHandler
-from gunicorn.glogging import Logger as GunicornLogger  # type: ignore
 
 # Local
+from .util import dict_to_kwargs
 from .constants import __version__
 
 if t.TYPE_CHECKING:
@@ -21,20 +21,21 @@ if t.TYPE_CHECKING:
     from pathlib import Path
 
     # Third Party
-    from loguru import Logger as LoguruLogger
+    from loguru import Logger as Record
     from pydantic import ByteSize
 
     # Project
     from hyperglass.models.fields import LogFormat
 
-_FMT = (
-    "<lvl><b>[{level}]</b> {time:YYYYMMDD} {time:HH:mm:ss} <lw>|</lw> {name}<lw>:</lw>"
-    "<b>{line}</b> <lw>|</lw> {function}</lvl> <lvl><b>→</b></lvl> {message}"
+_FMT_DEBUG = (
+    "<lvl><b>[{level}]</b> {time:YYYYMMDD} {time:HH:mm:ss} <lw>|</lw>"
+    "<b>{line}</b> <lw>|</lw> {function}</lvl> <lvl><b>→</b></lvl> {message} {extra}"
 )
-_FMT_FILE = "[{time:YYYYMMDD} {time:HH:mm:ss}] {message}"
-_DATE_FMT = "%Y%m%d %H:%M:%S"
-_FMT_BASIC = "{message}"
+_FMT = "<lvl><b>[{level}]</b> {time:YYYYMMDD} {time:HH:mm:ss} <lw>|</lw></lvl> {message} {extra}"
+_FMT_FILE = "[{time:YYYYMMDD} {time:HH:mm:ss}] {message} {extra}"
+_FMT_BASIC = "{message} {extra}"
 _LOG_LEVELS = [
     {"name": "TRACE", "color": "<m>"},
     {"name": "DEBUG", "color": "<c>"},
@@ -66,6 +67,32 @@ HyperglassConsole = Console(
 log = _loguru_logger
 
 
+def formatter(record: "Record") -> str:
+    """Format log messages with extra data as kwargs string."""
+    msg = record.get("message", "")
+    extra = record.get("extra", {})
+    extra_str = dict_to_kwargs(extra)
+    return " ".join((msg, extra_str))
+
+
+def filter_uvicorn_values(record: "Record") -> bool:
+    """Drop noisy uvicorn messages."""
+    drop = (
+        "Application startup",
+        "Application shutdown",
+        "Finished server process",
+        "Shutting down",
+        "Waiting for application",
+        "Started server process",
+        "Started parent process",
+        "Stopping parent process",
+    )
+    for match in drop:
+        if match in record["message"]:
+            return False
+    return True
+
+
 class LibInterceptHandler(logging.Handler):
     """Custom log handler for integrating third party library logging with hyperglass's logger."""
 
@@ -89,104 +116,51 @@ class LibInterceptHandler(logging.Handler):
         _loguru_logger.opt(depth=depth, exception=record.exc_info).log(level, record.getMessage())
 
 
-class CustomGunicornLogger(GunicornLogger):
-    """Custom logger to direct Gunicorn/Uvicorn logs to Loguru.
-
-    See: https://pawamoy.github.io/posts/unify-logging-for-a-gunicorn-uvicorn-app/
-    """
-
-    def setup(self, cfg: t.Any) -> None:
-        """Override Gunicorn setup."""
-        handler = logging.NullHandler()
-        self.error_logger = logging.getLogger("gunicorn.error")
-        self.error_logger.addHandler(handler)
-        self.access_logger = logging.getLogger("gunicorn.access")
-        self.access_logger.addHandler(handler)
-        self.error_logger.setLevel(cfg.loglevel)
-        self.access_logger.setLevel(cfg.loglevel)
-
-
-def setup_lib_logging(log_level: str) -> None:
-    """Override the logging handlers for dependency libraries.
-
-    See: https://pawamoy.github.io/posts/unify-logging-for-a-gunicorn-uvicorn-app/
-    """
-    intercept_handler = LibInterceptHandler()
-    names = {
-        name.split(".")[0]
-        for name in (
-            *logging.root.manager.loggerDict.keys(),
-            "gunicorn",
-            "gunicorn.access",
-            "gunicorn.error",
-            "uvicorn",
-            "uvicorn.access",
-            "uvicorn.error",
-            "uvicorn.asgi",
-            "netmiko",
-            "paramiko",
-            "httpx",
-        )
-    }
-    for name in names:
-        logging.getLogger(name).handlers = [intercept_handler]
-
-
-def _log_patcher(record):
-    """Patch for exception handling in logger.
-
-    See: https://github.com/Delgan/loguru/issues/504
-    """
-    exception = record["exception"]
-    if exception is not None:
-        fixed = Exception(str(exception.value))
-        record["exception"] = exception._replace(value=fixed)
-
-
-def init_logger(level: str = "INFO"):
+def init_logger(level: t.Union[int, str] = logging.INFO):
     """Initialize hyperglass logging instance."""
 
     # Reset built-in Loguru configurations.
     _loguru_logger.remove()
 
-    if sys.stdout.isatty():
+    if not sys.stdout.isatty():
         # Use Rich for logging if hyperglass started from a TTY.
         _loguru_logger.add(
             sink=RichHandler(
                 console=HyperglassConsole,
                 rich_tracebacks=True,
-                level=level,
-                tracebacks_show_locals=level == "DEBUG",
+                tracebacks_show_locals=level == logging.DEBUG,
                 log_time_format="[%Y%m%d %H:%M:%S]",
             ),
-            format=_FMT_BASIC,
+            format=formatter,
            level=level,
+            filter=filter_uvicorn_values,
            enqueue=True,
        )
    else:
        # Otherwise, use regular format.
-        _loguru_logger.add(sys.stdout, format=_FMT, level=level, enqueue=True)
+        _loguru_logger.add(
+            sink=sys.stdout,
+            enqueue=True,
+            format=_FMT if level == logging.INFO else _FMT_DEBUG,
+            level=level,
+            filter=filter_uvicorn_values,
+        )
 
-    _loguru_logger.configure(levels=_LOG_LEVELS, patcher=_log_patcher)
+    _loguru_logger.configure(levels=_LOG_LEVELS)
 
     return _loguru_logger
 
 
-def _log_success(self: "LoguruLogger", message: str, *a: t.Any, **kw: t.Any) -> None:
-    """Add custom builtin logging handler for the success level."""
-    if self.isEnabledFor(25):
-        self._log(25, message, a, **kw)
-
-
 def enable_file_logging(
-    log_directory: "Path", log_format: "LogFormat", log_max_size: "ByteSize", debug: bool
+    *,
+    directory: "Path",
+    log_format: "LogFormat",
+    max_size: "ByteSize",
+    level: t.Union[str, int],
 ) -> None:
     """Set up file-based logging from configuration parameters."""
-    log_level = "DEBUG" if debug else "INFO"
     if log_format == "json":
         log_file_name = "hyperglass.log.json"
         structured = True
@@ -194,7 +168,7 @@ def enable_file_logging(
         log_file_name = "hyperglass.log"
         structured = False
 
-    log_file = log_directory / log_file_name
+    log_file = directory / log_file_name
 
     if log_format == "text":
         now_str = datetime.utcnow().strftime("%B %d, %Y beginning at %H:%M:%S UTC")
@@ -203,7 +177,7 @@ def enable_file_logging(
             for line in (
                 f"hyperglass {__version__}",
                 f"Logs for {now_str}",
-                f"Log Level: {log_level}",
+                f"Log Level: {'INFO' if level == logging.INFO else 'DEBUG'}",
            )
        )
        header = "\n" + "\n".join(header_lines) + "\n"
@@ -216,31 +190,22 @@ def enable_file_logging(
        sink=log_file,
        format=_FMT_FILE,
        serialize=structured,
-        level=log_level,
+        level=level,
        encoding="utf8",
-        rotation=log_max_size.human_readable(),
+        rotation=max_size.human_readable(),
    )
 
-    log.debug("Logging to file {!s}", log_file)
+    _loguru_logger.bind(path=log_file).debug("Logging to file")
 
 
-def enable_syslog_logging(syslog_host: str, syslog_port: int) -> None:
+def enable_syslog_logging(*, host: str, port: int) -> None:
     """Set up syslog logging from configuration parameters."""
     # Standard Library
     from logging.handlers import SysLogHandler
 
     _loguru_logger.add(
-        SysLogHandler(address=(str(syslog_host), syslog_port)),
+        SysLogHandler(address=(str(host), port)),
         format=_FMT_BASIC,
         enqueue=True,
     )
-    log.debug(
-        "Logging to syslog target {}:{} enabled",
-        str(syslog_host),
-        str(syslog_port),
-    )
-
-
-# Side Effects
-logging.addLevelName(25, "SUCCESS")
-logging.Logger.success = _log_success
+    _loguru_logger.bind(host=host, port=port).debug("Logging to syslog target")

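The formatter added above delegates to dict_to_kwargs from hyperglass.util, which is not shown in this diff. A rough standalone approximation of the idea, assuming a simple key=value rendering (the helper below is illustrative, not the actual hyperglass implementation):

    import sys

    from loguru import logger


    def dict_to_kwargs_sketch(data: dict) -> str:
        # e.g. {"path": "/api", "status": 200} -> "path=/api status=200"
        return " ".join(f"{key}={value!s}" for key, value in data.items())


    def formatter_sketch(record) -> str:
        # When `format` is a callable, Loguru passes the record and expects a
        # format *template* back, so braces in dynamic text must be escaped and
        # the trailing newline added manually.
        extra = dict_to_kwargs_sketch(record["extra"]).replace("{", "{{").replace("}", "}}")
        return "[{level}] {time:YYYYMMDD HH:mm:ss} | {message} " + extra + "\n"


    logger.remove()
    logger.add(sys.stdout, format=formatter_sketch)
    logger.bind(path="/api/query", status=200).info("Request handled")
    # Prints approximately: [INFO] 20240402 03:28:46 | Request handled path=/api/query status=200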

@@ -2,17 +2,14 @@
 # Standard Library
 import sys
-import shutil
 import typing as t
-import asyncio
-import platform
+import logging
 
 # Third Party
-from gunicorn.arbiter import Arbiter  # type: ignore
-from gunicorn.app.base import BaseApplication  # type: ignore
+import uvicorn
 
 # Local
-from .log import log, init_logger, setup_lib_logging
+from .log import LibInterceptHandler, init_logger, enable_file_logging, enable_syslog_logging
 from .util import get_node_version
 from .constants import MIN_NODE_VERSION, MIN_PYTHON_VERSION, __version__
@@ -34,10 +31,9 @@ from .util import cpu_count
 from .state import use_state
 from .settings import Settings
 
-log_level = "INFO" if Settings.debug is False else "DEBUG"
-
-setup_lib_logging(log_level)
-init_logger(log_level)
+LOG_LEVEL = logging.INFO if Settings.debug is False else logging.DEBUG
+logging.basicConfig(handlers=[LibInterceptHandler()], level=0, force=True)
+log = init_logger(LOG_LEVEL)
 
 
 async def build_ui() -> bool:
@@ -78,10 +74,7 @@ def register_all_plugins() -> None:
             failures += register_plugin(plugin_file, common=True)
 
     for failure in failures:
-        log.warning(
-            "Plugin {!r} is not a valid hyperglass plugin and was not registered",
-            failure,
-        )
+        log.bind(plugin=failure).warning("Invalid hyperglass plugin")
 
 
 def unregister_all_plugins() -> None:
@@ -93,124 +86,93 @@ def unregister_all_plugins() -> None:
         manager().reset()
 
 
-def on_starting(server: "Arbiter") -> None:
-    """Gunicorn pre-start tasks."""
-    python_version = platform.python_version()
-    required = ".".join((str(v) for v in MIN_PYTHON_VERSION))
-    log.debug("Python {} detected ({} required)", python_version, required)
-
-    register_all_plugins()
-
-    if not Settings.disable_ui:
-        asyncio.run(build_ui())
-
-
-def when_ready(server: "Arbiter") -> None:
-    """Gunicorn post-start hook."""
-    log.success(
-        "Started hyperglass {} on http://{} with {!s} workers",
-        __version__,
-        Settings.bind(),
-        server.app.cfg.settings["workers"].value,
+def start(*, log_level: t.Union[str, int], workers: int) -> None:
+    """Start hyperglass via ASGI server."""
+    uvicorn.run(
+        app="hyperglass.api:app",
+        host=str(Settings.host),
+        port=Settings.port,
+        workers=workers,
+        log_level=log_level,
+        log_config={
+            "version": 1,
+            "disable_existing_loggers": False,
+            "formatters": {
+                "default": {
+                    "()": "uvicorn.logging.DefaultFormatter",
+                    "format": "%(message)s",
+                },
+                "access": {
+                    "()": "uvicorn.logging.AccessFormatter",
+                    "format": "%(message)s",
+                },
+            },
+            "handlers": {
+                "default": {"formatter": "default", "class": "hyperglass.log.LibInterceptHandler"},
+                "access": {"formatter": "access", "class": "hyperglass.log.LibInterceptHandler"},
+            },
+            "loggers": {
+                "uvicorn.error": {"level": "ERROR", "handlers": ["default"], "propagate": False},
+                "uvicorn.access": {"level": "INFO", "handlers": ["access"], "propagate": False},
+            },
+        },
    )
 
 
-def on_exit(_: t.Any) -> None:
-    """Gunicorn shutdown tasks."""
-    state = use_state()
-    if not Settings.dev_mode:
-        state.clear()
-        log.info("Cleared hyperglass state")
-
-    unregister_all_plugins()
-
-    log.critical("Stopping hyperglass {}", __version__)
-
-
-class HyperglassWSGI(BaseApplication):
-    """Custom gunicorn app."""
-
-    def __init__(self: "HyperglassWSGI", app: str, options: t.Dict[str, t.Any]):
-        """Initialize custom WSGI."""
-        self.application = app
-        self.options = options or {}
-        super().__init__()
-
-    def load_config(self: "HyperglassWSGI"):
-        """Load gunicorn config."""
-        config = {
-            key: value
-            for key, value in self.options.items()
-            if key in self.cfg.settings and value is not None
-        }
-
-        for key, value in config.items():
-            self.cfg.set(key.lower(), value)
-
-    def load(self: "HyperglassWSGI"):
-        """Load gunicorn app."""
-        return self.application
-
-
-def start(*, log_level: str, workers: int, **kwargs) -> None:
-    """Start hyperglass via gunicorn."""
-    # Local
-    from .log import CustomGunicornLogger
-
-    HyperglassWSGI(
-        app="hyperglass.api:app",
-        options={
-            "preload": True,
-            "errorlog": "-",
-            "accesslog": "-",
-            "workers": workers,
-            "on_exit": on_exit,
-            "loglevel": log_level,
-            "bind": Settings.bind(),
-            "on_starting": on_starting,
-            "when_ready": when_ready,
-            "command": shutil.which("gunicorn"),
-            "logger_class": CustomGunicornLogger,
-            "worker_class": "uvicorn.workers.UvicornWorker",
-            "logconfig_dict": {"formatters": {"generic": {"format": "%(message)s"}}},
-            **kwargs,
-        },
-    ).run()
-
-
-def run(_workers: int = None):
+def run(workers: int = None):
     """Run hyperglass."""
     # Local
     from .configuration import init_user_config
 
     try:
-        log.debug("System settings: {!r}", Settings)
+        log.debug(repr(Settings))
         state = use_state()
         state.clear()
         init_user_config()
 
-        workers = 1 if Settings.debug else cpu_count(2)
-
-        start(log_level=log_level, workers=workers)
+        enable_file_logging(
+            directory=state.params.logging.directory,
+            max_size=state.params.logging.max_size,
+            log_format=state.params.logging.format,
+            level=LOG_LEVEL,
+        )
+
+        if state.params.logging.syslog is not None:
+            enable_syslog_logging(
+                host=state.params.logging.syslog.host,
+                port=state.params.logging.syslog.port,
+            )
+
+        _workers = workers
+        if workers is None:
+            if Settings.debug:
+                _workers = 1
+            else:
+                _workers = cpu_count(2)
+
+        log.bind(
+            version=__version__,
+            listening=f"http://{Settings.bind()}",
+            workers=_workers,
+        ).info(
+            "Starting hyperglass",
+        )
+
+        start(log_level=LOG_LEVEL, workers=_workers)
+        log.bind(version=__version__).critical("Stopping hyperglass")
     except Exception as error:
         log.critical(error)
         # Handle app exceptions.
         if not Settings.dev_mode:
             state = use_state()
             state.clear()
-            log.info("Cleared Redis cache")
+            log.debug("Cleared hyperglass state")
         unregister_all_plugins()
         raise error
-    except SystemExit:
-        # Handle Gunicorn exit.
-        sys.exit(4)
-    except BaseException:
+    except (SystemExit, BaseException):
         sys.exit(4)

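For background, the LibInterceptHandler wired into logging.basicConfig and the uvicorn log_config above follows the standard Loguru interception recipe: a logging.Handler that re-emits stdlib records through Loguru so third-party loggers share one sink. A minimal sketch of that recipe (simplified; the frame-depth lookup for accurate caller attribution is omitted, and this is not the hyperglass class itself):

    import logging

    from loguru import logger


    class InterceptHandler(logging.Handler):
        def emit(self, record: logging.LogRecord) -> None:
            # Map the stdlib level to a Loguru level name when one exists.
            try:
                level = logger.level(record.levelname).name
            except ValueError:
                level = record.levelno
            # Re-emit through Loguru, preserving any exception info.
            logger.opt(exception=record.exc_info).log(level, record.getMessage())


    # Route stdlib loggers (uvicorn, netmiko, paramiko, httpx, ...) into Loguru.
    logging.basicConfig(handlers=[InterceptHandler()], level=0, force=True)
    logging.getLogger("uvicorn.error").info("now handled by Loguru")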

@@ -25,6 +25,22 @@ QueryTarget = constr(strip_whitespace=True, min_length=1)
 QueryType = constr(strip_whitespace=True, strict=True, min_length=1)
 
 
+def deserialize(kw: t.Dict[str, t.Any]) -> "Query":
+    return Query(**kw)
+
+
+class SimpleQuery(BaseModel):
+    """A simple representation of a post-validated query."""
+
+    query_location: str
+    query_target: t.Union[t.List[str], str]
+    query_type: str
+
+    def __repr_name__(self) -> str:
+        """Alias SimpleQuery to Query for clarity in logging."""
+        return "Query"
+
+
 class Query(BaseModel):
     """Validation model for input query parameters."""
@@ -33,10 +49,12 @@ class Query(BaseModel):
     query_location: QueryLocation  # Device `name` field
     query_target: t.Union[t.List[QueryTarget], QueryTarget]
     query_type: QueryType  # Directive `id` field
+    _kwargs: t.Dict[str, t.Any]
 
     def __init__(self, **data) -> None:
         """Initialize the query with a UTC timestamp at initialization time."""
         super().__init__(**data)
+        self._kwargs = data
         self.timestamp = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
 
         state = use_state()
@@ -58,6 +76,14 @@ class Query(BaseModel):
         except InputValidationError as err:
             raise InputInvalid(**err.kwargs) from err
 
+    def summary(self) -> SimpleQuery:
+        """Summarized and post-validated model of a Query."""
+        return SimpleQuery(
+            query_location=self.query_location,
+            query_target=self.query_target,
+            query_type=self.query_type,
+        )
+
     def __repr__(self) -> str:
         """Represent only the query fields."""
         return repr_from_attrs(self, ("query_location", "query_type", "query_target"))
@@ -82,7 +108,7 @@ class Query(BaseModel):
         self.directive.validate_target(self.query_target)
         # Run plugin-based validations.
         self._input_plugin_manager.validate(query=self)
-        log.debug("Validation passed for query {!r}", self)
+        log.bind(query=self.summary()).debug("Validation passed")
 
     def transform_query_target(self) -> QueryTarget:
         """Transform a query target based on defined plugins."""

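A note on the summary() helper introduced above: passing a small Pydantic model to bind() keeps log records compact, since Loguru stores the object in extra and renders its repr. A hedged sketch of the idea using a stand-in model rather than the actual hyperglass classes:

    from loguru import logger
    from pydantic import BaseModel


    class QuerySummary(BaseModel):
        query_location: str
        query_type: str
        query_target: str


    summary = QuerySummary(
        query_location="router1", query_type="bgp_route", query_target="192.0.2.0/24"
    )
    # The model instance lands in the record's `extra` dict and is rendered via its repr.
    logger.bind(query=summary).debug("Validation passed")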

@@ -95,15 +95,8 @@ class Device(HyperglassModelWithId, extra="allow"):
         if name is None:
             raise ValueError("name is required.")
 
-        legacy_display_name = values.pop("display_name", None)
-
-        if legacy_display_name is not None:
-            log.warning("The 'display_name' field is deprecated. Use the 'name' field instead.")
-            device_id = generate_id(legacy_display_name)
-            display_name = legacy_display_name
-        else:
-            device_id = generate_id(name)
-            display_name = name
+        device_id = generate_id(name)
+        display_name = name
 
         return {"id": device_id, "name": display_name, "display_name": None, **values}
@@ -204,7 +197,11 @@ class Device(HyperglassModelWithId, extra="allow"):
             target = Settings.static_path / "images" / value.name
             copied = shutil.copy2(value, target)
-            log.debug("Copied {} avatar from {!r} to {!r}", values["name"], str(value), str(target))
+            log.bind(
+                device=values["name"],
+                source=str(value),
+                destination=str(target),
+            ).debug("Copied device avatar")
 
             with Image.open(copied) as src:
                 if src.width > 512:

@@ -10,6 +10,12 @@ from ..main import HyperglassModel
 class Messages(HyperglassModel):
     """Validation model for params.messages."""

+    model_config = ConfigDict(
+        title="Messages",
+        description="Customize almost all user-facing UI & API messages.",
+        json_schema_extra={"level": 2},
+    )
+
     no_input: str = Field(
         "{field} must be specified.",
         title="No Input",
@@ -85,9 +91,3 @@ class Messages(HyperglassModel):
         if not self.has(attr):
             raise KeyError(f"'{attr}' does not exist on Messages model")
         return getattr(self, attr)
-
-    model_config = ConfigDict(
-        title="Messages",
-        description="Customize almost all user-facing UI & API messages.",
-        json_schema_extra={"level": 2},
-    )

@@ -107,19 +107,22 @@ class RuleWithIP(Rule):
     def membership(self, target: IPvAnyNetwork, network: IPvAnyNetwork) -> bool:
         """Check if IP address belongs to network."""
-        log.debug("Checking membership of {} for {}", str(target), str(network))
+        _log = log.bind(target=str(target), network=str(network))
+        _log.debug("Checking target membership")
         if (
             network.network_address <= target.network_address
             and network.broadcast_address >= target.broadcast_address
         ):
-            log.debug("{} is a member of {}", target, network)
+            _log.debug("Target membership verified")
             return True
         return False

     def in_range(self, target: IPvAnyNetwork) -> bool:
         """Verify if target prefix length is within ge/le threshold."""
         if target.prefixlen <= self.le and target.prefixlen >= self.ge:
-            log.debug("{} is in range {}-{}", target, self.ge, self.le)
+            log.bind(target=str(target), range=f"{self.ge!s}-{self.le!s}").debug(
+                "Target is in range"
+            )
             return True
         return False
@@ -130,7 +133,7 @@ class RuleWithIP(Rule):
         if isinstance(target, t.List):
             if len(target) > 1:
                 self._passed = False
-                raise InputValidationError("Target must be a single value")
+                raise InputValidationError(error="Target must be a single value", target=target)
             target = target[0]
         try:
@@ -141,7 +144,9 @@ class RuleWithIP(Rule):
             raise InputValidationError(error=str(err), target=target) from err
         if valid_target.version != self.condition.version:
-            log.debug("{!s} is not the same IP version as {!s}", target, self.condition)
+            log.bind(target=str(target), condition=str(self.condition)).debug(
+                "Mismatching IP version"
+            )
             return False
         is_member = self.membership(valid_target, self.condition)
@@ -223,7 +228,7 @@ class RuleWithPattern(Rule):
                 return True
         if isinstance(target, t.List) and not multiple:
-            raise InputValidationError("Target must be a single value")
+            raise InputValidationError(error="Target must be a single value", target=target)
         result = validate_single_value(target)
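
The containment check in membership() above can be reproduced with only the standard library. A small self-contained sketch (illustrative prefixes; same-address-family networks only, since mixing IPv4 and IPv6 comparisons raises TypeError):

from ipaddress import ip_network

def is_member(target: str, network: str) -> bool:
    """True if `target` falls entirely inside `network`."""
    t, n = ip_network(target), ip_network(network)
    return n.network_address <= t.network_address and n.broadcast_address >= t.broadcast_address

print(is_member("192.0.2.128/25", "192.0.2.0/24"))   # True
print(is_member("198.51.100.0/24", "192.0.2.0/24"))  # False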

@@ -338,8 +338,9 @@ class MultiModel(RootModel[MultiModelT]):
         self._count = len(self.root)
         for item in new:
             log.debug(
-                "Added {} '{!s}' to {}",
+                "Added {} '{!s}' to {}".format(
                 item.__class__.__name__,
                 getattr(item, self.unique_by),
                 self.__class__.__name__,
+                )
             )

@@ -163,5 +163,5 @@ class AristaBGPTable(_AristaBase):
             winning_weight=WINNING_WEIGHT,
         )
-        log.debug("Serialized Arista response: {!r}", serialized)
+        log.bind(platform="arista_eos", response=repr(serialized)).debug("Serialized response")
         return serialized

@@ -115,5 +115,5 @@ class FRRRoute(_FRRBase):
             winning_weight="high",
         )
-        log.info("Serialized FRR response: {}", serialized)
+        log.bind(platform="frr", response=repr(serialized)).debug("Serialized response")
         return serialized

@@ -195,6 +195,5 @@ class JuniperBGPTable(JuniperBase):
         )
         serialized = BGPRouteTable(vrf=vrf, count=count, routes=routes, winning_weight="low")
-        log.debug("Serialized Juniper response: {}", repr(serialized))
+        log.bind(platform="juniper", response=repr(serialized)).debug("Serialized response")
         return serialized

@@ -51,13 +51,8 @@ def check_legacy_fields(*, model: str, data: t.Dict[str, t.Any]) -> t.Dict[str,
         new_value = data.get(field.new)
         if legacy_value is not None and new_value is None:
             if field.overwrite:
-                log.warning(
-                    (
-                        "The {!r} field has been deprecated and will be removed in a future release. "
-                        "Use the {!r} field moving forward."
-                    ),
-                    f"{model}.{field.old}",
-                    field.new,
+                log.bind(old_field=f"{model}.{field.old}", new_field=field.new).warning(
+                    "Deprecated field"
                 )
                 data[field.new] = legacy_value
             else:

@@ -108,7 +108,7 @@ class Webhook(HyperglassModel):
                 {"markdown": True, "facts": header_data},
             ],
         }
-        log.debug("Created MS Teams webhook: {}", str(payload))
+        log.bind(type="MS Teams", payload=str(payload)).debug("Created webhook")
         return payload
@@ -158,5 +158,5 @@ class Webhook(HyperglassModel):
                 },
             ],
         }
-        log.debug("Created Slack webhook: {}", str(payload))
+        log.bind(type="Slack", payload=str(payload)).debug("Created webhook")
         return payload

@@ -28,11 +28,13 @@ def parse_arista(output: t.Sequence[str]) -> "OutputDataModel":
     """Parse a Arista BGP JSON response."""
     result = None
+    _log = log.bind(plugin=BGPRoutePluginArista.__name__)

     for response in output:
         try:
             parsed: t.Dict = json.loads(response)
-            log.debug("Pre-parsed data:\n{}", parsed)
+            _log.debug("Pre-parsed data", data=parsed)
             vrf = list(parsed["vrfs"].keys())[0]
             routes = parsed["vrfs"][vrf]
@@ -46,19 +48,19 @@ def parse_arista(output: t.Sequence[str]) -> "OutputDataModel":
             result += bgp_table
         except json.JSONDecodeError as err:
-            log.critical("Error decoding JSON: {}", str(err))
+            _log.bind(error=str(err)).critical("Failed to decode JSON")
             raise ParsingError("Error parsing response data") from err
         except KeyError as err:
-            log.critical("'{}' was not found in the response", str(err))
+            _log.bind(key=str(err)).critical("Missing required key in response")
             raise ParsingError("Error parsing response data") from err
         except IndexError as err:
-            log.critical(str(err))
+            _log.critical(err)
             raise ParsingError("Error parsing response data") from err
         except ValidationError as err:
-            log.critical(str(err))
+            _log.critical(err)
             raise ParsingError(err.errors()) from err

     return result

@@ -69,6 +69,7 @@ def parse_juniper(output: Sequence[str]) -> "OutputDataModel":  # noqa: C901
     """Parse a Juniper BGP XML response."""
     result = None
+    _log = log.bind(plugin=BGPRoutePluginJuniper.__name__)

     for response in output:
         cleaned = clean_xml_output(response)
@@ -76,8 +77,7 @@ def parse_juniper(output: Sequence[str]) -> "OutputDataModel":  # noqa: C901
             parsed: "OrderedDict" = xmltodict.parse(
                 cleaned, force_list=("rt", "rt-entry", "community")
             )
-            log.debug("Initially Parsed Response: \n{}", parsed)
+            _log.debug("Pre-parsed data", data=parsed)

             if "rpc-reply" in parsed.keys():
                 if "xnm:error" in parsed["rpc-reply"]:
@@ -105,12 +105,15 @@ def parse_juniper(output: Sequence[str]) -> "OutputDataModel":  # noqa: C901
             result += bgp_table
         except xmltodict.expat.ExpatError as err:
+            _log.bind(error=str(err)).critical("Failed to decode XML")
             raise ParsingError("Error parsing response data") from err
         except KeyError as err:
+            _log.bind(key=str(err)).critical("Missing required key in response")
             raise ParsingError("{key} was not found in the response", key=str(err)) from err
         except ValidationError as err:
+            _log.critical(err)
             raise ParsingError(err) from err

     return result

@@ -111,10 +111,11 @@ class PluginManager(t.Generic[PluginT]):
             if issubclass(plugin, HyperglassPlugin):
                 instance = plugin(*args, **kwargs)
                 self._state.add_plugin(self._type, instance)
+                _log = log.bind(type=self._type, name=instance.name)
                 if instance._hyperglass_builtin is True:
-                    log.debug("Registered {} built-in plugin {!r}", self._type, instance.name)
+                    _log.debug("Registered built-in plugin")
                 else:
-                    log.success("Registered {} plugin {!r}", self._type, instance.name)
+                    _log.info("Registered plugin")
                 return
         except TypeError:
             raise PluginError(  # noqa: B904
@@ -148,7 +149,7 @@ class InputPluginManager(PluginManager[InputPlugin], type="input"):
         for plugin in self._gather_plugins(query):
             result = plugin.validate(query)
             result_test = "valid" if result is True else "invalid" if result is False else "none"
-            log.debug("Input Plugin Validation {!r} result={!r}", plugin.name, result_test)
+            log.bind(name=plugin.name, result=result_test).debug("Input Plugin Validation")
             if result is False:
                 raise InputValidationError(
                     error="No matched validation rules", target=query.query_target
@@ -162,7 +163,7 @@ class InputPluginManager(PluginManager[InputPlugin], type="input"):
         result = query.query_target
         for plugin in self._gather_plugins(query):
             result = plugin.transform(query=query)
-            log.debug("Input Plugin Transform {!r} result={!r}", plugin.name, result)
+            log.bind(name=plugin.name, result=repr(result)).debug("Input Plugin Transform")
         return result
@@ -182,9 +183,9 @@ class OutputPluginManager(PluginManager[OutputPlugin], type="output"):
         )
         common = (plugin for plugin in self.plugins() if plugin.common is True)
         for plugin in (*directives, *common):
-            log.debug("Output Plugin {!r} starting with\n{!r}", plugin.name, result)
+            log.bind(plugin=plugin.name, value=result).debug("Output Plugin Starting Value")
             result = plugin.process(output=result, query=query)
-            log.debug("Output Plugin {!r} completed with\n{!r}", plugin.name, result)
+            log.bind(plugin=plugin.name, value=result).debug("Output Plugin Ending Value")
             if result is False:
                 return result

@@ -25,5 +25,5 @@ class OutputPlugin(HyperglassPlugin, DirectivePlugin, PlatformPlugin):
     def process(self, *, output: OutputType, query: "Query") -> OutputType:
         """Process or manipulate output from a device."""
-        log.warning("Output plugin '{}' has not implemented a 'process()' method", self.name)
+        log.warning("Output plugin has not implemented a 'process()' method", plugin=self.name)
         return output
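
For reference, a hedged sketch of a custom output plugin that overrides process(); the import path and the tuple-of-strings output shape are assumptions, not taken from this diff:

from hyperglass.plugins import OutputPlugin  # assumed import path

class ScrubUsernames(OutputPlugin):
    """Hypothetical plugin: drop lines containing 'username' from device output."""

    def process(self, *, output, query):
        # `output` is assumed here to be an iterable of strings.
        return tuple(line for line in output if "username" not in line.lower())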

@@ -8,8 +8,6 @@ from pathlib import Path
 import pytest

 # Project
-from hyperglass.log import log
-from hyperglass.models.api.query import Query
 from hyperglass.models.data.bgp_route import BGPRouteTable

 # Local

@@ -172,11 +172,12 @@ class RedisManager:
         ) -> None:
             pipeline_self.instance.execute()
             if exc_type is not None:
-                log.error(
-                    "Error in pipeline {!r} from parent instance {!r}:\n{!s}",
-                    pipeline_self,
-                    pipeline_self.parent,
-                    exc_value,
+                log.bind(
+                    pipeline=repr(pipeline_self),
+                    parent=repr(pipeline_self.parent),
+                    error=exc_value,
+                ).error(
+                    "Error exiting pipeline",
                 )
         return RedisManagerPipeline(

@@ -8,6 +8,7 @@ from .tools import (
     get_fmt_keys,
     compare_dicts,
     compare_lists,
+    dict_to_kwargs,
     snake_to_camel,
     parse_exception,
     repr_from_attrs,
@@ -29,6 +30,7 @@ __all__ = (
     "copyfiles",
     "cpu_count",
     "deep_convert_keys",
+    "dict_to_kwargs",
     "dotenv_to_dict",
     "get_driver",
     "get_fmt_keys",

@@ -7,13 +7,13 @@ from queue import Queue
 from pathlib import Path
 from threading import Thread

-# Project
-from hyperglass.log import log

 async def move_files(src: Path, dst: Path, files: t.Iterable[Path]) -> t.Tuple[str]:  # noqa: C901
     """Move iterable of files from source to destination."""
+    # Project
+    from hyperglass.log import log
+
     def error(*args, **kwargs):
         msg = ", ".join(args)
         kwargs = {k: str(v) for k, v in kwargs.items()}
@@ -91,6 +91,10 @@ class FileCopy(Thread):
 def copyfiles(src_files: t.Iterable[Path], dst_files: t.Iterable[Path]):
     """Copy iterable of files from source to destination with threading."""
+    # Project
+    from hyperglass.log import log
+
     queue = Queue()
     threads = ()
     src_files_len = len(src_files)
@@ -113,7 +117,7 @@ def copyfiles(src_files: t.Iterable[Path], dst_files: t.Iterable[Path]):
     for _ in src_files:
         copied = queue.get()
-        log.debug("Copied {}", str(copied))
+        log.bind(path=copied).debug("Copied file", path=copied)
     for thread in threads:
         thread.join()
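
The surrounding copyfiles() logic uses a simple worker/queue pattern: each thread copies one file and reports the destination on a shared Queue, which the main thread drains for logging. A generic standalone sketch of that pattern (not hyperglass's exact FileCopy implementation; the file paths are hypothetical):

import shutil
from pathlib import Path
from queue import Queue
from threading import Thread

def copy_one(src: Path, dst: Path, done: Queue) -> None:
    shutil.copy2(src, dst)
    done.put(dst)  # report completion to the main thread

pairs = [(Path("src/a.txt"), Path("dst/a.txt"))]  # hypothetical file pairs
done: Queue = Queue()
threads = [Thread(target=copy_one, args=(s, d, done)) for s, d in pairs]
for t in threads:
    t.start()
for _ in pairs:
    print(f"Copied {done.get()}")
for t in threads:
    t.join()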

@@ -13,6 +13,7 @@ from ..tools import (
     get_fmt_keys,
     compare_dicts,
     compare_lists,
+    dict_to_kwargs,
     snake_to_camel,
     parse_exception,
     repr_from_attrs,
@@ -115,7 +116,6 @@ def test_at_least():
 def test_compare_dicts():
     d1 = {"one": 1, "two": 2}
     d2 = {"one": 1, "two": 2}
     d3 = {"one": 1, "three": 3}
@@ -189,3 +189,25 @@ def test_compare_lists():
     assert compare_lists(list1, list2) is False
     assert compare_lists(list1, list3) is False
     assert compare_lists(list1, list4) is True
+
+
+def test_dict_to_kwargs():
+    class Test:
+        one: int
+        two: int
+
+        def __init__(self, **kw) -> None:
+            for k, v in kw.items():
+                setattr(self, k, v)
+
+        def __repr__(self) -> str:
+            return "Test(one={}, two={})".format(self.one, self.two)
+
+    d1 = {"one": 1, "two": 2}
+    e1 = "one=1 two=2"
+    d2 = {"cls": Test(one=1, two=2), "three": "three"}
+    e2 = "cls=Test(one=1, two=2) three='three'"
+    r1 = dict_to_kwargs(d1)
+    assert r1 == e1
+    r2 = dict_to_kwargs(d2)
+    assert r2 == e2

@@ -183,3 +183,12 @@ def compare_init(obj_a: object, obj_b: object) -> bool:
         obj_b.__init__.__annotations__.pop("self", None)
         return compare_dicts(obj_a.__init__.__annotations__, obj_b.__init__.__annotations__)
     return False
+
+
+def dict_to_kwargs(in_dict: t.Dict[str, t.Any]) -> str:
+    """Format a dict as a string of key/value pairs."""
+    items = []
+    for key, value in in_dict.items():
+        out_str = f"{key}={value!r}"
+        items = [*items, out_str]
+    return " ".join(items)
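
A quick usage sketch of the new helper (values are illustrative; assumes the dict_to_kwargs definition above is in scope). The output format matches the expectations in the test added above:

context = {"platform": "juniper", "response": "BGPRouteTable(...)"}
print(dict_to_kwargs(context))  # -> platform='juniper' response='BGPRouteTable(...)'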

@@ -56,7 +56,7 @@ def resolve_hostname(
     # Project
     from hyperglass.log import log

-    log.debug("Ensuring {!r} is resolvable...", hostname)
+    log.bind(hostname=hostname).debug("Ensuring hostname is resolvable")

     ip4 = None
     ip6 = None
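
A minimal standard-library sketch of the resolvability check this function performs (not hyperglass's exact implementation, which tracks IPv4 and IPv6 results separately):

import socket

def resolvable(hostname: str) -> bool:
    """Return True if the hostname resolves to at least one IPv4 or IPv6 address."""
    try:
        return len(socket.getaddrinfo(hostname, None)) > 0
    except socket.gaierror:
        return False

print(resolvable("localhost"))  # True on most systems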

@@ -12,9 +12,8 @@ dependencies = [
     "aiofiles>=23.2.1",
     "distro==1.8.0",
     "favicons==0.2.2",
-    "gunicorn>=21.2.0",
     "httpx==0.24.0",
-    "loguru==0.7.0",
+    "loguru>=0.7.2",
     "netmiko==4.1.2",
     "paramiko==3.4.0",
     "psutil==5.9.4",
@@ -29,7 +28,7 @@ dependencies = [
     "toml>=0.10.2",
     "pydantic-settings>=2.2.1",
     "pydantic-extra-types>=2.6.0",
-    "litestar[standard,brotli]>=2.7.0",
+    "litestar[standard,brotli]>=2.7.1",
 ]
 readme = "README.md"
 requires-python = ">= 3.11"

@@ -66,8 +66,6 @@ freetype-py==2.4.0
     # via rlpycairo
 future==0.18.3
     # via textfsm
-gunicorn==21.2.0
-    # via hyperglass
 h11==0.14.0
     # via httpcore
     # via uvicorn
@@ -90,9 +88,9 @@ jinja2==3.1.3
     # via litestar
 jsbeautifier==1.15.1
     # via litestar
-litestar==2.7.0
+litestar==2.7.1
     # via hyperglass
-loguru==0.7.0
+loguru==0.7.2
     # via hyperglass
 lxml==5.1.0
     # via svglib
@@ -118,7 +116,6 @@ ntc-templates==4.3.0
     # via netmiko
 packaging==23.2
     # via black
-    # via gunicorn
     # via pytest
 paramiko==3.4.0
     # via hyperglass
@@ -250,7 +247,7 @@ typing-extensions==4.9.0
 uvicorn==0.21.1
     # via hyperglass
     # via litestar
-uvloop==0.19.0
+uvloop==0.18.0
     # via hyperglass
     # via litestar
     # via uvicorn

@@ -53,8 +53,6 @@ freetype-py==2.4.0
     # via rlpycairo
 future==0.18.3
     # via textfsm
-gunicorn==21.2.0
-    # via hyperglass
 h11==0.14.0
     # via httpcore
     # via uvicorn
@@ -72,9 +70,9 @@ jinja2==3.1.3
     # via litestar
 jsbeautifier==1.15.1
     # via litestar
-litestar==2.7.0
+litestar==2.7.1
     # via hyperglass
-loguru==0.7.0
+loguru==0.7.2
     # via hyperglass
 lxml==5.1.0
     # via svglib
@@ -92,8 +90,6 @@ netmiko==4.1.2
     # via hyperglass
 ntc-templates==4.3.0
     # via netmiko
-packaging==24.0
-    # via gunicorn
 paramiko==3.4.0
     # via hyperglass
     # via netmiko
@@ -191,7 +187,7 @@ typing-extensions==4.9.0
 uvicorn==0.21.1
     # via hyperglass
     # via litestar
-uvloop==0.19.0
+uvloop==0.18.0
     # via hyperglass
     # via litestar
     # via uvicorn