logging overhaul

This commit is contained in:
thatmattlove 2024-04-01 23:28:46 -04:00
parent d2e1486b5a
commit d2f734ba1c
47 changed files with 412 additions and 458 deletions

2
.gitignore vendored
View file

@ -6,7 +6,7 @@ TODO*
test.py
.DS_Store
.idea
old_*.py
*_old.py
*.rdb
#
# Github Default from https://github.com/martinohanlon/flightlight/issues/1

View file

@ -1,4 +1,6 @@
"""hyperglass API."""
# Standard Library
import logging
# Third Party
from litestar import Litestar
@ -33,21 +35,28 @@ OPEN_API = OpenAPIConfig(
root_schema_site="elements",
)
HANDLERS = [
device,
devices,
queries,
info,
query,
]
app = Litestar(
route_handlers=[
device,
devices,
queries,
info,
query,
if not STATE.settings.disable_ui:
HANDLERS = [
*HANDLERS,
create_static_files_router(
path="/images", directories=[IMAGES_DIR], name="images", include_in_schema=False
),
create_static_files_router(
path="/", directories=[UI_DIR], name="ui", html_mode=True, include_in_schema=False
),
],
]
app = Litestar(
route_handlers=HANDLERS,
exception_handlers={
HTTPException: http_handler,
HyperglassError: app_handler,

View file

@ -2,6 +2,7 @@
# Third Party
from litestar import Request, Response
from litestar.exceptions import ValidationException
# Project
from hyperglass.log import log
@ -18,9 +19,7 @@ __all__ = (
def default_handler(request: Request, exc: BaseException) -> Response:
"""Handle uncaught errors."""
state = use_state()
log.critical(
"{method} {path} {detail!s}", method=request.method, path=request.url.path, detail=exc
)
log.bind(method=request.method, path=request.url.path, detail=str(exc)).critical("Error")
return Response(
{"output": state.params.messages.general, "level": "danger", "keywords": []},
status_code=500,
@ -29,9 +28,7 @@ def default_handler(request: Request, exc: BaseException) -> Response:
def http_handler(request: Request, exc: BaseException) -> Response:
"""Handle web server errors."""
log.critical(
"{method} {path} {detail}", method=request.method, path=request.url.path, detail=exc.detail
)
log.bind(method=request.method, path=request.url.path, detail=exc.detail).critical("HTTP Error")
return Response(
{"output": exc.detail, "level": "danger", "keywords": []},
status_code=exc.status_code,
@ -40,8 +37,8 @@ def http_handler(request: Request, exc: BaseException) -> Response:
def app_handler(request: Request, exc: BaseException) -> Response:
"""Handle application errors."""
log.critical(
"{method} {path} {detail}", method=request.method, path=request.url.path, detail=exc.message
log.bind(method=request.method, path=request.url.path, detail=exc.message).critical(
"hyperglass Error"
)
return Response(
{"output": exc.message, "level": exc.level, "keywords": exc.keywords},
@ -49,14 +46,11 @@ def app_handler(request: Request, exc: BaseException) -> Response:
)
def validation_handler(request: Request, exc: BaseException) -> Response:
def validation_handler(request: Request, exc: ValidationException) -> Response:
"""Handle Pydantic validation errors raised by FastAPI."""
error = exc.errors()[0]
log.critical(
"{method} {path} {detail}",
method=request.method,
path=request.url.path,
detail=error["msg"],
log.bind(method=request.method, path=request.url.path, detail=error["msg"]).critical(
"Validation Error"
)
return Response(
{"output": error["msg"], "level": "error", "keywords": error["loc"]},

View file

@ -73,7 +73,9 @@ async def query(_state: HyperglassState, request: Request, data: Query) -> Query
# each command output value is unique.
cache_key = f"hyperglass.query.{data.digest()}"
log.info("{!r} starting query execution", data)
_log = log.bind(query=data.summary())
_log.info("Starting query execution")
cache_response = cache.get_map(cache_key, "output")
json_output = False
@ -81,7 +83,7 @@ async def query(_state: HyperglassState, request: Request, data: Query) -> Query
runtime = 65535
if cache_response:
log.debug("{!r} cache hit (cache key {!r})", data, cache_key)
_log.bind(cache_key=cache_key).debug("Cache hit")
# If a cached response exists, reset the expiration time.
cache.expire(cache_key, expire_in=_state.params.cache.timeout)
@ -91,7 +93,7 @@ async def query(_state: HyperglassState, request: Request, data: Query) -> Query
timestamp = cache.get_map(cache_key, "timestamp")
elif not cache_response:
log.debug("{!r} cache miss (cache key {!r})", data, cache_key)
_log.bind(cache_key=cache_key).debug("Cache miss")
timestamp = data.timestamp
@ -109,7 +111,7 @@ async def query(_state: HyperglassState, request: Request, data: Query) -> Query
endtime = time.time()
elapsedtime = round(endtime - starttime, 4)
log.debug("{!r} runtime: {!s} seconds", data, elapsedtime)
_log.debug("Runtime: {!s} seconds", elapsedtime)
if output is None:
raise HyperglassError(message=_state.params.messages.general, alert="danger")
@ -125,7 +127,7 @@ async def query(_state: HyperglassState, request: Request, data: Query) -> Query
cache.set_map_item(cache_key, "timestamp", timestamp)
cache.expire(cache_key, expire_in=_state.params.cache.timeout)
log.debug("{!r} cached for {!s} seconds", data, _state.params.cache.timeout)
_log.bind(cache_timeout=_state.params.cache.timeout).debug("Response cached")
runtime = int(round(elapsedtime, 0))
@ -137,8 +139,7 @@ async def query(_state: HyperglassState, request: Request, data: Query) -> Query
if json_output:
response_format = "application/json"
log.success("{!r} execution completed", data)
_log.info("Execution completed")
response = {
"output": cache_response,

View file

@ -65,4 +65,6 @@ async def send_webhook(
}
)
except Exception as err:
log.error("Error sending webhook to {}: {!s}", params.logging.http.provider, err)
log.bind(destination=params.logging.http.provider, error=str(err)).error(
"Failed to send webhook"
)

View file

@ -217,19 +217,11 @@ class _ForwardHandler(socketserver.BaseRequestHandler):
data = self.request.recv(1024)
if not data:
break
self.logger.trace(
">>> OUT {0} send to {1}: {2} >>>".format(
self.info, self.remote_address, hexlify(data)
),
)
chan.sendall(data)
if chan in rqst: # else
if not chan.recv_ready():
break
data = chan.recv(1024)
self.logger.trace(
"<<< IN {0} recv: {1} <<<".format(self.info, hexlify(data)),
)
self.request.sendall(data)
def handle(self):
@ -249,10 +241,8 @@ class _ForwardHandler(socketserver.BaseRequestHandler):
chan = None
if chan is None:
msg = "{0} to {1} was rejected by the SSH server".format(self.info, self.remote_address)
self.logger.trace(msg)
raise HandlerSSHTunnelForwarderError(msg)
self.logger.trace("{0} connected".format(self.info))
try:
self._redirect(chan)
except socket.error:
@ -260,13 +250,12 @@ class _ForwardHandler(socketserver.BaseRequestHandler):
# exception. It was seen that a 3way FIN is processed later on, so
# no need to make an ordered close of the connection here or raise
# the exception beyond this point...
self.logger.trace("{0} sending RST".format(self.info))
except Exception as e:
self.logger.trace("{0} error: {1}".format(self.info, repr(e)))
pass
except Exception:
pass
finally:
chan.close()
self.request.close()
self.logger.trace("{0} connection closed.".format(self.info))
class _ForwardServer(socketserver.TCPServer): # Not Threading
@ -283,10 +272,7 @@ class _ForwardServer(socketserver.TCPServer): # Not Threading
def handle_error(self, request, client_address):
(exc_class, exc, tb) = sys.exc_info()
self.logger.error(
"Could not establish connection from {0} to remote " "side of the tunnel",
request.getsockname(),
)
self.logger.bind(source=request.getsockname()).error("Could not establish connection to remote side of the tunnel")
self.tunnel_ok.put(False)
@property
@ -814,16 +800,13 @@ class SSHTunnelForwarder:
check_host(self.ssh_host)
check_port(self.ssh_port)
self.logger.info(
"Connecting to gateway: {h}:{p} as user '{u}', timeout {t}",
h=self.ssh_host,
p=self.ssh_port,
u=self.ssh_username,
t=self.gateway_timeout,
)
self.logger.debug("Concurrent connections allowed: {0}", self._threaded)
self.logger.bind(
host=self.ssh_host,
port=self.ssh_port,
username=self.ssh_username,
timeout=self.gateway_timeout,
).info("Connecting to gateway")
self.logger.bind(count=self._threaded).debug("Concurrent connections allowed")
@staticmethod
def _read_ssh_config(
@ -1154,8 +1137,7 @@ class SSHTunnelForwarder:
if self.skip_tunnel_checkup:
self.tunnel_is_up[_srv.local_address] = True
return
self.logger.info("Checking tunnel to: {a}", a=_srv.remote_address)
self.logger.debug("Checking tunnel", address=_srv.remote_address)
if isinstance(_srv.local_address, str): # UNIX stream
s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
@ -1171,13 +1153,13 @@ class SSHTunnelForwarder:
)
s.connect(connect_to)
self.tunnel_is_up[_srv.local_address] = _srv.tunnel_ok.get(timeout=TUNNEL_TIMEOUT * 1.1)
self.logger.debug("Tunnel to {0} is DOWN".format(_srv.remote_address))
self.logger.bind(status="DOWN", address=_srv.remote_address).debug("Tunnel Status")
except socket.error:
self.logger.debug("Tunnel to {0} is DOWN".format(_srv.remote_address))
self.logger.bind(status="DOWN", address=_srv.remote_address).debug("Tunnel Status")
self.tunnel_is_up[_srv.local_address] = False
except queue.Empty:
self.logger.debug("Tunnel to {0} is UP".format(_srv.remote_address))
self.logger.bind(status="UP", address=_srv.remote_address).debug("Tunnel Status")
self.tunnel_is_up[_srv.local_address] = True
finally:
s.close()

View file

@ -63,7 +63,7 @@ def load_dsl(path: Path, *, empty_allowed: bool) -> LoadedConfig:
raise ConfigError(
"'{!s}' exists, but it is empty and is required to start hyperglass.".format(path),
)
log.debug("Loaded configuration from {!s}", path)
log.bind(path=path).debug("Loaded configuration")
return data or {}
@ -101,7 +101,7 @@ def load_python(path: Path, *, empty_allowed: bool) -> LoadedConfig:
if data is None and empty_allowed is False:
raise ConfigError(f"'{path!s} exists', but variable or function 'main' is an invalid type")
log.debug("Loaded configuration from {!s}", path)
log.bind(path=path).debug("Loaded configuration")
return data or {}

View file

@ -4,7 +4,7 @@
from pydantic import ValidationError
# Project
from hyperglass.log import log, enable_file_logging, enable_syslog_logging
from hyperglass.log import log
from hyperglass.settings import Settings
from hyperglass.models.ui import UIParameters
from hyperglass.models.directive import Directive, Directives
@ -31,7 +31,7 @@ def init_files() -> None:
path = Settings.app_path / directory
if not path.exists():
path.mkdir(parents=True)
log.debug("Created directory {!s}", path)
log.debug("Created directory", path=path)
def init_params() -> "Params":
@ -40,20 +40,20 @@ def init_params() -> "Params":
# Map imported user configuration to expected schema.
params = Params(**user_config)
# Set up file logging once configuration parameters are initialized.
enable_file_logging(
log_directory=params.logging.directory,
log_format=params.logging.format,
log_max_size=params.logging.max_size,
debug=Settings.debug,
)
# # Set up file logging once configuration parameters are initialized.
# enable_file_logging(
# log_directory=params.logging.directory,
# log_format=params.logging.format,
# log_max_size=params.logging.max_size,
# debug=Settings.debug,
# )
# Set up syslog logging if enabled.
if params.logging.syslog is not None and params.logging.syslog.enable:
enable_syslog_logging(
syslog_host=params.logging.syslog.host,
syslog_port=params.logging.syslog.port,
)
# if params.logging.syslog is not None and params.logging.syslog.enable:
# enable_syslog_logging(
# syslog_host=params.logging.syslog.host,
# syslog_port=params.logging.syslog.port,
# )
if params.logging.http is not None and params.logging.http.enable:
log.debug("HTTP logging is enabled")
@ -103,7 +103,7 @@ def init_devices() -> "Devices":
raise ConfigError("No devices are defined in devices file")
devices = Devices(*items)
log.debug("Initialized devices {!r}", devices)
log.debug("Initialized devices", devices=devices)
return devices

View file

@ -107,6 +107,7 @@ class PublicHyperglassError(HyperglassError):
_level = "warning"
_message_template = "Something went wrong."
_original_template_name: str = ""
def __init_subclass__(
cls, *, template: Optional[str] = None, level: Optional[ErrorLevel] = None
@ -115,6 +116,7 @@ class PublicHyperglassError(HyperglassError):
if template is not None:
cls._message_template = template
cls._original_template_name = template
if level is not None:
cls._level = level
@ -128,19 +130,17 @@ class PublicHyperglassError(HyperglassError):
error = self._safe_format(str(error), **kwargs)
kwargs["error"] = error
template = self._message_template
(messages := use_state("params").messages)
if messages.has(self._message_template):
self._message_template = messages[self._message_template]
self._message = self._safe_format(self._message_template, **kwargs)
if messages.has(self._original_template_name):
template = messages[self._original_template_name]
if "error" in kwargs and "({error})" not in template:
template += " ({error})"
self._message = self._safe_format(template, **kwargs)
self._keywords = list(kwargs.values())
super().__init__(message=self._message, level=self._level, keywords=self._keywords)
def handle_error(self, error: Any) -> None:
"""Add details to the error template, if provided."""
if error is not None:
self._message_template = self._message_template + " ({error})"
class PrivateHyperglassError(HyperglassError):
"""Base exception class for internal system errors.
@ -156,7 +156,7 @@ class PrivateHyperglassError(HyperglassError):
k: ", ".join(str(loc) for t in errors for loc in t["loc"] if t["type"] == k)
for k in {e["type"] for e in errors}
}
return ", ".join([f"{k} ({v})" for k, v in parsed.items()])
return ", ".join(parsed.values())
def __init_subclass__(cls, *, level: Optional[ErrorLevel] = None) -> None:
"""Override error attributes from subclass."""

View file

@ -59,11 +59,9 @@ class InvalidQuery(PublicHyperglassError, template="request_timeout"):
kwargs = {
"query_type": query.query_type,
"target": query.query_target,
"error": str(error),
**kwargs,
}
if error is not None:
self.handle_error(error)
kwargs["error"] = str(error)
super().__init__(**kwargs)
@ -109,10 +107,7 @@ class InputInvalid(PublicHyperglassError, template="invalid_input"):
) -> None:
"""Initialize parent error."""
kwargs = {"target": target, **kwargs}
if error is not None:
self.handle_error(error)
kwargs["error"] = str(error)
kwargs = {"target": target, "error": str(error), **kwargs}
super().__init__(**kwargs)
@ -128,11 +123,9 @@ class InputNotAllowed(PublicHyperglassError, template="target_not_allowed"):
kwargs = {
"query_type": query.query_type,
"target": query.query_target,
"error": str(error),
**kwargs,
}
if error is not None:
self.handle_error(error)
kwargs["error"] = str(error)
super().__init__(**kwargs)
@ -148,10 +141,8 @@ class ResponseEmpty(PublicHyperglassError, template="no_output"):
kwargs = {
"query_type": query.query_type,
"target": query.query_target,
"error": str(error),
**kwargs,
}
if error is not None:
self.handle_error(error)
kwargs["error"] = str(error)
super().__init__(**kwargs)

View file

@ -19,6 +19,9 @@ from hyperglass.exceptions.public import InputInvalid
from hyperglass.exceptions.private import ConfigError
if t.TYPE_CHECKING:
# Third Party
from loguru import Logger
# Project
from hyperglass.models.api.query import Query, QueryTarget
from hyperglass.models.directive import Directive
@ -35,14 +38,12 @@ class Construct:
query: "Query"
transport: str
target: str
_log: "Logger"
def __init__(self, device: "Device", query: "Query"):
"""Initialize command construction."""
log.debug(
"Constructing '{}' query for '{}'",
query.query_type,
str(query.query_target),
)
self._log = log.bind(type=query.query_type, target=query.query_target)
self._log.debug("Constructing query")
self.query = query
self.device = device
self.target = self.query.query_target
@ -75,7 +76,7 @@ class Construct:
def json(self, afi):
"""Return JSON version of validated query for REST devices."""
log.debug("Building JSON query for {q}", q=repr(self.query))
self._log.debug("Building JSON query")
return _json.dumps(
{
"query_type": self.query.query_type,
@ -125,8 +126,7 @@ class Construct:
for rule in [r for r in self.directive.rules if r._passed is True]:
for command in rule.commands:
query.append(self.format(command))
log.debug("Constructed query: {}", query)
self._log.bind(constructed_query=query).debug("Constructed query")
return query
@ -189,7 +189,7 @@ class Formatter:
if was_modified:
modified = " ".join(asns)
log.debug("Modified target '{}' to '{}'", target, modified)
log.bind(original=target, modified=modified).debug("Modified target")
return modified
return query
@ -217,7 +217,7 @@ class Formatter:
result = " ".join(asns)
if was_modified:
log.debug("Modified target '{}' to '{}'", target, result)
log.bind(original=target, modified=result).debug("Modified target")
return result

View file

@ -51,8 +51,8 @@ class SSHConnection(Connection):
return open_tunnel(proxy._target, proxy.port, **tunnel_kwargs)
except BaseSSHTunnelForwarderError as scrape_proxy_error:
log.error(
f"Error connecting to device {self.device.name} via " f"proxy {proxy.name}"
log.bind(device=self.device.name, proxy=proxy.name).error(
"Failed to connect to device via proxy"
)
raise ScrapeError(
error=scrape_proxy_error, device=self.device

View file

@ -47,15 +47,13 @@ class NetmikoConnection(SSHConnection):
command output.
"""
params = use_state("params")
if host is not None:
log.debug(
"Connecting to {} via proxy {} [{}]",
self.device.name,
self.device.proxy.address,
f"{host}:{port}",
)
else:
log.debug("Connecting directly to {}", self.device.name)
_log = log.bind(
device=self.device.name,
address=f"{host}:{port}",
proxy=str(self.device.proxy.address) if self.device.proxy is not None else None,
)
_log.debug("Connecting to device")
global_args = netmiko_device_globals.get(self.device.platform, {})

View file

@ -47,9 +47,8 @@ async def execute(query: "Query") -> Union["OutputDataModel", str]:
"""Initiate query validation and execution."""
params = use_state("params")
output = params.messages.general
log.debug("Received query {!r}", query)
log.debug("Matched device config: {!r}", query.device)
_log = log.bind(query=query, device=query.device)
_log.debug("")
mapped_driver = map_driver(query.device.driver)
driver: "Connection" = mapped_driver(query.device, query)
@ -68,8 +67,7 @@ async def execute(query: "Query") -> Union["OutputDataModel", str]:
response = await driver.collect()
output = await driver.response(response)
log.debug("Response for {!r}\n{response}", query, response=response)
_log.bind(response=response).debug("Query response")
if is_series(output):
if len(output) == 0:

View file

@ -85,7 +85,7 @@ class BaseExternal:
available = await self._atest()
if available:
log.debug("Initialized session with {}", self.base_url)
log.bind(url=self.base_url).debug("Initialized session")
return self
raise self._exception(f"Unable to create session to {self.name}")
@ -96,7 +96,7 @@ class BaseExternal:
traceback: t.Optional["TracebackType"] = None,
) -> True:
"""Close connection on exit."""
log.debug("Closing session with {}", self.base_url)
log.bind(url=self.base_url).debug("Closing session")
if exc_type is not None:
log.error(str(exc_value))
@ -111,7 +111,7 @@ class BaseExternal:
available = self._test()
if available:
log.debug("Initialized session with {}", self.base_url)
log.bind(url=self.base_url).debug("Initialized session")
return self
raise self._exception(f"Unable to create session to {self.name}")
@ -162,7 +162,7 @@ class BaseExternal:
def _test(self: "BaseExternal") -> bool:
"""Open a low-level connection to the base URL to ensure its port is open."""
log.debug("Testing connection to {}", self.base_url)
log.bind(url=self.base_url).debug("Testing connection")
try:
# Parse out just the hostname from a URL string.
@ -199,9 +199,16 @@ class BaseExternal:
supported_methods = ("GET", "POST", "PUT", "DELETE", "HEAD", "PATCH")
(method, endpoint, item, headers, params, data, timeout, response_required,) = itemgetter(
*kwargs.keys()
)(kwargs)
(
method,
endpoint,
item,
headers,
params,
data,
timeout,
response_required,
) = itemgetter(*kwargs.keys())(kwargs)
if method.upper() not in supported_methods:
raise self._exception(
@ -245,8 +252,7 @@ class BaseExternal:
except TypeError as err:
raise self._exception(f"Timeout must be an int, got: {str(timeout)}") from err
request["timeout"] = timeout
log.debug("Constructed request parameters {}", request)
log.bind(request=request).debug("Constructed request parameters")
return request
async def _arequest( # noqa: C901

View file

@ -82,7 +82,6 @@ def parse_whois(output: str, targets: t.List[str]) -> TargetDetail:
data = {}
for line in lines(output):
# Unpack each line's parsed values.
asn, ip, prefix, country, rir, allocated, org = line
@ -98,8 +97,7 @@ def parse_whois(output: str, targets: t.List[str]) -> TargetDetail:
"allocated": allocated,
"org": org,
}
log.debug("Parsed bgp.tools data: {}", data)
log.bind(data=data).debug("Parsed bgp.tools data")
return data
@ -151,7 +149,7 @@ async def network_info(*targets: str) -> TargetData:
for target in (target for target in query_targets if target in cached):
# Reassign the cached network info to the matching resource.
query_data[target] = cached[target]
log.debug("Using cached network info for {}", target)
log.bind(target=target).debug("Using cached network info")
# Remove cached items from the resource list so they're not queried.
targets = [t for t in query_targets if t not in cached]
@ -167,10 +165,10 @@ async def network_info(*targets: str) -> TargetData:
# Cache the response
for target in targets:
cache.set_map_item(CACHE_KEY, target, query_data[target])
log.debug("Cached network info for {}", t)
log.bind(target=t).debug("Cached network info")
except Exception as err:
log.error(str(err))
log.error(err)
return {**default_data, **query_data}

View file

@ -27,8 +27,7 @@ class GenericHook(BaseExternal, name="Generic"):
"""Send an incoming webhook to http endpoint."""
payload = Webhook(**query)
log.debug("Sending query data to {}:\n{}", self.config.host.host, payload)
log.bind(host=self.config.host.host, payload=payload).debug("Sending request")
return await self._apost(
endpoint=self.config.host.path,

View file

@ -25,7 +25,6 @@ class MSTeams(BaseExternal, name="MSTeams"):
"""Send an incoming webhook to Microsoft Teams."""
payload = Webhook(**query)
log.debug("Sending query data to Microsoft Teams:\n{}", payload)
log.bind(destination="MS Teams", payload=payload).debug("Sending request")
return await self._apost(endpoint=self.config.host.path, data=payload.msteams())

View file

@ -19,7 +19,8 @@ CACHE_KEY = "hyperglass.external.rpki"
def rpki_state(prefix: t.Union["IPv4Address", "IPv6Address", str], asn: t.Union[int, str]) -> int:
"""Get RPKI state and map to expected integer."""
log.debug("Validating RPKI State for {p} via AS{a}", p=prefix, a=asn)
_log = log.bind(prefix=prefix, asn=asn)
_log.debug("Validating RPKI State")
cache = use_state("cache")
@ -31,23 +32,22 @@ def rpki_state(prefix: t.Union["IPv4Address", "IPv6Address", str], asn: t.Union[
if cached is not None:
state = cached
else:
ql = 'query GetValidation {{ validation(prefix: "{}", asn: {}) {{ state }} }}'
query = ql.format(prefix, asn)
log.debug("Cloudflare RPKI GraphQL Query: {!r}", query)
_log.bind(query=query).debug("Cloudflare RPKI GraphQL Query")
try:
with BaseExternal(base_url="https://rpki.cloudflare.com") as client:
response = client._post("/api/graphql", data={"query": query})
try:
validation_state = response["data"]["validation"]["state"]
except KeyError as missing:
log.error("Response from Cloudflare missing key '{}': {!r}", missing, response)
_log.error("Response from Cloudflare missing key '{}': {!r}", missing, response)
validation_state = 3
state = RPKI_STATE_MAP[validation_state]
cache.set_map_item(CACHE_KEY, ro, state)
except Exception as err:
log.error(str(err))
log.error(err)
# Don't cache the state when an error produced it.
state = 3

View file

@ -25,7 +25,6 @@ class SlackHook(BaseExternal, name="Slack"):
"""Send an incoming webhook to Slack."""
payload = Webhook(**query)
log.debug("Sending query data to Slack:\n{}", payload)
log.bind(destination="Slack", payload=payload).debug("Sending request")
return await self._apost(endpoint=self.config.host.path, data=payload.slack())

View file

@ -12,7 +12,6 @@ from pathlib import Path
# Project
from hyperglass.log import log
from hyperglass.util import copyfiles, check_path, dotenv_to_dict
from hyperglass.state import use_state
if t.TYPE_CHECKING:
# Project
@ -25,11 +24,7 @@ def get_ui_build_timeout() -> t.Optional[int]:
if "HYPERGLASS_UI_BUILD_TIMEOUT" in os.environ:
timeout = int(os.environ["HYPERGLASS_UI_BUILD_TIMEOUT"])
log.info("Found UI build timeout environment variable: {}", timeout)
elif "POETRY_HYPERGLASS_UI_BUILD_TIMEOUT" in os.environ:
timeout = int(os.environ["POETRY_HYPERGLASS_UI_BUILD_TIMEOUT"])
log.info("Found UI build timeout environment variable: {}", timeout)
log.bind(timeout=timeout).debug("Found UI build timeout environment variable")
return timeout
@ -61,7 +56,7 @@ async def read_package_json() -> t.Dict[str, t.Any]:
except Exception as err:
raise RuntimeError(f"Error reading package.json: {str(err)}") from err
log.debug("package.json:\n{p}", p=package_json)
log.bind(package_json=package_json).debug("package.json value")
return package_json
@ -173,7 +168,7 @@ def generate_opengraph(
# Copy the original image to the target path
copied = shutil.copy2(image_path, target_path)
log.debug("Copied {} to {}", str(image_path), str(target_path))
log.bind(source=str(image_path), destination=str(target_path)).debug("Copied OpenGraph image")
with Image.open(copied) as src:
# Only resize the image if it needs to be resized
@ -200,8 +195,7 @@ def generate_opengraph(
if not dst_path.exists():
raise RuntimeError(f"Unable to save resized image to {str(dst_path)}")
log.debug("Opengraph image ready at {}", str(dst_path))
log.bind(path=str(dst_path)).debug("OpenGraph image ready")
return True
@ -327,7 +321,7 @@ async def build_frontend( # noqa: C901
base_url="/images/favicons/",
) as favicons:
await favicons.generate()
log.debug("Generated {} favicons", favicons.completed)
log.bind(count=favicons.completed).debug("Generated favicons")
write_favicon_formats(favicons.formats())
build_data = {
@ -337,7 +331,7 @@ async def build_frontend( # noqa: C901
}
build_json = json.dumps(build_data, default=str)
log.debug("UI Build Data:\n{}", build_json)
log.bind(data=build_json).debug("UI Build Data")
# Create SHA256 hash from all parameters passed to UI, use as
# build identifier.
@ -348,7 +342,7 @@ async def build_frontend( # noqa: C901
if dot_env_file.exists() and not force:
env_data = dotenv_to_dict(dot_env_file)
env_build_id = env_data.get("HYPERGLASS_BUILD_ID", "None")
log.debug("Previous Build ID: {!r}", env_build_id)
log.bind(id=env_build_id).debug("Previous build detected")
if env_build_id == build_id:
log.debug("UI parameters unchanged since last build, skipping UI build...")
@ -357,7 +351,7 @@ async def build_frontend( # noqa: C901
env_config.update({"HYPERGLASS_BUILD_ID": build_id})
dot_env_file.write_text("\n".join(f"{k}={v}" for k, v in env_config.items()))
log.debug("Wrote UI environment file {!r}", str(dot_env_file))
log.bind(path=str(dot_env_file)).debug("Wrote UI environment file")
# Initiate Next.JS export process.
if any((not dev_mode, force, full)):
@ -371,7 +365,7 @@ async def build_frontend( # noqa: C901
log.debug("Re-initialized node_modules")
if build_result:
log.success("Completed UI build")
log.info("Completed UI build")
elif dev_mode and not force:
log.debug("Running in developer mode, did not build new UI files")

View file

@ -11,9 +11,9 @@ from loguru import logger as _loguru_logger
from rich.theme import Theme
from rich.console import Console
from rich.logging import RichHandler
from gunicorn.glogging import Logger as GunicornLogger # type: ignore
# Local
from .util import dict_to_kwargs
from .constants import __version__
if t.TYPE_CHECKING:
@ -21,20 +21,21 @@ if t.TYPE_CHECKING:
from pathlib import Path
# Third Party
from loguru import Logger as LoguruLogger
from loguru import Logger as Record
from pydantic import ByteSize
# Project
from hyperglass.models.fields import LogFormat
_FMT = (
"<lvl><b>[{level}]</b> {time:YYYYMMDD} {time:HH:mm:ss} <lw>|</lw> {name}<lw>:</lw>"
"<b>{line}</b> <lw>|</lw> {function}</lvl> <lvl><b>→</b></lvl> {message}"
_FMT_DEBUG = (
"<lvl><b>[{level}]</b> {time:YYYYMMDD} {time:HH:mm:ss} <lw>|</lw>"
"<b>{line}</b> <lw>|</lw> {function}</lvl> <lvl><b>→</b></lvl> {message} {extra}"
)
_FMT_FILE = "[{time:YYYYMMDD} {time:HH:mm:ss}] {message}"
_DATE_FMT = "%Y%m%d %H:%M:%S"
_FMT_BASIC = "{message}"
_FMT = "<lvl><b>[{level}]</b> {time:YYYYMMDD} {time:HH:mm:ss} <lw>|</lw></lvl> {message} {extra}"
_FMT_FILE = "[{time:YYYYMMDD} {time:HH:mm:ss}] {message} {extra}"
_FMT_BASIC = "{message} {extra}"
_LOG_LEVELS = [
{"name": "TRACE", "color": "<m>"},
{"name": "DEBUG", "color": "<c>"},
@ -66,6 +67,32 @@ HyperglassConsole = Console(
log = _loguru_logger
def formatter(record: "Record") -> str:
"""Format log messages with extra data as kwargs string."""
msg = record.get("message", "")
extra = record.get("extra", {})
extra_str = dict_to_kwargs(extra)
return " ".join((msg, extra_str))
def filter_uvicorn_values(record: "Record") -> bool:
"""Drop noisy uvicorn messages."""
drop = (
"Application startup",
"Application shutdown",
"Finished server process",
"Shutting down",
"Waiting for application",
"Started server process",
"Started parent process",
"Stopping parent process",
)
for match in drop:
if match in record["message"]:
return False
return True
class LibInterceptHandler(logging.Handler):
"""Custom log handler for integrating third party library logging with hyperglass's logger."""
@ -89,104 +116,51 @@ class LibInterceptHandler(logging.Handler):
_loguru_logger.opt(depth=depth, exception=record.exc_info).log(level, record.getMessage())
class CustomGunicornLogger(GunicornLogger):
"""Custom logger to direct Gunicorn/Uvicorn logs to Loguru.
See: https://pawamoy.github.io/posts/unify-logging-for-a-gunicorn-uvicorn-app/
"""
def setup(self, cfg: t.Any) -> None:
"""Override Gunicorn setup."""
handler = logging.NullHandler()
self.error_logger = logging.getLogger("gunicorn.error")
self.error_logger.addHandler(handler)
self.access_logger = logging.getLogger("gunicorn.access")
self.access_logger.addHandler(handler)
self.error_logger.setLevel(cfg.loglevel)
self.access_logger.setLevel(cfg.loglevel)
def setup_lib_logging(log_level: str) -> None:
"""Override the logging handlers for dependency libraries.
See: https://pawamoy.github.io/posts/unify-logging-for-a-gunicorn-uvicorn-app/
"""
intercept_handler = LibInterceptHandler()
names = {
name.split(".")[0]
for name in (
*logging.root.manager.loggerDict.keys(),
"gunicorn",
"gunicorn.access",
"gunicorn.error",
"uvicorn",
"uvicorn.access",
"uvicorn.error",
"uvicorn.asgi",
"netmiko",
"paramiko",
"httpx",
)
}
for name in names:
logging.getLogger(name).handlers = [intercept_handler]
def _log_patcher(record):
"""Patch for exception handling in logger.
See: https://github.com/Delgan/loguru/issues/504
"""
exception = record["exception"]
if exception is not None:
fixed = Exception(str(exception.value))
record["exception"] = exception._replace(value=fixed)
def init_logger(level: str = "INFO"):
def init_logger(level: t.Union[int, str] = logging.INFO):
"""Initialize hyperglass logging instance."""
# Reset built-in Loguru configurations.
_loguru_logger.remove()
if sys.stdout.isatty():
if not sys.stdout.isatty():
# Use Rich for logging if hyperglass started from a TTY.
_loguru_logger.add(
sink=RichHandler(
console=HyperglassConsole,
rich_tracebacks=True,
level=level,
tracebacks_show_locals=level == "DEBUG",
tracebacks_show_locals=level == logging.DEBUG,
log_time_format="[%Y%m%d %H:%M:%S]",
),
format=_FMT_BASIC,
format=formatter,
level=level,
filter=filter_uvicorn_values,
enqueue=True,
)
else:
# Otherwise, use regular format.
_loguru_logger.add(sys.stdout, format=_FMT, level=level, enqueue=True)
_loguru_logger.add(
sink=sys.stdout,
enqueue=True,
format=_FMT if level == logging.INFO else _FMT_DEBUG,
level=level,
filter=filter_uvicorn_values,
)
_loguru_logger.configure(levels=_LOG_LEVELS, patcher=_log_patcher)
_loguru_logger.configure(levels=_LOG_LEVELS)
return _loguru_logger
def _log_success(self: "LoguruLogger", message: str, *a: t.Any, **kw: t.Any) -> None:
"""Add custom builtin logging handler for the success level."""
if self.isEnabledFor(25):
self._log(25, message, a, **kw)
def enable_file_logging(
log_directory: "Path", log_format: "LogFormat", log_max_size: "ByteSize", debug: bool
*,
directory: "Path",
log_format: "LogFormat",
max_size: "ByteSize",
level: t.Union[str, int],
) -> None:
"""Set up file-based logging from configuration parameters."""
log_level = "DEBUG" if debug else "INFO"
if log_format == "json":
log_file_name = "hyperglass.log.json"
structured = True
@ -194,7 +168,7 @@ def enable_file_logging(
log_file_name = "hyperglass.log"
structured = False
log_file = log_directory / log_file_name
log_file = directory / log_file_name
if log_format == "text":
now_str = datetime.utcnow().strftime("%B %d, %Y beginning at %H:%M:%S UTC")
@ -203,7 +177,7 @@ def enable_file_logging(
for line in (
f"hyperglass {__version__}",
f"Logs for {now_str}",
f"Log Level: {log_level}",
f"Log Level: {'INFO' if level == logging.INFO else 'DEBUG'}",
)
)
header = "\n" + "\n".join(header_lines) + "\n"
@ -216,31 +190,22 @@ def enable_file_logging(
sink=log_file,
format=_FMT_FILE,
serialize=structured,
level=log_level,
level=level,
encoding="utf8",
rotation=log_max_size.human_readable(),
rotation=max_size.human_readable(),
)
log.debug("Logging to file {!s}", log_file)
_loguru_logger.bind(path=log_file).debug("Logging to file")
def enable_syslog_logging(syslog_host: str, syslog_port: int) -> None:
def enable_syslog_logging(*, host: str, port: int) -> None:
"""Set up syslog logging from configuration parameters."""
# Standard Library
from logging.handlers import SysLogHandler
_loguru_logger.add(
SysLogHandler(address=(str(syslog_host), syslog_port)),
SysLogHandler(address=(str(host), port)),
format=_FMT_BASIC,
enqueue=True,
)
log.debug(
"Logging to syslog target {}:{} enabled",
str(syslog_host),
str(syslog_port),
)
# Side Effects
logging.addLevelName(25, "SUCCESS")
logging.Logger.success = _log_success
_loguru_logger.bind(host=host, port=port).debug("Logging to syslog target")

View file

@ -2,17 +2,14 @@
# Standard Library
import sys
import shutil
import typing as t
import asyncio
import platform
import logging
# Third Party
from gunicorn.arbiter import Arbiter # type: ignore
from gunicorn.app.base import BaseApplication # type: ignore
import uvicorn
# Local
from .log import log, init_logger, setup_lib_logging
from .log import LibInterceptHandler, init_logger, enable_file_logging, enable_syslog_logging
from .util import get_node_version
from .constants import MIN_NODE_VERSION, MIN_PYTHON_VERSION, __version__
@ -34,10 +31,9 @@ from .util import cpu_count
from .state import use_state
from .settings import Settings
log_level = "INFO" if Settings.debug is False else "DEBUG"
setup_lib_logging(log_level)
init_logger(log_level)
LOG_LEVEL = logging.INFO if Settings.debug is False else logging.DEBUG
logging.basicConfig(handlers=[LibInterceptHandler()], level=0, force=True)
log = init_logger(LOG_LEVEL)
async def build_ui() -> bool:
@ -78,10 +74,7 @@ def register_all_plugins() -> None:
failures += register_plugin(plugin_file, common=True)
for failure in failures:
log.warning(
"Plugin {!r} is not a valid hyperglass plugin and was not registered",
failure,
)
log.bind(plugin=failure).warning("Invalid hyperglass plugin")
def unregister_all_plugins() -> None:
@ -93,124 +86,93 @@ def unregister_all_plugins() -> None:
manager().reset()
def on_starting(server: "Arbiter") -> None:
"""Gunicorn pre-start tasks."""
def start(*, log_level: t.Union[str, int], workers: int) -> None:
"""Start hyperglass via ASGI server."""
python_version = platform.python_version()
required = ".".join((str(v) for v in MIN_PYTHON_VERSION))
log.debug("Python {} detected ({} required)", python_version, required)
register_all_plugins()
if not Settings.disable_ui:
asyncio.run(build_ui())
def when_ready(server: "Arbiter") -> None:
"""Gunicorn post-start hook."""
log.success(
"Started hyperglass {} on http://{} with {!s} workers",
__version__,
Settings.bind(),
server.app.cfg.settings["workers"].value,
uvicorn.run(
app="hyperglass.api:app",
host=str(Settings.host),
port=Settings.port,
workers=workers,
log_level=log_level,
log_config={
"version": 1,
"disable_existing_loggers": False,
"formatters": {
"default": {
"()": "uvicorn.logging.DefaultFormatter",
"format": "%(message)s",
},
"access": {
"()": "uvicorn.logging.AccessFormatter",
"format": "%(message)s",
},
},
"handlers": {
"default": {"formatter": "default", "class": "hyperglass.log.LibInterceptHandler"},
"access": {"formatter": "access", "class": "hyperglass.log.LibInterceptHandler"},
},
"loggers": {
"uvicorn.error": {"level": "ERROR", "handlers": ["default"], "propagate": False},
"uvicorn.access": {"level": "INFO", "handlers": ["access"], "propagate": False},
},
},
)
def on_exit(_: t.Any) -> None:
"""Gunicorn shutdown tasks."""
state = use_state()
if not Settings.dev_mode:
state.clear()
log.info("Cleared hyperglass state")
unregister_all_plugins()
log.critical("Stopping hyperglass {}", __version__)
class HyperglassWSGI(BaseApplication):
"""Custom gunicorn app."""
def __init__(self: "HyperglassWSGI", app: str, options: t.Dict[str, t.Any]):
"""Initialize custom WSGI."""
self.application = app
self.options = options or {}
super().__init__()
def load_config(self: "HyperglassWSGI"):
"""Load gunicorn config."""
config = {
key: value
for key, value in self.options.items()
if key in self.cfg.settings and value is not None
}
for key, value in config.items():
self.cfg.set(key.lower(), value)
def load(self: "HyperglassWSGI"):
"""Load gunicorn app."""
return self.application
def start(*, log_level: str, workers: int, **kwargs) -> None:
"""Start hyperglass via gunicorn."""
# Local
from .log import CustomGunicornLogger
HyperglassWSGI(
app="hyperglass.api:app",
options={
"preload": True,
"errorlog": "-",
"accesslog": "-",
"workers": workers,
"on_exit": on_exit,
"loglevel": log_level,
"bind": Settings.bind(),
"on_starting": on_starting,
"when_ready": when_ready,
"command": shutil.which("gunicorn"),
"logger_class": CustomGunicornLogger,
"worker_class": "uvicorn.workers.UvicornWorker",
"logconfig_dict": {"formatters": {"generic": {"format": "%(message)s"}}},
**kwargs,
},
).run()
def run(_workers: int = None):
def run(workers: int = None):
"""Run hyperglass."""
# Local
from .configuration import init_user_config
try:
log.debug("System settings: {!r}", Settings)
log.debug(repr(Settings))
state = use_state()
state.clear()
init_user_config()
workers = 1 if Settings.debug else cpu_count(2)
enable_file_logging(
directory=state.params.logging.directory,
max_size=state.params.logging.max_size,
log_format=state.params.logging.format,
level=LOG_LEVEL,
)
start(log_level=log_level, workers=workers)
if state.params.logging.syslog is not None:
enable_syslog_logging(
host=state.params.logging.syslog.host,
port=state.params.logging.syslog.port,
)
_workers = workers
if workers is None:
if Settings.debug:
_workers = 1
else:
_workers = cpu_count(2)
log.bind(
version=__version__,
listening=f"http://{Settings.bind()}",
workers=_workers,
).info(
"Starting hyperglass",
)
start(log_level=LOG_LEVEL, workers=_workers)
log.bind(version=__version__).critical("Stopping hyperglass")
except Exception as error:
log.critical(error)
# Handle app exceptions.
if not Settings.dev_mode:
state = use_state()
state.clear()
log.info("Cleared Redis cache")
log.debug("Cleared hyperglass state")
unregister_all_plugins()
raise error
except SystemExit:
# Handle Gunicorn exit.
sys.exit(4)
except BaseException:
except (SystemExit, BaseException):
sys.exit(4)

View file

@ -25,6 +25,22 @@ QueryTarget = constr(strip_whitespace=True, min_length=1)
QueryType = constr(strip_whitespace=True, strict=True, min_length=1)
def deserialize(kw: t.Dict[str, t.Any]) -> "Query":
    """Construct a Query model from a dict of keyword arguments."""
    return Query(**kw)
class SimpleQuery(BaseModel):
    """A simple representation of a post-validated query."""

    # Device `name` field identifying the queried location.
    query_location: str
    # One or more query targets; presumably prefixes/hostnames — TODO confirm against Query.
    query_target: t.Union[t.List[str], str]
    # Directive `id` field describing the query type.
    query_type: str

    def __repr_name__(self) -> str:
        """Alias SimpleQuery to Query for clarity in logging."""
        return "Query"
class Query(BaseModel):
"""Validation model for input query parameters."""
@ -33,10 +49,12 @@ class Query(BaseModel):
query_location: QueryLocation # Device `name` field
query_target: t.Union[t.List[QueryTarget], QueryTarget]
query_type: QueryType # Directive `id` field
_kwargs: t.Dict[str, t.Any]
def __init__(self, **data) -> None:
"""Initialize the query with a UTC timestamp at initialization time."""
super().__init__(**data)
self._kwargs = data
self.timestamp = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
state = use_state()
@ -58,6 +76,14 @@ class Query(BaseModel):
except InputValidationError as err:
raise InputInvalid(**err.kwargs) from err
def summary(self) -> SimpleQuery:
"""Summarized and post-validated model of a Query."""
return SimpleQuery(
query_location=self.query_location,
query_target=self.query_target,
query_type=self.query_type,
)
def __repr__(self) -> str:
"""Represent only the query fields."""
return repr_from_attrs(self, ("query_location", "query_type", "query_target"))
@ -82,7 +108,7 @@ class Query(BaseModel):
self.directive.validate_target(self.query_target)
# Run plugin-based validations.
self._input_plugin_manager.validate(query=self)
log.debug("Validation passed for query {!r}", self)
log.bind(query=self.summary()).debug("Validation passed")
def transform_query_target(self) -> QueryTarget:
"""Transform a query target based on defined plugins."""

View file

@ -95,15 +95,8 @@ class Device(HyperglassModelWithId, extra="allow"):
if name is None:
raise ValueError("name is required.")
legacy_display_name = values.pop("display_name", None)
if legacy_display_name is not None:
log.warning("The 'display_name' field is deprecated. Use the 'name' field instead.")
device_id = generate_id(legacy_display_name)
display_name = legacy_display_name
else:
device_id = generate_id(name)
display_name = name
device_id = generate_id(name)
display_name = name
return {"id": device_id, "name": display_name, "display_name": None, **values}
@ -204,7 +197,11 @@ class Device(HyperglassModelWithId, extra="allow"):
target = Settings.static_path / "images" / value.name
copied = shutil.copy2(value, target)
log.debug("Copied {} avatar from {!r} to {!r}", values["name"], str(value), str(target))
log.bind(
device=values["name"],
source=str(value),
destination=str(target),
).debug("Copied device avatar")
with Image.open(copied) as src:
if src.width > 512:

View file

@ -10,6 +10,12 @@ from ..main import HyperglassModel
class Messages(HyperglassModel):
"""Validation model for params.messages."""
model_config = ConfigDict(
title="Messages",
description="Customize almost all user-facing UI & API messages.",
json_schema_extra={"level": 2},
)
no_input: str = Field(
"{field} must be specified.",
title="No Input",
@ -85,9 +91,3 @@ class Messages(HyperglassModel):
if not self.has(attr):
raise KeyError(f"'{attr}' does not exist on Messages model")
return getattr(self, attr)
model_config = ConfigDict(
title="Messages",
description="Customize almost all user-facing UI & API messages.",
json_schema_extra={"level": 2},
)

View file

@ -107,19 +107,22 @@ class RuleWithIP(Rule):
def membership(self, target: IPvAnyNetwork, network: IPvAnyNetwork) -> bool:
"""Check if IP address belongs to network."""
log.debug("Checking membership of {} for {}", str(target), str(network))
_log = log.bind(target=str(target), network=str(network))
_log.debug("Checking target membership")
if (
network.network_address <= target.network_address
and network.broadcast_address >= target.broadcast_address
):
log.debug("{} is a member of {}", target, network)
_log.debug("Target membership verified")
return True
return False
def in_range(self, target: IPvAnyNetwork) -> bool:
"""Verify if target prefix length is within ge/le threshold."""
if target.prefixlen <= self.le and target.prefixlen >= self.ge:
log.debug("{} is in range {}-{}", target, self.ge, self.le)
log.bind(target=str(target), range=f"{self.ge!s}-{self.le!s}").debug(
"Target is in range"
)
return True
return False
@ -130,7 +133,7 @@ class RuleWithIP(Rule):
if isinstance(target, t.List):
if len(target) > 1:
self._passed = False
raise InputValidationError("Target must be a single value")
raise InputValidationError(error="Target must be a single value", target=target)
target = target[0]
try:
@ -141,7 +144,9 @@ class RuleWithIP(Rule):
raise InputValidationError(error=str(err), target=target) from err
if valid_target.version != self.condition.version:
log.debug("{!s} is not the same IP version as {!s}", target, self.condition)
log.bind(target=str(target), condition=str(self.condition)).debug(
"Mismatching IP version"
)
return False
is_member = self.membership(valid_target, self.condition)
@ -223,7 +228,7 @@ class RuleWithPattern(Rule):
return True
if isinstance(target, t.List) and not multiple:
raise InputValidationError("Target must be a single value")
raise InputValidationError(error="Target must be a single value", target=target)
result = validate_single_value(target)

View file

@ -338,8 +338,9 @@ class MultiModel(RootModel[MultiModelT]):
self._count = len(self.root)
for item in new:
log.debug(
"Added {} '{!s}' to {}",
item.__class__.__name__,
getattr(item, self.unique_by),
self.__class__.__name__,
"Added {} '{!s}' to {}".format(
item.__class__.__name__,
getattr(item, self.unique_by),
self.__class__.__name__,
)
)

View file

@ -163,5 +163,5 @@ class AristaBGPTable(_AristaBase):
winning_weight=WINNING_WEIGHT,
)
log.debug("Serialized Arista response: {!r}", serialized)
log.bind(platform="arista_eos", response=repr(serialized)).debug("Serialized response")
return serialized

View file

@ -115,5 +115,5 @@ class FRRRoute(_FRRBase):
winning_weight="high",
)
log.info("Serialized FRR response: {}", serialized)
log.bind(platform="frr", response=repr(serialized)).debug("Serialized response")
return serialized

View file

@ -195,6 +195,5 @@ class JuniperBGPTable(JuniperBase):
)
serialized = BGPRouteTable(vrf=vrf, count=count, routes=routes, winning_weight="low")
log.debug("Serialized Juniper response: {}", repr(serialized))
log.bind(platform="juniper", response=repr(serialized)).debug("Serialized response")
return serialized

View file

@ -51,13 +51,8 @@ def check_legacy_fields(*, model: str, data: t.Dict[str, t.Any]) -> t.Dict[str,
new_value = data.get(field.new)
if legacy_value is not None and new_value is None:
if field.overwrite:
log.warning(
(
"The {!r} field has been deprecated and will be removed in a future release. "
"Use the {!r} field moving forward."
),
f"{model}.{field.old}",
field.new,
log.bind(old_field=f"{model}.{field.old}", new_field=field.new).warning(
"Deprecated field"
)
data[field.new] = legacy_value
else:

View file

@ -108,7 +108,7 @@ class Webhook(HyperglassModel):
{"markdown": True, "facts": header_data},
],
}
log.debug("Created MS Teams webhook: {}", str(payload))
log.bind(type="MS Teams", payload=str(payload)).debug("Created webhook")
return payload
@ -158,5 +158,5 @@ class Webhook(HyperglassModel):
},
],
}
log.debug("Created Slack webhook: {}", str(payload))
log.bind(type="Slack", payload=str(payload)).debug("Created webhook")
return payload

View file

@ -28,11 +28,13 @@ def parse_arista(output: t.Sequence[str]) -> "OutputDataModel":
"""Parse a Arista BGP JSON response."""
result = None
_log = log.bind(plugin=BGPRoutePluginArista.__name__)
for response in output:
try:
parsed: t.Dict = json.loads(response)
log.debug("Pre-parsed data:\n{}", parsed)
_log.debug("Pre-parsed data", data=parsed)
vrf = list(parsed["vrfs"].keys())[0]
routes = parsed["vrfs"][vrf]
@ -46,19 +48,19 @@ def parse_arista(output: t.Sequence[str]) -> "OutputDataModel":
result += bgp_table
except json.JSONDecodeError as err:
log.critical("Error decoding JSON: {}", str(err))
_log.bind(error=str(err)).critical("Failed to decode JSON")
raise ParsingError("Error parsing response data") from err
except KeyError as err:
log.critical("'{}' was not found in the response", str(err))
_log.bind(key=str(err)).critical("Missing required key in response")
raise ParsingError("Error parsing response data") from err
except IndexError as err:
log.critical(str(err))
_log.critical(err)
raise ParsingError("Error parsing response data") from err
except ValidationError as err:
log.critical(str(err))
_log.critical(err)
raise ParsingError(err.errors()) from err
return result

View file

@ -69,6 +69,7 @@ def parse_juniper(output: Sequence[str]) -> "OutputDataModel": # noqa: C901
"""Parse a Juniper BGP XML response."""
result = None
_log = log.bind(plugin=BGPRoutePluginJuniper.__name__)
for response in output:
cleaned = clean_xml_output(response)
@ -76,8 +77,7 @@ def parse_juniper(output: Sequence[str]) -> "OutputDataModel": # noqa: C901
parsed: "OrderedDict" = xmltodict.parse(
cleaned, force_list=("rt", "rt-entry", "community")
)
log.debug("Initially Parsed Response: \n{}", parsed)
_log.debug("Pre-parsed data", data=parsed)
if "rpc-reply" in parsed.keys():
if "xnm:error" in parsed["rpc-reply"]:
@ -105,12 +105,15 @@ def parse_juniper(output: Sequence[str]) -> "OutputDataModel": # noqa: C901
result += bgp_table
except xmltodict.expat.ExpatError as err:
_log.bind(error=str(err)).critical("Failed to decode XML")
raise ParsingError("Error parsing response data") from err
except KeyError as err:
_log.bind(key=str(err)).critical("Missing required key in response")
raise ParsingError("{key} was not found in the response", key=str(err)) from err
except ValidationError as err:
_log.critical(err)
raise ParsingError(err) from err
return result

View file

@ -111,10 +111,11 @@ class PluginManager(t.Generic[PluginT]):
if issubclass(plugin, HyperglassPlugin):
instance = plugin(*args, **kwargs)
self._state.add_plugin(self._type, instance)
_log = log.bind(type=self._type, name=instance.name)
if instance._hyperglass_builtin is True:
log.debug("Registered {} built-in plugin {!r}", self._type, instance.name)
_log.debug("Registered built-in plugin")
else:
log.success("Registered {} plugin {!r}", self._type, instance.name)
_log.info("Registered plugin")
return
except TypeError:
raise PluginError( # noqa: B904
@ -148,7 +149,7 @@ class InputPluginManager(PluginManager[InputPlugin], type="input"):
for plugin in self._gather_plugins(query):
result = plugin.validate(query)
result_test = "valid" if result is True else "invalid" if result is False else "none"
log.debug("Input Plugin Validation {!r} result={!r}", plugin.name, result_test)
log.bind(name=plugin.name, result=result_test).debug("Input Plugin Validation")
if result is False:
raise InputValidationError(
error="No matched validation rules", target=query.query_target
@ -162,7 +163,7 @@ class InputPluginManager(PluginManager[InputPlugin], type="input"):
result = query.query_target
for plugin in self._gather_plugins(query):
result = plugin.transform(query=query)
log.debug("Input Plugin Transform {!r} result={!r}", plugin.name, result)
log.bind(name=plugin.name, result=repr(result)).debug("Input Plugin Transform")
return result
@ -182,9 +183,9 @@ class OutputPluginManager(PluginManager[OutputPlugin], type="output"):
)
common = (plugin for plugin in self.plugins() if plugin.common is True)
for plugin in (*directives, *common):
log.debug("Output Plugin {!r} starting with\n{!r}", plugin.name, result)
log.bind(plugin=plugin.name, value=result).debug("Output Plugin Starting Value")
result = plugin.process(output=result, query=query)
log.debug("Output Plugin {!r} completed with\n{!r}", plugin.name, result)
log.bind(plugin=plugin.name, value=result).debug("Output Plugin Ending Value")
if result is False:
return result

View file

@ -25,5 +25,5 @@ class OutputPlugin(HyperglassPlugin, DirectivePlugin, PlatformPlugin):
def process(self, *, output: OutputType, query: "Query") -> OutputType:
"""Process or manipulate output from a device."""
log.warning("Output plugin '{}' has not implemented a 'process()' method", self.name)
log.warning("Output plugin has not implemented a 'process()' method", plugin=self.name)
return output

View file

@ -8,8 +8,6 @@ from pathlib import Path
import pytest
# Project
from hyperglass.log import log
from hyperglass.models.api.query import Query
from hyperglass.models.data.bgp_route import BGPRouteTable
# Local

View file

@ -172,11 +172,12 @@ class RedisManager:
) -> None:
pipeline_self.instance.execute()
if exc_type is not None:
log.error(
"Error in pipeline {!r} from parent instance {!r}:\n{!s}",
pipeline_self,
pipeline_self.parent,
exc_value,
log.bind(
pipeline=repr(pipeline_self),
parent=repr(pipeline_self.parent),
error=exc_value,
).error(
"Error exiting pipeline",
)
return RedisManagerPipeline(

View file

@ -8,6 +8,7 @@ from .tools import (
get_fmt_keys,
compare_dicts,
compare_lists,
dict_to_kwargs,
snake_to_camel,
parse_exception,
repr_from_attrs,
@ -29,6 +30,7 @@ __all__ = (
"copyfiles",
"cpu_count",
"deep_convert_keys",
"dict_to_kwargs",
"dotenv_to_dict",
"get_driver",
"get_fmt_keys",

View file

@ -7,13 +7,13 @@ from queue import Queue
from pathlib import Path
from threading import Thread
# Project
from hyperglass.log import log
async def move_files(src: Path, dst: Path, files: t.Iterable[Path]) -> t.Tuple[str]: # noqa: C901
"""Move iterable of files from source to destination."""
# Project
from hyperglass.log import log
def error(*args, **kwargs):
msg = ", ".join(args)
kwargs = {k: str(v) for k, v in kwargs.items()}
@ -91,6 +91,10 @@ class FileCopy(Thread):
def copyfiles(src_files: t.Iterable[Path], dst_files: t.Iterable[Path]):
"""Copy iterable of files from source to destination with threading."""
# Project
from hyperglass.log import log
queue = Queue()
threads = ()
src_files_len = len(src_files)
@ -113,7 +117,7 @@ def copyfiles(src_files: t.Iterable[Path], dst_files: t.Iterable[Path]):
for _ in src_files:
copied = queue.get()
log.debug("Copied {}", str(copied))
log.bind(path=copied).debug("Copied file", path=copied)
for thread in threads:
thread.join()

View file

@ -13,6 +13,7 @@ from ..tools import (
get_fmt_keys,
compare_dicts,
compare_lists,
dict_to_kwargs,
snake_to_camel,
parse_exception,
repr_from_attrs,
@ -115,7 +116,6 @@ def test_at_least():
def test_compare_dicts():
d1 = {"one": 1, "two": 2}
d2 = {"one": 1, "two": 2}
d3 = {"one": 1, "three": 3}
@ -189,3 +189,25 @@ def test_compare_lists():
assert compare_lists(list1, list2) is False
assert compare_lists(list1, list3) is False
assert compare_lists(list1, list4) is True
def test_dict_to_kwargs():
    """Verify dict_to_kwargs renders plain values and objects with custom reprs."""

    class Test:
        one: int
        two: int

        def __init__(self, **kw) -> None:
            for key, value in kw.items():
                setattr(self, key, value)

        def __repr__(self) -> str:
            return "Test(one={}, two={})".format(self.one, self.two)

    assert dict_to_kwargs({"one": 1, "two": 2}) == "one=1 two=2"
    assert (
        dict_to_kwargs({"cls": Test(one=1, two=2), "three": "three"})
        == "cls=Test(one=1, two=2) three='three'"
    )

View file

@ -183,3 +183,12 @@ def compare_init(obj_a: object, obj_b: object) -> bool:
obj_b.__init__.__annotations__.pop("self", None)
return compare_dicts(obj_a.__init__.__annotations__, obj_b.__init__.__annotations__)
return False
def dict_to_kwargs(in_dict: t.Dict[str, t.Any]) -> str:
    """Format a dict as a space-separated string of key=value pairs.

    Values are rendered with ``repr()`` so strings keep their quotes,
    e.g. ``{"a": 1, "b": "x"}`` -> ``"a=1 b='x'"``.
    """
    # Join a generator directly instead of rebuilding the list on every
    # iteration (`items = [*items, out_str]`), which was quadratic.
    return " ".join(f"{key}={value!r}" for key, value in in_dict.items())

View file

@ -56,7 +56,7 @@ def resolve_hostname(
# Project
from hyperglass.log import log
log.debug("Ensuring {!r} is resolvable...", hostname)
log.bind(hostname=hostname).debug("Ensuring hostname is resolvable")
ip4 = None
ip6 = None

View file

@ -12,9 +12,8 @@ dependencies = [
"aiofiles>=23.2.1",
"distro==1.8.0",
"favicons==0.2.2",
"gunicorn>=21.2.0",
"httpx==0.24.0",
"loguru==0.7.0",
"loguru>=0.7.2",
"netmiko==4.1.2",
"paramiko==3.4.0",
"psutil==5.9.4",
@ -29,7 +28,7 @@ dependencies = [
"toml>=0.10.2",
"pydantic-settings>=2.2.1",
"pydantic-extra-types>=2.6.0",
"litestar[standard,brotli]>=2.7.0",
"litestar[standard,brotli]>=2.7.1",
]
readme = "README.md"
requires-python = ">= 3.11"

View file

@ -66,8 +66,6 @@ freetype-py==2.4.0
# via rlpycairo
future==0.18.3
# via textfsm
gunicorn==21.2.0
# via hyperglass
h11==0.14.0
# via httpcore
# via uvicorn
@ -90,9 +88,9 @@ jinja2==3.1.3
# via litestar
jsbeautifier==1.15.1
# via litestar
litestar==2.7.0
litestar==2.7.1
# via hyperglass
loguru==0.7.0
loguru==0.7.2
# via hyperglass
lxml==5.1.0
# via svglib
@ -118,7 +116,6 @@ ntc-templates==4.3.0
# via netmiko
packaging==23.2
# via black
# via gunicorn
# via pytest
paramiko==3.4.0
# via hyperglass
@ -250,7 +247,7 @@ typing-extensions==4.9.0
uvicorn==0.21.1
# via hyperglass
# via litestar
uvloop==0.19.0
uvloop==0.18.0
# via hyperglass
# via litestar
# via uvicorn

View file

@ -53,8 +53,6 @@ freetype-py==2.4.0
# via rlpycairo
future==0.18.3
# via textfsm
gunicorn==21.2.0
# via hyperglass
h11==0.14.0
# via httpcore
# via uvicorn
@ -72,9 +70,9 @@ jinja2==3.1.3
# via litestar
jsbeautifier==1.15.1
# via litestar
litestar==2.7.0
litestar==2.7.1
# via hyperglass
loguru==0.7.0
loguru==0.7.2
# via hyperglass
lxml==5.1.0
# via svglib
@ -92,8 +90,6 @@ netmiko==4.1.2
# via hyperglass
ntc-templates==4.3.0
# via netmiko
packaging==24.0
# via gunicorn
paramiko==3.4.0
# via hyperglass
# via netmiko
@ -191,7 +187,7 @@ typing-extensions==4.9.0
uvicorn==0.21.1
# via hyperglass
# via litestar
uvloop==0.19.0
uvloop==0.18.0
# via hyperglass
# via litestar
# via uvicorn