forked from mirrors/thatmattlove-hyperglass

Update code formatting - line length

parent a62785227e
commit 52ebf4663c

50 changed files with 151 additions and 464 deletions
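Note: the pattern repeated throughout this diff — multi-line calls and signatures collapsed onto single lines, often keeping the trailing comma — is what an auto-formatter produces when its line-length limit is raised to roughly 100 characters. The commit does not name the tool, so the sketch below uses Black purely as an assumed illustration:

    """Minimal sketch of a line-length-driven reformat.

    Assumption: these hunks look like an auto-formatter run with a
    ~100-character limit; the commit does not name the tool, so Black
    is used here only for illustration.
    """
    import black  # third party: pip install black

    SOURCE = '''\
    def __init__(
        self,
        message: str = "",
        level: str = "warning",
        keywords=None,
    ) -> None:
        pass
    '''

    # With a wider limit the formatter collapses the signature onto one
    # line, mirroring the before/after pairs in the hunks below.
    print(black.format_str(SOURCE, mode=black.FileMode(line_length=100)))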
@@ -25,10 +25,7 @@ class HyperglassError(Exception):
     """hyperglass base exception."""

     def __init__(
-        self,
-        message: str = "",
-        level: str = "warning",
-        keywords: Optional[List[str]] = None,
+        self, message: str = "", level: str = "warning", keywords: Optional[List[str]] = None,
     ) -> None:
         """Initialize the hyperglass base exception class."""
         self._message = message
@@ -87,16 +84,12 @@ class _UnformattedHyperglassError(HyperglassError):

     _level = "warning"

-    def __init__(
-        self, unformatted_msg: str = "", level: Optional[str] = None, **kwargs
-    ) -> None:
+    def __init__(self, unformatted_msg: str = "", level: Optional[str] = None, **kwargs) -> None:
         """Format error message with keyword arguments."""
         self._message = unformatted_msg.format(**kwargs)
         self._level = level or self._level
         self._keywords = list(kwargs.values())
-        super().__init__(
-            message=self._message, level=self._level, keywords=self._keywords
-        )
+        super().__init__(message=self._message, level=self._level, keywords=self._keywords)


 class _PredefinedHyperglassError(HyperglassError):
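For context, the _UnformattedHyperglassError pattern above builds its message with str.format(**kwargs) and keeps the keyword values for later display. A standalone sketch of the same idea (class and field names here are illustrative, not hyperglass's actual call sites):

    class TemplateError(Exception):
        """Build a message from a template plus keyword arguments."""

        def __init__(self, template: str = "", **kwargs) -> None:
            # str.format(**kwargs) substitutes each {placeholder} by name.
            self.message = template.format(**kwargs)
            # Keep the raw values so a UI can highlight them as keywords.
            self.keywords = list(kwargs.values())
            super().__init__(self.message)

    # Example usage:
    err = TemplateError("{target} is not allowed", target="192.0.2.1/32")
    assert err.message == "192.0.2.1/32 is not allowed"
    assert err.keywords == ["192.0.2.1/32"]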
@@ -107,9 +100,7 @@ class _PredefinedHyperglassError(HyperglassError):
         self._fmt_msg = self._message.format(**kwargs)
         self._level = level or self._level
         self._keywords = list(kwargs.values())
-        super().__init__(
-            message=self._fmt_msg, level=self._level, keywords=self._keywords
-        )
+        super().__init__(message=self._fmt_msg, level=self._level, keywords=self._keywords)


 class ConfigInvalid(HyperglassError):
@@ -113,9 +113,7 @@ def _custom_openapi():
         description=params.docs.description,
         routes=app.routes,
     )
-    openapi_schema["info"]["x-logo"] = {
-        "url": "/images/light" + params.web.logo.light.suffix
-    }
+    openapi_schema["info"]["x-logo"] = {"url": "/images/light" + params.web.logo.light.suffix}

     query_samples = []
     queries_samples = []
@@ -123,38 +121,26 @@ def _custom_openapi():

     with EXAMPLE_QUERY_CURL.open("r") as e:
         example = e.read()
-        query_samples.append(
-            {"lang": "cURL", "source": example % str(params.docs.base_url)}
-        )
+        query_samples.append({"lang": "cURL", "source": example % str(params.docs.base_url)})

     with EXAMPLE_QUERY_PY.open("r") as e:
         example = e.read()
-        query_samples.append(
-            {"lang": "Python", "source": example % str(params.docs.base_url)}
-        )
+        query_samples.append({"lang": "Python", "source": example % str(params.docs.base_url)})

     with EXAMPLE_DEVICES_CURL.open("r") as e:
         example = e.read()
-        queries_samples.append(
-            {"lang": "cURL", "source": example % str(params.docs.base_url)}
-        )
+        queries_samples.append({"lang": "cURL", "source": example % str(params.docs.base_url)})
     with EXAMPLE_DEVICES_PY.open("r") as e:
         example = e.read()
-        queries_samples.append(
-            {"lang": "Python", "source": example % str(params.docs.base_url)}
-        )
+        queries_samples.append({"lang": "Python", "source": example % str(params.docs.base_url)})

     with EXAMPLE_QUERIES_CURL.open("r") as e:
         example = e.read()
-        devices_samples.append(
-            {"lang": "cURL", "source": example % str(params.docs.base_url)}
-        )
+        devices_samples.append({"lang": "cURL", "source": example % str(params.docs.base_url)})

     with EXAMPLE_QUERIES_PY.open("r") as e:
         example = e.read()
-        devices_samples.append(
-            {"lang": "Python", "source": example % str(params.docs.base_url)}
-        )
+        devices_samples.append({"lang": "Python", "source": example % str(params.docs.base_url)})

     openapi_schema["paths"]["/api/query/"]["post"]["x-code-samples"] = query_samples
     openapi_schema["paths"]["/api/devices"]["get"]["x-code-samples"] = devices_samples
@@ -10,16 +10,14 @@ from hyperglass.configuration import params
 async def default_handler(request, exc):
     """Handle uncaught errors."""
     return JSONResponse(
-        {"output": params.messages.general, "level": "danger", "keywords": []},
-        status_code=500,
+        {"output": params.messages.general, "level": "danger", "keywords": []}, status_code=500,
     )


 async def http_handler(request, exc):
     """Handle web server errors."""
     return JSONResponse(
-        {"output": exc.detail, "level": "danger", "keywords": []},
-        status_code=exc.status_code,
+        {"output": exc.detail, "level": "danger", "keywords": []}, status_code=exc.status_code,
     )
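These handlers all return a uniform {output, level, keywords} body. How they are wired up is not shown in this hunk; a hedged sketch of the standard FastAPI/Starlette registration mechanism (the handler names refer to the coroutines above):

    # Sketch only: hyperglass's actual registration code is not part of
    # this hunk; this is the stock FastAPI/Starlette mechanism.
    from fastapi import FastAPI, HTTPException
    from fastapi.exceptions import RequestValidationError

    app = FastAPI()

    # add_exception_handler(exc_class, handler) routes raised exceptions
    # of that class to the matching coroutine defined above.
    app.add_exception_handler(Exception, default_handler)
    app.add_exception_handler(HTTPException, http_handler)
    app.add_exception_handler(RequestValidationError, validation_handler)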
@@ -35,6 +33,5 @@ async def validation_handler(request, exc):
     """Handle Pydantic validation errors raised by FastAPI."""
     error = exc.errors()[0]
     return JSONResponse(
-        {"output": error["msg"], "level": "error", "keywords": error["loc"]},
-        status_code=422,
+        {"output": error["msg"], "level": "error", "keywords": error["loc"]}, status_code=422,
     )
@@ -55,9 +55,7 @@ async def send_webhook(query_data: Query, request: Request, timestamp: datetime)
             }
         )
     except Exception as err:
-        log.error(
-            "Error sending webhook to {}: {}", params.logging.http.provider, str(err)
-        )
+        log.error("Error sending webhook to {}: {}", params.logging.http.provider, str(err))


 @log.catch
@@ -106,9 +104,7 @@ async def query(query_data: Query, request: Request, background_tasks: Backgroun

     elif not cache_response:
         log.debug("No existing cache entry for query {}", cache_key)
-        log.debug(
-            "Created new cache key {} entry for query {}", cache_key, query_data.summary
-        )
+        log.debug("Created new cache key {} entry for query {}", cache_key, query_data.summary)

         timestamp = query_data.timestamp
@@ -8,9 +8,7 @@ from pathlib import Path
 from httpx import Headers


-def import_public_key(
-    app_path: Union[Path, str], device_name: str, keystring: str
-) -> bool:
+def import_public_key(app_path: Union[Path, str], device_name: str, keystring: str) -> bool:
     """Import a public key for hyperglass-agent."""
     if not isinstance(app_path, Path):
         app_path = Path(app_path)
@@ -52,18 +52,14 @@ def _print_version(ctx, param, value):
     help=cmd_help(E.NUMBERS, "hyperglass version", supports_color),
 )
 @help_option(
-    "-h",
-    "--help",
-    help=cmd_help(E.FOLDED_HANDS, "Show this help message", supports_color),
+    "-h", "--help", help=cmd_help(E.FOLDED_HANDS, "Show this help message", supports_color),
 )
 def hg():
     """Initialize Click Command Group."""
     pass


-@hg.command(
-    "build-ui", help=cmd_help(E.BUTTERFLY, "Create a new UI build", supports_color)
-)
+@hg.command("build-ui", help=cmd_help(E.BUTTERFLY, "Create a new UI build", supports_color))
 @option("-t", "--timeout", required=False, default=180, help="Timeout in seconds")
 def build_frontend(timeout):
     """Create a new UI build."""
@@ -131,9 +127,7 @@ def start(build, direct, workers):  # noqa: C901
     cls=HelpColorsCommand,
     help_options_custom_colors=random_colors("-l"),
 )
-@option(
-    "-l", "--length", "length", default=32, help="Number of characters [default: 32]"
-)
+@option("-l", "--length", "length", default=32, help="Number of characters [default: 32]")
 def generate_secret(length):
     """Generate secret for hyperglass-agent.
@@ -177,9 +171,7 @@ After adding your {devices} file, you should run the {build_cmd} command.""",  #

 @hg.command(
     "system-info",
-    help=cmd_help(
-        E.THERMOMETER, " Get system information for a bug report", supports_color
-    ),
+    help=cmd_help(E.THERMOMETER, " Get system information for a bug report", supports_color),
     cls=HelpColorsCommand,
 )
 def get_system_info():
@@ -96,9 +96,7 @@ def success(text, *args, **kwargs):
     Returns:
         {str} -- Success output
     """
-    return _base_formatter(
-        _state="success", _text=text, _callback=echo, *args, **kwargs
-    )
+    return _base_formatter(_state="success", _text=text, _callback=echo, *args, **kwargs)


 def warning(text, *args, **kwargs):
@@ -111,9 +109,7 @@ def warning(text, *args, **kwargs):
     Returns:
         {str} -- Warning output
     """
-    return _base_formatter(
-        _state="warning", _text=text, _callback=echo, *args, **kwargs
-    )
+    return _base_formatter(_state="warning", _text=text, _callback=echo, *args, **kwargs)


 def label(text, *args, **kwargs):
@@ -56,12 +56,7 @@ class HelpColorsFormatter(click.HelpFormatter):
     """

     def __init__(
-        self,
-        headers_color=None,
-        options_color=None,
-        options_custom_colors=None,
-        *args,
-        **kwargs
+        self, headers_color=None, options_color=None, options_custom_colors=None, *args, **kwargs
     ):
         """Initialize help formatter.
@@ -98,9 +93,7 @@ class HelpColorsFormatter(click.HelpFormatter):

     def write_dl(self, rows, **kwargs):
         """Write Options section."""
-        colorized_rows = [
-            (click.style(row[0], **self._pick_color(row[0])), row[1]) for row in rows
-        ]
+        colorized_rows = [(click.style(row[0], **self._pick_color(row[0])), row[1]) for row in rows]
         super().write_dl(colorized_rows, **kwargs)
@@ -20,9 +20,7 @@ IGNORED_FILES = [".DS_Store"]

 INSTALL_PATHS = [
     inquirer.List(
-        "install_path",
-        message="Choose a directory for hyperglass",
-        choices=[USER_PATH, ROOT_PATH],
+        "install_path", message="Choose a directory for hyperglass", choices=[USER_PATH, ROOT_PATH],
     )
 ]
@@ -104,9 +102,7 @@ class Installer:

         if not compare_post.left_list == compare_post.right_list:
             error(
-                "Files in {a} do not match files in {b}",
-                a=str(ASSET_DIR),
-                b=str(target_dir),
+                "Files in {a} do not match files in {b}", a=str(ASSET_DIR), b=str(target_dir),
             )
             return False
@@ -122,6 +122,4 @@ class Message:

     def __repr__(self):
         """Stringify the instance character for representation."""
-        return "Message(msg={m}, kw={k}, emoji={e})".format(
-            m=self.msg, k=self.kw, e=self.emoji
-        )
+        return "Message(msg={m}, kw={k}, emoji={e})".format(m=self.msg, k=self.kw, e=self.emoji)
@@ -62,9 +62,7 @@ def _cancel_all_tasks(loop, tasks):
     for task in to_cancel:
         task.cancel()

-    loop.run_until_complete(
-        asyncio.gather(*to_cancel, loop=loop, return_exceptions=True)
-    )
+    loop.run_until_complete(asyncio.gather(*to_cancel, loop=loop, return_exceptions=True))

     for task in to_cancel:
         if task.cancelled():
@@ -117,12 +117,8 @@ def check_address(address):
     elif isinstance(address, str):
         if os.name != "posix":
             raise ValueError("Platform does not support UNIX domain sockets")
-        if not (
-            os.path.exists(address) or os.access(os.path.dirname(address), os.W_OK)
-        ):
-            raise ValueError(
-                "ADDRESS not a valid socket domain socket ({0})".format(address)
-            )
+        if not (os.path.exists(address) or os.access(os.path.dirname(address), os.W_OK)):
+            raise ValueError("ADDRESS not a valid socket domain socket ({0})".format(address))
     else:
         raise ValueError(
             "ADDRESS is not a tuple, string, or character buffer "
@@ -238,16 +234,12 @@ class _ForwardHandler(socketserver.BaseRequestHandler):
             if not chan.recv_ready():
                 break
             data = chan.recv(1024)
-            self.logger.trace(
-                "<<< IN {0} recv: {1} <<<".format(self.info, hexlify(data)),
-            )
+            self.logger.trace("<<< IN {0} recv: {1} <<<".format(self.info, hexlify(data)),)
             self.request.sendall(data)

     def handle(self):
         uid = get_connection_id()
-        self.info = "#{0} <-- {1}".format(
-            uid, self.client_address or self.server.local_address
-        )
+        self.info = "#{0} <-- {1}".format(uid, self.client_address or self.server.local_address)
         src_address = self.request.getpeername()
         if not isinstance(src_address, tuple):
             src_address = ("dummy", 12345)
@@ -261,9 +253,7 @@ class _ForwardHandler(socketserver.BaseRequestHandler):
         except paramiko.SSHException:
             chan = None
         if chan is None:
-            msg = "{0} to {1} was rejected by the SSH server".format(
-                self.info, self.remote_address
-            )
+            msg = "{0} to {1} was rejected by the SSH server".format(self.info, self.remote_address)
             self.logger.trace(msg)
             raise HandlerSSHTunnelForwarderError(msg)
@@ -373,9 +363,7 @@ class _UnixStreamForwardServer(UnixStreamServer):
         return self.RequestHandlerClass.remote_address[1]


-class _ThreadingUnixStreamForwardServer(
-    socketserver.ThreadingMixIn, _UnixStreamForwardServer
-):
+class _ThreadingUnixStreamForwardServer(socketserver.ThreadingMixIn, _UnixStreamForwardServer):
     """
     Allow concurrent connections to each tunnel
     """
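The class above follows the standard library's mixin convention: socketserver.ThreadingMixIn placed first in the base list makes each request run in its own thread. A generic sketch of the same pattern (the echo handler is a toy, not part of sshtunnel):

    import socketserver


    class EchoHandler(socketserver.BaseRequestHandler):
        def handle(self):
            # Echo one chunk back to the client.
            data = self.request.recv(1024)
            self.request.sendall(data)


    class ThreadingEchoServer(socketserver.ThreadingMixIn, socketserver.TCPServer):
        """ThreadingMixIn must precede the server class so its
        process_request() override wins in the MRO."""

        daemon_threads = True  # don't block interpreter exit on live handlers


    # Usage: with ThreadingEchoServer(("127.0.0.1", 0), EchoHandler) as srv:
    #     srv.serve_forever()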
@@ -693,11 +681,7 @@ class SSHTunnelForwarder:
         return _ThreadingForwardServer if self._threaded else _ForwardServer

     def _make_unix_ssh_forward_server_class(self, remote_address_):
-        return (
-            _ThreadingUnixStreamForwardServer
-            if self._threaded
-            else _UnixStreamForwardServer
-        )
+        return _ThreadingUnixStreamForwardServer if self._threaded else _UnixStreamForwardServer

     def _make_ssh_forward_server(self, remote_address, local_bind_address):
         """
@@ -710,9 +694,7 @@ class SSHTunnelForwarder:
         else:
             forward_maker_class = self._make_ssh_forward_server_class
         _Server = forward_maker_class(remote_address)
-        ssh_forward_server = _Server(
-            local_bind_address, _Handler, logger=self.logger,
-        )
+        ssh_forward_server = _Server(local_bind_address, _Handler, logger=self.logger,)

         if ssh_forward_server:
             ssh_forward_server.daemon_threads = self.daemon_forward_servers
@@ -724,8 +706,7 @@ class SSHTunnelForwarder:
                 "Problem setting up ssh {0} <> {1} forwarder. You can "
                 "suppress this exception by using the `mute_exceptions`"
                 "argument".format(
-                    address_to_str(local_bind_address),
-                    address_to_str(remote_address),
+                    address_to_str(local_bind_address), address_to_str(remote_address),
                 ),
             )
         except IOError:
@@ -802,9 +783,7 @@ class SSHTunnelForwarder:
         )
         # local binds
         self._local_binds = self._get_binds(local_bind_address, local_bind_addresses)
-        self._local_binds = self._consolidate_binds(
-            self._local_binds, self._remote_binds
-        )
+        self._local_binds = self._consolidate_binds(self._local_binds, self._remote_binds)

         (
             self.ssh_host,
@@ -882,16 +861,12 @@ class SSHTunnelForwarder:
             ssh_port = ssh_port or hostname_info.get("port")

             proxycommand = hostname_info.get("proxycommand")
-            ssh_proxy = ssh_proxy or (
-                paramiko.ProxyCommand(proxycommand) if proxycommand else None
-            )
+            ssh_proxy = ssh_proxy or (paramiko.ProxyCommand(proxycommand) if proxycommand else None)
             if compression is None:
                 compression = hostname_info.get("compression", "")
                 compression = True if compression.upper() == "YES" else False
         except IOError:
-            logger.warning(
-                "Could not read SSH configuration file: {f}", f=ssh_config_file
-            )
+            logger.warning("Could not read SSH configuration file: {f}", f=ssh_config_file)
         except (AttributeError, TypeError):  # ssh_config_file is None
             logger.info("Skipping loading of ssh configuration file")
         finally:
@@ -979,8 +954,7 @@ class SSHTunnelForwarder:
         count = len(remote_binds) - len(local_binds)
         if count < 0:
             raise ValueError(
-                "Too many local bind addresses "
-                "(local_bind_addresses > remote_bind_addresses)"
+                "Too many local bind addresses " "(local_bind_addresses > remote_bind_addresses)"
             )
         local_binds.extend([("0.0.0.0", 0) for x in range(count)])
         return local_binds
@@ -1002,9 +976,7 @@ class SSHTunnelForwarder:
         - ``paramiko.Pkey`` - it will be transparently added to loaded keys
         """
         ssh_loaded_pkeys = SSHTunnelForwarder.get_keys(
-            logger=logger,
-            host_pkey_directories=host_pkey_directories,
-            allow_agent=allow_agent,
+            logger=logger, host_pkey_directories=host_pkey_directories, allow_agent=allow_agent,
         )

         if isinstance(ssh_pkey, str):
@@ -1058,9 +1030,7 @@ class SSHTunnelForwarder:
         try:
             self._connect_to_gateway()
         except socket.gaierror:  # raised by paramiko.Transport
-            msg = "Could not resolve IP address for {0}, aborting!".format(
-                self.ssh_host
-            )
+            msg = "Could not resolve IP address for {0}, aborting!".format(self.ssh_host)
             self.logger.error(msg)
             return
         except (paramiko.SSHException, socket.error) as e:
@@ -1109,9 +1079,7 @@ class SSHTunnelForwarder:
         """Processes optional deprecate arguments."""

         if deprecated_attrib not in DEPRECATIONS:
-            raise ValueError(
-                "{0} not included in deprecations list".format(deprecated_attrib)
-            )
+            raise ValueError("{0} not included in deprecations list".format(deprecated_attrib))
         if deprecated_attrib in kwargs:
             warnings.warn(
                 "'{0}' is DEPRECATED use '{1}' instead".format(
@@ -1148,17 +1116,10 @@ class SSHTunnelForwarder:
         for pkey_class in (
             (key_type,)
             if key_type
-            else (
-                paramiko.RSAKey,
-                paramiko.DSSKey,
-                paramiko.ECDSAKey,
-                paramiko.Ed25519Key,
-            )
+            else (paramiko.RSAKey, paramiko.DSSKey, paramiko.ECDSAKey, paramiko.Ed25519Key,)
         ):
             try:
-                ssh_pkey = pkey_class.from_private_key_file(
-                    pkey_file, password=pkey_password
-                )
+                ssh_pkey = pkey_class.from_private_key_file(pkey_file, password=pkey_password)

                 logger.debug(
                     "Private key file ({k0}, {k1}) successfully loaded",
@@ -1202,9 +1163,7 @@ class SSHTunnelForwarder:
             else _srv.local_address
         )
         s.connect(connect_to)
-        self.tunnel_is_up[_srv.local_address] = _srv.tunnel_ok.get(
-            timeout=TUNNEL_TIMEOUT * 1.1
-        )
+        self.tunnel_is_up[_srv.local_address] = _srv.tunnel_ok.get(timeout=TUNNEL_TIMEOUT * 1.1)
         self.logger.debug("Tunnel to {0} is DOWN".format(_srv.remote_address))
     except socket.error:
         self.logger.debug("Tunnel to {0} is DOWN".format(_srv.remote_address))
@@ -1232,8 +1191,7 @@ class SSHTunnelForwarder:
         self._create_tunnels()
         if not self.is_active:
             self._raise(
-                BaseSSHTunnelForwarderError,
-                reason="Could not establish session to SSH gateway",
+                BaseSSHTunnelForwarderError, reason="Could not establish session to SSH gateway",
             )
         for _srv in self._server_list:
             thread = threading.Thread(
@@ -1247,8 +1205,7 @@ class SSHTunnelForwarder:
         self.is_alive = any(self.tunnel_is_up.values())
         if not self.is_alive:
             self._raise(
-                HandlerSSHTunnelForwarderError,
-                "An error occurred while opening tunnels.",
+                HandlerSSHTunnelForwarderError, "An error occurred while opening tunnels.",
             )

     def stop(self) -> None:
@@ -1270,8 +1227,7 @@ class SSHTunnelForwarder:
         """
         self.logger.info("Closing all open connections...")
         opened_address_text = (
-            ", ".join((address_to_str(k.local_address) for k in self._server_list))
-            or "None"
+            ", ".join((address_to_str(k.local_address) for k in self._server_list)) or "None"
         )
         self.logger.debug("Listening tunnels: " + opened_address_text)
         self._stop_transport()
@@ -1311,9 +1267,7 @@ class SSHTunnelForwarder:

         if self.ssh_password:  # avoid conflict using both pass and pkey
             self.logger.debug(
-                "Trying to log in with password: {0}".format(
-                    "*" * len(self.ssh_password)
-                )
+                "Trying to log in with password: {0}".format("*" * len(self.ssh_password))
             )
             try:
                 self._transport = self._get_transport()
@@ -1364,9 +1318,7 @@ class SSHTunnelForwarder:
                     os.unlink(_srv.local_address)
                 except Exception as e:
                     self.logger.error(
-                        "Unable to unlink socket {0}: {1}".format(
-                            self.local_address, repr(e)
-                        )
+                        "Unable to unlink socket {0}: {1}".format(self.local_address, repr(e))
                     )
         self.is_alive = False
         if self.is_active:
@@ -1413,9 +1365,7 @@ class SSHTunnelForwarder:

         self._check_is_started()
         return [
-            _server.local_port
-            for _server in self._server_list
-            if _server.local_port is not None
+            _server.local_port for _server in self._server_list if _server.local_port is not None
         ]

     @property
@@ -1423,9 +1373,7 @@ class SSHTunnelForwarder:
         """Return a list containing the IP addresses listening for the tunnels."""
         self._check_is_started()
         return [
-            _server.local_host
-            for _server in self._server_list
-            if _server.local_host is not None
+            _server.local_host for _server in self._server_list if _server.local_host is not None
         ]

     @property
@@ -1461,10 +1409,7 @@ class SSHTunnelForwarder:
     def __str__(self) -> str:
         credentials = {
             "password": self.ssh_password,
-            "pkeys": [
-                (key.get_name(), hexlify(key.get_fingerprint()))
-                for key in self.ssh_pkeys
-            ]
+            "pkeys": [(key.get_name(), hexlify(key.get_fingerprint())) for key in self.ssh_pkeys]
             if any(self.ssh_pkeys)
             else None,
         }
@@ -1496,9 +1441,7 @@ class SSHTunnelForwarder:
             credentials,
             self.ssh_host_key if self.ssh_host_key else "not checked",
             "" if self.is_alive else "not ",
-            "disabled"
-            if not self.set_keepalive
-            else "every {0} sec".format(self.set_keepalive),
+            "disabled" if not self.set_keepalive else "every {0} sec".format(self.set_keepalive),
             "disabled" if self.skip_tunnel_checkup else "enabled",
             "" if self._threaded else "not ",
             "" if self.compression else "not ",
@@ -1612,8 +1555,6 @@ def _bindlist(input_str):
             _port = "22"  # default port if not given
         return _ip, int(_port)
     except ValueError:
-        raise argparse.ArgumentTypeError(
-            "Address tuple must be of type IP_ADDRESS:PORT"
-        )
+        raise argparse.ArgumentTypeError("Address tuple must be of type IP_ADDRESS:PORT")
     except AssertionError:
        raise argparse.ArgumentTypeError("Both IP:PORT can't be missing!")
@@ -118,9 +118,7 @@ def _get_commands(data: Dict) -> List[Directive]:
     return commands


-def _device_commands(
-    device: Dict, directives: List[Directive]
-) -> Generator[Directive, None, None]:
+def _device_commands(device: Dict, directives: List[Directive]) -> Generator[Directive, None, None]:
     device_commands = device.get("commands", [])
     for directive in directives:
         if directive.id in device_commands:
@@ -176,9 +174,7 @@ enable_file_logging(
 # Set up syslog logging if enabled.
 if params.logging.syslog is not None and params.logging.syslog.enable:
     enable_syslog_logging(
-        logger=log,
-        syslog_host=params.logging.syslog.host,
-        syslog_port=params.logging.syslog.port,
+        logger=log, syslog_host=params.logging.syslog.host, syslog_port=params.logging.syslog.port,
     )

 if params.logging.http is not None and params.logging.http.enable:
@@ -196,18 +192,14 @@ try:
     # If keywords are unmodified (default), add the org name &
     # site_title.
     if Params().site_keywords == params.site_keywords:
-        params.site_keywords = sorted(
-            {*params.site_keywords, params.org_name, params.site_title}
-        )
+        params.site_keywords = sorted({*params.site_keywords, params.org_name, params.site_title})

 except KeyError:
     pass


 content_greeting = get_markdown(
-    config_path=params.web.greeting,
-    default="",
-    params={"title": params.web.greeting.title},
+    config_path=params.web.greeting, default="", params={"title": params.web.greeting.title},
 )
@@ -14,9 +14,7 @@ from hyperglass.exceptions.private import ConfigInvalid
 Importer = TypeVar("Importer")


-def validate_config(
-    config: Union[Dict[str, Any], List[Any]], importer: Importer
-) -> Importer:
+def validate_config(config: Union[Dict[str, Any], List[Any]], importer: Importer) -> Importer:
     """Validate a config dict against a model."""
     validated = None
     try:
@@ -124,9 +124,7 @@ class PublicHyperglassError(HyperglassError):
         kwargs["error"] = error
         self._message = self._safe_format(self._message_template, **kwargs)
         self._keywords = list(kwargs.values())
-        super().__init__(
-            message=self._message, level=self._level, keywords=self._keywords
-        )
+        super().__init__(message=self._message, level=self._level, keywords=self._keywords)

     def handle_error(self, error: Any) -> None:
         """Add details to the error template, if provided."""
@@ -156,6 +154,4 @@ class PrivateHyperglassError(HyperglassError):
         kwargs["error"] = error
         self._message = self._safe_format(message, **kwargs)
         self._keywords = list(kwargs.values())
-        super().__init__(
-            message=self._message, level=self._level, keywords=self._keywords
-        )
+        super().__init__(message=self._message, level=self._level, keywords=self._keywords)
@@ -10,9 +10,7 @@ from ._common import ErrorLevel, PrivateHyperglassError
 class ExternalError(PrivateHyperglassError):
     """Raised when an error during a connection to an external service occurs."""

-    def __init__(
-        self, message: str, level: ErrorLevel, **kwargs: Dict[str, Any]
-    ) -> None:
+    def __init__(self, message: str, level: ErrorLevel, **kwargs: Dict[str, Any]) -> None:
         """Set level according to level argument."""
         self._level = level
         super().__init__(message, **kwargs)
@@ -31,9 +29,7 @@ class UnsupportedDevice(PrivateHyperglassError):

         drivers = ("", *[*DRIVER_MAP.keys(), *CLASS_MAPPER.keys()].sort())
         driver_list = "\n - ".join(drivers)
-        super().__init__(
-            message=f"'{nos}' is not supported. Must be one of:{driver_list}"
-        )
+        super().__init__(message=f"'{nos}' is not supported. Must be one of:{driver_list}")


 class InputValidationError(PrivateHyperglassError):
@@ -33,9 +33,7 @@ class AuthError(
         super().__init__(error=str(error), device=device.name, proxy=device.proxy)


-class RestError(
-    PublicHyperglassError, template=params.messages.connection_error, level="danger"
-):
+class RestError(PublicHyperglassError, template=params.messages.connection_error, level="danger"):
     """Raised upon a rest API client error."""

     def __init__(self, error: BaseException, *, device: Device):
@@ -86,9 +84,7 @@ class QueryLocationNotFound(NotFound):

     def __init__(self, location: Any, **kwargs: Dict[str, Any]) -> None:
         """Initialize a NotFound error for a query location."""
-        super().__init__(
-            type=params.web.text.query_location, name=str(location), **kwargs
-        )
+        super().__init__(type=params.web.text.query_location, name=str(location), **kwargs)


 class QueryTypeNotFound(NotFound):
@@ -96,9 +92,7 @@ class QueryTypeNotFound(NotFound):

     def __init__(self, query_type: Any, **kwargs: Dict[str, Any]) -> None:
         """Initialize a NotFound error for a query type."""
-        super().__init__(
-            type=params.web.text.query_type, name=str(query_type), **kwargs
-        )
+        super().__init__(type=params.web.text.query_type, name=str(query_type), **kwargs)


 class QueryGroupNotFound(NotFound):
@@ -32,9 +32,7 @@ class Construct:
     def __init__(self, device, query):
         """Initialize command construction."""
         log.debug(
-            "Constructing '{}' query for '{}'",
-            query.query_type,
-            str(query.query_target),
+            "Constructing '{}' query for '{}'", query.query_type, str(query.query_target),
         )
         self.query = query
         self.device = device
@@ -73,10 +71,7 @@ class Construct:
         for key in [k for k in keys if k != "target"]:
             if key not in attrs:
                 raise ConfigError(
-                    (
-                        "Command '{c}' has attribute '{k}', "
-                        "which is missing from device '{d}'"
-                    ),
+                    ("Command '{c}' has attribute '{k}', " "which is missing from device '{d}'"),
                     level="danger",
                     c=self.directive.name,
                     k=key,
@@ -80,9 +80,7 @@ class AgentConnection(Connection):
         )
         log.debug("Encoded JWT: {}", encoded_query)

-        raw_response = await http_client.post(
-            endpoint, json={"encoded": encoded_query}
-        )
+        raw_response = await http_client.post(endpoint, json={"encoded": encoded_query})
         log.debug("HTTP status code: {}", raw_response.status_code)

         raw = raw_response.text
@@ -36,9 +36,7 @@ class SSHConnection(Connection):
         }
         if proxy.credential._method == "password":
             # Use password auth if no key is defined.
-            tunnel_kwargs[
-                "ssh_password"
-            ] = proxy.credential.password.get_secret_value()
+            tunnel_kwargs["ssh_password"] = proxy.credential.password.get_secret_value()
         else:
             # Otherwise, use key auth.
             tunnel_kwargs["ssh_pkey"] = proxy.credential.key.as_posix()
@@ -53,8 +51,7 @@ class SSHConnection(Connection):

         except BaseSSHTunnelForwarderError as scrape_proxy_error:
             log.error(
-                f"Error connecting to device {self.device.name} via "
-                f"proxy {proxy.name}"
+                f"Error connecting to device {self.device.name} via " f"proxy {proxy.name}"
             )
             raise ScrapeError(error=scrape_proxy_error, device=self.device)
@@ -78,9 +78,7 @@ class NetmikoConnection(SSHConnection):

         if self.device.credential._method == "password":
             # Use password auth if no key is defined.
-            driver_kwargs[
-                "password"
-            ] = self.device.credential.password.get_secret_value()
+            driver_kwargs["password"] = self.device.credential.password.get_secret_value()
         else:
             # Otherwise, use key auth.
             driver_kwargs["use_keys"] = True
@@ -88,9 +86,7 @@ class NetmikoConnection(SSHConnection):
         if self.device.credential._method == "encrypted_key":
             # If the key is encrypted, use the password field as the
             # private key password.
-            driver_kwargs[
-                "passphrase"
-            ] = self.device.credential.password.get_secret_value()
+            driver_kwargs["passphrase"] = self.device.credential.password.get_secret_value()

         try:
             nm_connect_direct = ConnectHandler(**driver_kwargs)
@@ -98,9 +98,7 @@ class ScrapliConnection(SSHConnection):

         if self.device.credential._method == "password":
             # Use password auth if no key is defined.
-            driver_kwargs[
-                "auth_password"
-            ] = self.device.credential.password.get_secret_value()
+            driver_kwargs["auth_password"] = self.device.credential.password.get_secret_value()
         else:
             # Otherwise, use key auth.
             driver_kwargs["auth_private_key"] = self.device.credential.key.as_posix()
@@ -112,9 +110,7 @@ class ScrapliConnection(SSHConnection):
         ] = self.device.credential.password.get_secret_value()

         driver = driver(**driver_kwargs)
-        driver.logger = log.bind(
-            logger_name=f"scrapli.{driver.host}:{driver.port}-driver"
-        )
+        driver.logger = log.bind(logger_name=f"scrapli.{driver.host}:{driver.port}-driver")
         try:
             responses = ()
             async with driver as connection:
@@ -8,7 +8,7 @@ hyperglass-frr API calls, returns the output back to the front end.

 # Standard Library
 import signal
-from typing import Any, Dict, Union, Callable, Sequence, TYPE_CHECKING
+from typing import TYPE_CHECKING, Any, Dict, Union, Callable, Sequence

 # Project
 from hyperglass.log import log
hyperglass/external/_base.py (vendored), 24 changes
@@ -140,9 +140,7 @@ class BaseExternal:

         except gaierror as err:
             # Raised if the target isn't listening on the port
-            raise self._exception(
-                f"{self.name} appears to be unreachable", err
-            ) from None
+            raise self._exception(f"{self.name} appears to be unreachable", err) from None

         return True
@@ -157,21 +155,13 @@ class BaseExternal:

         supported_methods = ("GET", "POST", "PUT", "DELETE", "HEAD", "PATCH")

-        (
-            method,
-            endpoint,
-            item,
-            headers,
-            params,
-            data,
-            timeout,
-            response_required,
-        ) = itemgetter(*kwargs.keys())(kwargs)
+        (method, endpoint, item, headers, params, data, timeout, response_required,) = itemgetter(
+            *kwargs.keys()
+        )(kwargs)

         if method.upper() not in supported_methods:
             raise self._exception(
-                f'Method must be one of {", ".join(supported_methods)}. '
-                f"Got: {str(method)}"
+                f'Method must be one of {", ".join(supported_methods)}. ' f"Got: {str(method)}"
             )

         endpoint = "/".join(
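The tuple unpack above relies on operator.itemgetter(*kwargs.keys())(kwargs) returning the dict's values in key order. A standalone sketch of the trick (the sample dict is illustrative):

    from operator import itemgetter

    kwargs = {"method": "GET", "endpoint": "/api/devices", "timeout": 5}

    # itemgetter("method", "endpoint", "timeout") builds a callable that
    # returns a tuple of those keys' values; calling it with the dict
    # unpacks them in insertion order (guaranteed for dicts on 3.7+).
    method, endpoint, timeout = itemgetter(*kwargs.keys())(kwargs)
    assert (method, endpoint, timeout) == ("GET", "/api/devices", 5)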
@@ -209,9 +199,7 @@ class BaseExternal:
         try:
             timeout = int(timeout)
         except TypeError:
-            raise self._exception(
-                f"Timeout must be an int, got: {str(timeout)}"
-            )
+            raise self._exception(f"Timeout must be an int, got: {str(timeout)}")
         request["timeout"] = timeout

         log.debug("Constructed request parameters {}", request)
hyperglass/external/bgptools.py (vendored), 4 changes
@@ -31,9 +31,7 @@ def parse_whois(output: str, targets: List[str]) -> Dict[str, str]:
     def lines(raw):
         """Generate clean string values for each column."""
         for r in (r for r in raw.split("\n") if r):
-            fields = (
-                re.sub(r"(\n|\r)", "", field).strip(" ") for field in r.split("|")
-            )
+            fields = (re.sub(r"(\n|\r)", "", field).strip(" ") for field in r.split("|"))
             yield fields

     data = {}
hyperglass/external/generic.py (vendored), 4 changes
@@ -12,9 +12,7 @@ class GenericHook(BaseExternal, name="Generic"):
     def __init__(self, config):
         """Initialize external base class with http connection details."""

-        super().__init__(
-            base_url=f"{config.host.scheme}://{config.host.host}", config=config
-        )
+        super().__init__(base_url=f"{config.host.scheme}://{config.host.host}", config=config)

     async def send(self, query):
         """Send an incoming webhook to http endpoint."""
hyperglass/external/msteams.py (vendored), 4 changes
@@ -12,9 +12,7 @@ class MSTeams(BaseExternal, name="MSTeams"):
     def __init__(self, config):
         """Initialize external base class with Microsoft Teams connection details."""

-        super().__init__(
-            base_url="https://outlook.office.com", config=config, parse=False
-        )
+        super().__init__(base_url="https://outlook.office.com", config=config, parse=False)

     async def send(self, query):
         """Send an incoming webhook to Microsoft Teams."""
hyperglass/external/rpki.py (vendored), 4 changes
@@ -41,9 +41,7 @@ def rpki_state(prefix, asn):
         log.error(str(err))
         state = 3

-    msg = "RPKI Validation State for {} via AS{} is {}".format(
-        prefix, asn, RPKI_NAME_MAP[state]
-    )
+    msg = "RPKI Validation State for {} via AS{} is {}".format(prefix, asn, RPKI_NAME_MAP[state])
     if cached is not None:
         msg += " [CACHED]"
hyperglass/external/webhooks.py (vendored), 3 changes
@@ -24,6 +24,5 @@ class Webhook(BaseExternal):
         return provider_class(config)
     except KeyError:
         raise UnsupportedError(
-            message="{p} is not yet supported as a webhook target.",
-            p=config.provider.title(),
+            message="{p} is not yet supported as a webhook target.", p=config.provider.title(),
         )
@@ -108,11 +108,7 @@ def enable_file_logging(logger, log_directory, log_format, log_max_size):
         lf.write(f'\n\n{"".join(log_break)}\n\n')

     logger.add(
-        log_file,
-        format=_FMT,
-        rotation=log_max_size,
-        serialize=structured,
-        enqueue=True,
+        log_file, format=_FMT, rotation=log_max_size, serialize=structured, enqueue=True,
     )

     logger.debug("Logging to {} enabled", str(log_file))
@@ -127,9 +123,7 @@ def enable_syslog_logging(logger, syslog_host, syslog_port):
     from logging.handlers import SysLogHandler

     logger.add(
-        SysLogHandler(address=(str(syslog_host), syslog_port)),
-        format=_FMT_BASIC,
-        enqueue=True,
+        SysLogHandler(address=(str(syslog_host), syslog_port)), format=_FMT_BASIC, enqueue=True,
     )
     logger.debug(
         "Logging to syslog target {}:{} enabled", str(syslog_host), str(syslog_port),
@@ -125,14 +125,12 @@ def register_all_plugins(devices: "Devices") -> None:
     """Validate and register configured plugins."""

     for plugin_file in {
-        Path(p)
-        for p in (p for d in devices.objects for c in d.commands for p in c.plugins)
+        Path(p) for p in (p for d in devices.objects for c in d.commands for p in c.plugins)
     }:
         failures = register_plugin(plugin_file)
         for failure in failures:
             log.warning(
-                "Plugin '{}' is not a valid hyperglass plugin, and was not registered",
-                failure,
+                "Plugin '{}' is not a valid hyperglass plugin, and was not registered", failure,
             )
@@ -26,9 +26,7 @@ from hyperglass.exceptions.private import InputValidationError
 from ..config.devices import Device
 from ..commands.generic import Directive

-DIRECTIVE_IDS = [
-    directive.id for device in devices.objects for directive in device.commands
-]
+DIRECTIVE_IDS = [directive.id for device in devices.objects for directive in device.commands]

 DIRECTIVE_GROUPS = {
     group
@@ -76,9 +74,7 @@ class Query(BaseModel):
                 "example": "1.1.1.0/24",
             },
         }
-        schema_extra = {
-            "x-code-samples": [{"lang": "Python", "source": "print('stuff')"}]
-        }
+        schema_extra = {"x-code-samples": [{"lang": "Python", "source": "print('stuff')"}]}

     def __init__(self, **kwargs):
         """Initialize the query with a UTC timestamp at initialization time."""
@@ -26,20 +26,14 @@ class QueryError(BaseModel):
         """Pydantic model configuration."""

         title = "Query Error"
-        description = (
-            "Response received when there is an error executing the requested query."
-        )
+        description = "Response received when there is an error executing the requested query."
         fields = {
             "output": {
                 "title": "Output",
                 "description": "Error Details",
                 "example": "192.0.2.1/32 is not allowed.",
             },
-            "level": {
-                "title": "Level",
-                "description": "Error Severity",
-                "example": "danger",
-            },
+            "level": {"title": "Level", "description": "Error Severity", "example": "danger"},
             "keywords": {
                 "title": "Keywords",
                 "description": "Relevant keyword values contained in the `output` field, which can be used for formatting.",
@@ -189,11 +183,7 @@ class RoutersResponse(BaseModel):
         description = "Device attributes"
         schema_extra = {
             "examples": [
-                {
-                    "id": "nyc_router_1",
-                    "name": "NYC Router 1",
-                    "network": "New York City, NY",
-                }
+                {"id": "nyc_router_1", "name": "NYC Router 1", "network": "New York City, NY"}
             ]
         }
@@ -217,11 +207,11 @@ class SupportedQueryResponse(BaseModel):
         """Pydantic model configuration."""

         title = "Query Type"
-        description = "If enabled is `true`, the `name` field may be used to specify the query type."
+        description = (
+            "If enabled is `true`, the `name` field may be used to specify the query type."
+        )
         schema_extra = {
-            "examples": [
-                {"name": "bgp_route", "display_name": "BGP Route", "enable": True}
-            ]
+            "examples": [{"name": "bgp_route", "display_name": "BGP Route", "enable": True}]
         }
@@ -71,9 +71,7 @@ def validate_ip(value, query_type, query_vrf):  # noqa: C901

     except ValueError:
         raise InputInvalid(
-            params.messages.invalid_input,
-            target=value,
-            query_type=query_type_params.display_name,
+            params.messages.invalid_input, target=value, query_type=query_type_params.display_name,
         )

     # Test the valid IP address to determine if it is:
@@ -83,9 +81,7 @@ def validate_ip(value, query_type, query_vrf):  # noqa: C901
     # ...and returns an error if so.
     if valid_ip.is_reserved or valid_ip.is_unspecified or valid_ip.is_loopback:
         raise InputInvalid(
-            params.messages.invalid_input,
-            target=value,
-            query_type=query_type_params.display_name,
+            params.messages.invalid_input, target=value, query_type=query_type_params.display_name,
         )

     ip_version = valid_ip.version
@@ -105,9 +101,7 @@ def validate_ip(value, query_type, query_vrf):  # noqa: C901
             pass

         if ace.action == "permit":
-            log.debug(
-                "{t} is allowed by access-list {a}", t=str(valid_ip), a=repr(ace)
-            )
+            log.debug("{t} is allowed by access-list {a}", t=str(valid_ip), a=repr(ace))
             break
         elif ace.action == "deny":
             raise InputNotAllowed(
@@ -125,10 +119,7 @@ def validate_ip(value, query_type, query_vrf):  # noqa: C901
         new_ip = valid_ip.network_address

         log.debug(
-            "Converted '{o}' to '{n}' for '{q}' query",
-            o=valid_ip,
-            n=new_ip,
-            q=query_type,
+            "Converted '{o}' to '{n}' for '{q}' query", o=valid_ip, n=new_ip, q=query_type,
         )

         valid_ip = new_ip
@@ -137,11 +128,7 @@ def validate_ip(value, query_type, query_vrf):  # noqa: C901
     # - Query type is bgp_route
     # - force_cidr option is enabled
    # - Query target is not a private address/network
-    elif (
-        query_type in ("bgp_route",)
-        and vrf_afi.force_cidr
-        and not valid_ip.is_private
-    ):
+    elif query_type in ("bgp_route",) and vrf_afi.force_cidr and not valid_ip.is_private:
         log.debug("Getting containing prefix for {q}", q=str(valid_ip))

         ip_str = str(valid_ip.network_address)
@@ -150,9 +137,7 @@ def validate_ip(value, query_type, query_vrf):  # noqa: C901

         if containing_prefix is None:
             log.error(
-                "Unable to find containing prefix for {}. Got: {}",
-                str(valid_ip),
-                network_info,
+                "Unable to find containing prefix for {}. Got: {}", str(valid_ip), network_info,
             )
             raise InputInvalid("{q} does not have a containing prefix", q=ip_str)
@@ -163,13 +148,9 @@ def validate_ip(value, query_type, query_vrf):  # noqa: C901

         except ValueError as err:
             log.error(
-                "Unable to find containing prefix for {q}. Error: {e}",
-                q=str(valid_ip),
-                e=err,
+                "Unable to find containing prefix for {q}. Error: {e}", q=str(valid_ip), e=err,
             )
-            raise InputInvalid(
-                "{q} does does not have a containing prefix", q=valid_ip
-            )
+            raise InputInvalid("{q} does does not have a containing prefix", q=valid_ip)

     # For a host query with bgp_route query type and force_cidr
     # disabled, convert the host query to a single IP address.
@@ -24,9 +24,7 @@ class _IPv6(CommandSet):
     bgp_aspath: StrictStr = 'show bgp ipv6 unicast quote-regexp "{target}"'
     bgp_route: StrictStr = "show bgp ipv6 unicast {target} | exclude pathid:|Epoch"
     ping: StrictStr = "ping ipv6 {target} repeat 5 source {source}"
-    traceroute: StrictStr = (
-        "traceroute ipv6 {target} timeout 1 probe 2 source {source}"
-    )
+    traceroute: StrictStr = ("traceroute ipv6 {target} timeout 1 probe 2 source {source}")


 class _VPNIPv4(CommandSet):
@@ -36,9 +34,7 @@ class _VPNIPv4(CommandSet):
     bgp_aspath: StrictStr = 'show bgp vpnv4 unicast vrf {vrf} quote-regexp "{target}"'
     bgp_route: StrictStr = "show bgp vpnv4 unicast vrf {vrf} {target}"
     ping: StrictStr = "ping vrf {vrf} {target} repeat 5 source {source}"
-    traceroute: StrictStr = (
-        "traceroute vrf {vrf} {target} timeout 1 probe 2 source {source}"
-    )
+    traceroute: StrictStr = ("traceroute vrf {vrf} {target} timeout 1 probe 2 source {source}")


 class _VPNIPv6(CommandSet):
@@ -48,9 +44,7 @@ class _VPNIPv6(CommandSet):
     bgp_aspath: StrictStr = 'show bgp vpnv6 unicast vrf {vrf} quote-regexp "{target}"'
     bgp_route: StrictStr = "show bgp vpnv6 unicast vrf {vrf} {target}"
     ping: StrictStr = "ping vrf {vrf} {target} repeat 5 source {source}"
-    traceroute: StrictStr = (
-        "traceroute vrf {vrf} {target} timeout 1 probe 2 source {source}"
-    )
+    traceroute: StrictStr = ("traceroute vrf {vrf} {target} timeout 1 probe 2 source {source}")


 class CiscoIOSCommands(CommandGroup):
@@ -292,8 +292,6 @@ class Directive(HyperglassModel):
         }

         if self.field.is_select:
-            value["options"] = [
-                o.export_dict() for o in self.field.options if o is not None
-            ]
+            value["options"] = [o.export_dict() for o in self.field.options if o is not None]

         return value
@@ -92,9 +92,7 @@ class Device(HyperglassModel, extra="allow"):
         legacy_display_name = values.pop("display_name", None)

         if legacy_display_name is not None:
-            log.warning(
-                "The 'display_name' field is deprecated. Use the 'name' field instead."
-            )
+            log.warning("The 'display_name' field is deprecated. Use the 'name' field instead.")
             device_id = generate_id(legacy_display_name)
             display_name = legacy_display_name
         else:
@@ -23,9 +23,7 @@ class EndpointConfig(HyperglassModel):
         description="Displayed inside each API endpoint section.",
     )
     summary: StrictStr = Field(
-        ...,
-        title="Endpoint Summary",
-        description="Displayed beside the API endpoint URI.",
+        ..., title="Endpoint Summary", description="Displayed beside the API endpoint URI.",
     )
@@ -41,9 +39,7 @@ class Docs(HyperglassModel):
         description="OpenAPI UI library to use for the hyperglass API docs. Currently, the options are [Swagger UI](/fixme) and [Redoc](/fixme).",
     )
     base_url: HttpUrl = Field(
-        "https://lg.example.net",
-        title="Base URL",
-        description="Base URL used in request samples.",
+        "https://lg.example.net", title="Base URL", description="Base URL used in request samples.",
     )
     uri: AnyUri = Field(
         "/api/docs",
@@ -11,9 +11,7 @@ class Network(HyperglassModel):
     """Validation Model for per-network/asn config in devices.yaml."""

     name: StrictStr = Field(
-        ...,
-        title="Network Name",
-        description="Internal name of the device's primary network.",
+        ..., title="Network Name", description="Internal name of the device's primary network.",
     )
     display_name: StrictStr = Field(
         ...,
@@ -32,9 +32,7 @@ class OpenGraph(HyperglassModel):
         supported_extensions = (".jpg", ".jpeg", ".png")
         if value is not None and value.suffix not in supported_extensions:
             raise ValueError(
-                "OpenGraph image must be one of {e}".format(
-                    e=", ".join(supported_extensions)
-                )
+                "OpenGraph image must be one of {e}".format(e=", ".join(supported_extensions))
             )

         return value
@@ -110,9 +110,7 @@ class Params(ParamsPublic, HyperglassModel):
         description="Allowed CORS hosts. By default, no CORS hosts are allowed.",
     )
     netmiko_delay_factor: IntFloat = Field(
-        0.1,
-        title="Netmiko Delay Factor",
-        description="Override the netmiko global delay factor.",
+        0.1, title="Netmiko Delay Factor", description="Override the netmiko global delay factor.",
     )

     # Sub Level Params
@@ -184,9 +182,7 @@ class Params(ParamsPublic, HyperglassModel):

     def content_params(self) -> Dict[str, Any]:
         """Export content-specific parameters."""
-        return self.dict(
-            include={"primary_asn", "org_name", "site_title", "site_description"}
-        )
+        return self.dict(include={"primary_asn", "org_name", "site_title", "site_description"})

     def frontend(self) -> Dict[str, Any]:
         """Export UI-specific parameters."""
@@ -39,9 +39,7 @@ class BgpCommunityPattern(HyperglassModel):
         """Pydantic model configuration."""

         title = "Pattern"
-        description = (
-            "Regular expression patterns used to validate BGP Community queries."
-        )
+        description = "Regular expression patterns used to validate BGP Community queries."


 class BgpAsPathPattern(HyperglassModel):
@@ -67,9 +65,7 @@ class BgpAsPathPattern(HyperglassModel):
         """Pydantic model configuration."""

         title = "Pattern"
-        description = (
-            "Regular expression patterns used to validate BGP AS Path queries."
-        )
+        description = "Regular expression patterns used to validate BGP AS Path queries."


 class Community(HyperglassModel):
@@ -84,9 +80,7 @@ class BgpCommunity(HyperglassModel):
     """Validation model for bgp_community configuration."""

     enable: StrictBool = Field(
-        True,
-        title="Enable",
-        description="Enable or disable the BGP Community query type.",
+        True, title="Enable", description="Enable or disable the BGP Community query type.",
     )
     display_name: StrictStr = Field(
         "BGP Community",
@@ -115,9 +109,7 @@ class BgpAsPath(HyperglassModel):
     """Validation model for bgp_aspath configuration."""

     enable: StrictBool = Field(
-        True,
-        title="Enable",
-        description="Enable or disable the BGP AS Path query type.",
+        True, title="Enable", description="Enable or disable the BGP AS Path query type.",
     )
     display_name: StrictStr = Field(
         "BGP AS Path",
@@ -168,9 +160,7 @@ class Queries(HyperglassModel):
             query_obj = getattr(self, query)
             _map[query] = {
                 "name": query,
-                **query_obj.export_dict(
-                    include={"display_name", "enable", "mode", "communities"}
-                ),
+                **query_obj.export_dict(include={"display_name", "enable", "mode", "communities"}),
             }
         return _map
@@ -185,11 +175,7 @@ class Queries(HyperglassModel):

         for query in SUPPORTED_QUERY_TYPES:
             query_obj = getattr(self, query)
             _list.append(
-                {
-                    "name": query,
-                    "display_name": query_obj.display_name,
-                    "enable": query_obj.enable,
-                }
+                {"name": query, "display_name": query_obj.display_name, "enable": query_obj.enable}
             )
         return _list
@@ -79,6 +79,4 @@ class HyperglassModel(BaseModel):
             "exclude_unset": kwargs.pop("exclude_unset", False),
         }

-        return yaml.safe_dump(
-            json.loads(self.export_json(**export_kwargs)), *args, **kwargs
-        )
+        return yaml.safe_dump(json.loads(self.export_json(**export_kwargs)), *args, **kwargs)
@@ -110,9 +110,7 @@ class FRRRoute(_FRRBase):
             }
         )

-        serialized = ParsedRoutes(
-            vrf=vrf, count=len(routes), routes=routes, winning_weight="high",
-        )
+        serialized = ParsedRoutes(vrf=vrf, count=len(routes), routes=routes, winning_weight="high",)

         log.info("Serialized FRR response: {}", serialized)
         return serialized
@@ -83,9 +83,7 @@ class JuniperRouteTableEntry(_JuniperBase):

         _path_attr = values.get("bgp-path-attributes", {})
         _path_attr_agg = _path_attr.get("attr-aggregator", {}).get("attr-value", {})
-        values["as-path"] = _path_attr.get("attr-as-path-effective", {}).get(
-            "attr-value", ""
-        )
+        values["as-path"] = _path_attr.get("attr-as-path-effective", {}).get("attr-value", "")
         values["source-as"] = _path_attr_agg.get("aggr-as-number", 0)
         values["source-rid"] = _path_attr_agg.get("aggr-router-id", "")
         values["peer-rid"] = values["peer-id"]
@@ -171,9 +169,7 @@ class JuniperRoute(_JuniperBase):
         count = 0
         for table in self.rt:
             count += table.rt_entry_count
-            prefix = "/".join(
-                str(i) for i in (table.rt_destination, table.rt_prefix_length)
-            )
+            prefix = "/".join(str(i) for i in (table.rt_destination, table.rt_prefix_length))
             for route in table.rt_entry:
                 routes.append(
                     {
@@ -193,9 +189,7 @@ class JuniperRoute(_JuniperBase):
                     }
                 )

-        serialized = ParsedRoutes(
-            vrf=vrf, count=count, routes=routes, winning_weight="low",
-        )
+        serialized = ParsedRoutes(vrf=vrf, count=count, routes=routes, winning_weight="low",)

         log.debug("Serialized Juniper response: {}", serialized)
         return serialized
@@ -75,8 +75,7 @@ class Webhook(HyperglassModel):
            return f"`{str(value)}`"

        header_data = [
-           {"name": k, "value": code(v)}
-           for k, v in self.headers.dict(by_alias=True).items()
+           {"name": k, "value": code(v)} for k, v in self.headers.dict(by_alias=True).items()
        ]
        time_fmt = self.timestamp.strftime("%Y %m %d %H:%M:%S")
        payload = {
@@ -131,39 +130,21 @@ class Webhook(HyperglassModel):
             header_data.append(field)

         query_data = [
-            {
-                "type": "mrkdwn",
-                "text": make_field("Query Location", self.query_location),
-            },
-            {
-                "type": "mrkdwn",
-                "text": make_field("Query Target", self.query_target, code=True),
-            },
+            {"type": "mrkdwn", "text": make_field("Query Location", self.query_location)},
+            {"type": "mrkdwn", "text": make_field("Query Target", self.query_target, code=True)},
             {"type": "mrkdwn", "text": make_field("Query Type", self.query_type)},
             {"type": "mrkdwn", "text": make_field("Query VRF", self.query_vrf)},
         ]

         source_data = [
-            {
-                "type": "mrkdwn",
-                "text": make_field("Source IP", self.source, code=True),
-            },
+            {"type": "mrkdwn", "text": make_field("Source IP", self.source, code=True)},
             {
                 "type": "mrkdwn",
                 "text": make_field("Source Prefix", self.network.prefix, code=True),
             },
-            {
-                "type": "mrkdwn",
-                "text": make_field("Source ASN", self.network.asn, code=True),
-            },
-            {
-                "type": "mrkdwn",
-                "text": make_field("Source Country", self.network.country),
-            },
-            {
-                "type": "mrkdwn",
-                "text": make_field("Source Organization", self.network.org),
-            },
+            {"type": "mrkdwn", "text": make_field("Source ASN", self.network.asn, code=True)},
+            {"type": "mrkdwn", "text": make_field("Source Country", self.network.country)},
+            {"type": "mrkdwn", "text": make_field("Source Organization", self.network.org)},
         ]

         time_fmt = self.timestamp.strftime("%Y %m %d %H:%M:%S")
@@ -171,20 +152,14 @@ class Webhook(HyperglassModel):
         payload = {
             "text": _WEBHOOK_TITLE,
             "blocks": [
-                {
-                    "type": "section",
-                    "text": {"type": "mrkdwn", "text": f"*{time_fmt} UTC*"},
-                },
+                {"type": "section", "text": {"type": "mrkdwn", "text": f"*{time_fmt} UTC*"}},
                 {"type": "section", "fields": query_data},
                 {"type": "divider"},
                 {"type": "section", "fields": source_data},
                 {"type": "divider"},
                 {
                     "type": "section",
-                    "text": {
-                        "type": "mrkdwn",
-                        "text": "*Headers*\n" + "\n".join(header_data),
-                    },
+                    "text": {"type": "mrkdwn", "text": "*Headers*\n" + "\n".join(header_data)},
                 },
             ],
         }
@@ -58,9 +58,7 @@ def parse_juniper(output: Sequence) -> Dict:  # noqa: C901
         cleaned = clean_xml_output(response)

         try:
-            parsed = xmltodict.parse(
-                cleaned, force_list=("rt", "rt-entry", "community")
-            )
+            parsed = xmltodict.parse(cleaned, force_list=("rt", "rt-entry", "community"))

             log.debug("Initially Parsed Response: \n{}", parsed)
|
|
@ -50,9 +50,7 @@ def parse_linux_ping(output):
|
|||
|
||||
_bytes, seq, ttl, rtt = _process_numbers(bytes_seq_ttl_rtt)
|
||||
|
||||
reply_stats.append(
|
||||
{"bytes": _bytes, "sequence": seq, "ttl": ttl, "rtt": rtt}
|
||||
)
|
||||
reply_stats.append({"bytes": _bytes, "sequence": seq, "ttl": ttl, "rtt": rtt})
|
||||
|
||||
stats = [l for l in _stats.splitlines() if l]
|
||||
|
||||
|
|
|
|||
|
|
@@ -11,9 +11,7 @@ from threading import Thread
 from hyperglass.log import log


-async def move_files(  # noqa: C901
-    src: Path, dst: Path, files: Iterable[Path]
-) -> Tuple[str]:
+async def move_files(src: Path, dst: Path, files: Iterable[Path]) -> Tuple[str]:  # noqa: C901
     """Move iterable of files from source to destination.

     Arguments:
@@ -133,9 +131,7 @@ def copyfiles(src_files: Iterable[Path], dst_files: Iterable[Path]):
     return True


-def check_path(
-    path: Union[Path, str], mode: str = "r", create: bool = False
-) -> Optional[Path]:
+def check_path(path: Union[Path, str], mode: str = "r", create: bool = False) -> Optional[Path]:
     """Verify if a path exists and is accessible."""

     result = None
@@ -22,9 +22,7 @@ def get_node_version() -> Tuple[int, int, int]:
     """Get the system's NodeJS version."""
     node_path = shutil.which("node")

-    raw_version = subprocess.check_output(  # noqa: S603
-        [node_path, "--version"]
-    ).decode()
+    raw_version = subprocess.check_output([node_path, "--version"]).decode()  # noqa: S603

     # Node returns the version as 'v14.5.0', for example. Remove the v.
     version = raw_version.replace("v", "")
@@ -162,11 +160,7 @@ async def build_ui(app_path):


 def generate_opengraph(
-    image_path: Path,
-    max_width: int,
-    max_height: int,
-    target_path: Path,
-    background_color: str,
+    image_path: Path, max_width: int, max_height: int, target_path: Path, background_color: str,
 ):
     """Generate an OpenGraph compliant image."""
     # Third Party
@@ -340,9 +334,7 @@ async def build_frontend(  # noqa: C901
         log.debug("Previous Build ID: {}", ef_id)

         if ef_id == build_id:
-            log.debug(
-                "UI parameters unchanged since last build, skipping UI build..."
-            )
+            log.debug("UI parameters unchanged since last build, skipping UI build...")
             return True

         env_vars["buildId"] = build_id
@@ -368,11 +360,7 @@ async def build_frontend(  # noqa: C901
         migrate_images(app_path, params)

         generate_opengraph(
-            params.web.opengraph.image,
-            1200,
-            630,
-            images_dir,
-            params.web.theme.colors.black,
+            params.web.opengraph.image, 1200, 630, images_dir, params.web.theme.colors.black,
         )

     except Exception as err: