forked from mirrors/thatmattlove-hyperglass
Complete directives implementation, refactor exceptions, deprecate VRFs, bump minimum Python version
This commit is contained in:
parent
b05e544e40
commit
5ccfe50792
58 changed files with 1222 additions and 1010 deletions
2
.flake8
2
.flake8
|
|
@ -3,7 +3,7 @@ max-line-length=88
|
||||||
count=True
|
count=True
|
||||||
show-source=False
|
show-source=False
|
||||||
statistics=True
|
statistics=True
|
||||||
exclude=.git, __pycache__, hyperglass/api/examples/*.py, hyperglass/compat/_sshtunnel.py, test.py
|
exclude=.git, hyperglass/ui, __pycache__, hyperglass/api/examples/*.py, hyperglass/compat/_sshtunnel.py, test.py
|
||||||
filename=*.py
|
filename=*.py
|
||||||
per-file-ignores=
|
per-file-ignores=
|
||||||
hyperglass/main.py:E402
|
hyperglass/main.py:E402
|
||||||
|
|
|
||||||
|
|
@ -263,7 +263,7 @@ app.mount("/", StaticFiles(directory=UI_DIR, html=True), name="ui")
|
||||||
def start(**kwargs):
|
def start(**kwargs):
|
||||||
"""Start the web server with Uvicorn ASGI."""
|
"""Start the web server with Uvicorn ASGI."""
|
||||||
# Third Party
|
# Third Party
|
||||||
import uvicorn
|
import uvicorn # type: ignore
|
||||||
|
|
||||||
try:
|
try:
|
||||||
uvicorn.run("hyperglass.api:app", **ASGI_PARAMS, **kwargs)
|
uvicorn.run("hyperglass.api:app", **ASGI_PARAMS, **kwargs)
|
||||||
|
|
|
||||||
|
|
@ -69,6 +69,7 @@ async def send_webhook(query_data: Query, request: Request, timestamp: datetime)
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@log.catch
|
||||||
async def query(query_data: Query, request: Request, background_tasks: BackgroundTasks):
|
async def query(query_data: Query, request: Request, background_tasks: BackgroundTasks):
|
||||||
"""Ingest request data pass it to the backend application to perform the query."""
|
"""Ingest request data pass it to the backend application to perform the query."""
|
||||||
|
|
||||||
|
|
|
||||||
30
hyperglass/cache/aio.py
vendored
30
hyperglass/cache/aio.py
vendored
|
|
@ -8,13 +8,13 @@ import asyncio
|
||||||
from typing import Any, Dict
|
from typing import Any, Dict
|
||||||
|
|
||||||
# Third Party
|
# Third Party
|
||||||
from aredis import StrictRedis as AsyncRedis
|
from aredis import StrictRedis as AsyncRedis # type: ignore
|
||||||
from aredis.pubsub import PubSub as AsyncPubSub
|
from aredis.pubsub import PubSub as AsyncPubSub # type: ignore
|
||||||
from aredis.exceptions import RedisError
|
from aredis.exceptions import RedisError # type: ignore
|
||||||
|
|
||||||
# Project
|
# Project
|
||||||
from hyperglass.cache.base import BaseCache
|
from hyperglass.cache.base import BaseCache
|
||||||
from hyperglass.exceptions import HyperglassError
|
from hyperglass.exceptions.private import DependencyError
|
||||||
|
|
||||||
|
|
||||||
class AsyncCache(BaseCache):
|
class AsyncCache(BaseCache):
|
||||||
|
|
@ -50,19 +50,17 @@ class AsyncCache(BaseCache):
|
||||||
err_msg = str(err.__context__)
|
err_msg = str(err.__context__)
|
||||||
|
|
||||||
if "auth" in err_msg.lower():
|
if "auth" in err_msg.lower():
|
||||||
raise HyperglassError(
|
raise DependencyError(
|
||||||
"Authentication to Redis server {server} failed.".format(
|
"Authentication to Redis server {s} failed with message: '{e}'",
|
||||||
server=repr(self)
|
s=repr(self, e=err_msg),
|
||||||
),
|
)
|
||||||
level="danger",
|
|
||||||
) from None
|
|
||||||
else:
|
else:
|
||||||
raise HyperglassError(
|
raise DependencyError(
|
||||||
"Unable to connect to Redis server {server}".format(
|
"Unable to connect to Redis server {s} due to error {e}",
|
||||||
server=repr(self)
|
s=repr(self),
|
||||||
),
|
e=err_msg,
|
||||||
level="danger",
|
)
|
||||||
) from None
|
|
||||||
|
|
||||||
async def get(self, *args: str) -> Any:
|
async def get(self, *args: str) -> Any:
|
||||||
"""Get item(s) from cache."""
|
"""Get item(s) from cache."""
|
||||||
|
|
|
||||||
23
hyperglass/cache/sync.py
vendored
23
hyperglass/cache/sync.py
vendored
|
|
@ -13,7 +13,7 @@ from redis.exceptions import RedisError
|
||||||
|
|
||||||
# Project
|
# Project
|
||||||
from hyperglass.cache.base import BaseCache
|
from hyperglass.cache.base import BaseCache
|
||||||
from hyperglass.exceptions import HyperglassError
|
from hyperglass.exceptions.private import DependencyError
|
||||||
|
|
||||||
|
|
||||||
class SyncCache(BaseCache):
|
class SyncCache(BaseCache):
|
||||||
|
|
@ -49,19 +49,16 @@ class SyncCache(BaseCache):
|
||||||
err_msg = str(err.__context__)
|
err_msg = str(err.__context__)
|
||||||
|
|
||||||
if "auth" in err_msg.lower():
|
if "auth" in err_msg.lower():
|
||||||
raise HyperglassError(
|
raise DependencyError(
|
||||||
"Authentication to Redis server {server} failed.".format(
|
"Authentication to Redis server {s} failed with message: '{e}'",
|
||||||
server=repr(self)
|
s=repr(self, e=err_msg),
|
||||||
),
|
)
|
||||||
level="danger",
|
|
||||||
) from None
|
|
||||||
else:
|
else:
|
||||||
raise HyperglassError(
|
raise DependencyError(
|
||||||
"Unable to connect to Redis server {server}".format(
|
"Unable to connect to Redis server {s} due to error {e}",
|
||||||
server=repr(self)
|
s=repr(self),
|
||||||
),
|
e=err_msg,
|
||||||
level="danger",
|
)
|
||||||
) from None
|
|
||||||
|
|
||||||
def get(self, *args: str) -> Any:
|
def get(self, *args: str) -> Any:
|
||||||
"""Get item(s) from cache."""
|
"""Get item(s) from cache."""
|
||||||
|
|
|
||||||
|
|
@ -3,11 +3,12 @@
|
||||||
# Standard Library
|
# Standard Library
|
||||||
import os
|
import os
|
||||||
import json
|
import json
|
||||||
from typing import Dict, List, Sequence, Generator
|
from typing import Dict, List, Generator
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
# Third Party
|
# Third Party
|
||||||
import yaml
|
import yaml
|
||||||
|
from pydantic import ValidationError
|
||||||
|
|
||||||
# Project
|
# Project
|
||||||
from hyperglass.log import (
|
from hyperglass.log import (
|
||||||
|
|
@ -17,18 +18,13 @@ from hyperglass.log import (
|
||||||
enable_syslog_logging,
|
enable_syslog_logging,
|
||||||
)
|
)
|
||||||
from hyperglass.util import set_app_path, set_cache_env, current_log_level
|
from hyperglass.util import set_app_path, set_cache_env, current_log_level
|
||||||
from hyperglass.defaults import CREDIT, DEFAULT_DETAILS
|
from hyperglass.defaults import CREDIT
|
||||||
from hyperglass.constants import (
|
from hyperglass.constants import PARSED_RESPONSE_FIELDS, __version__
|
||||||
SUPPORTED_QUERY_TYPES,
|
|
||||||
PARSED_RESPONSE_FIELDS,
|
|
||||||
__version__,
|
|
||||||
)
|
|
||||||
from hyperglass.exceptions import ConfigError, ConfigMissing
|
|
||||||
from hyperglass.util.files import check_path
|
from hyperglass.util.files import check_path
|
||||||
|
from hyperglass.exceptions.private import ConfigError, ConfigMissing
|
||||||
from hyperglass.models.commands.generic import Directive
|
|
||||||
from hyperglass.models.config.params import Params
|
from hyperglass.models.config.params import Params
|
||||||
from hyperglass.models.config.devices import Devices
|
from hyperglass.models.config.devices import Devices
|
||||||
|
from hyperglass.models.commands.generic import Directive
|
||||||
|
|
||||||
# Local
|
# Local
|
||||||
from .markdown import get_markdown
|
from .markdown import get_markdown
|
||||||
|
|
@ -84,10 +80,9 @@ def _config_required(config_path: Path) -> Dict:
|
||||||
config = yaml.safe_load(cf)
|
config = yaml.safe_load(cf)
|
||||||
|
|
||||||
except (yaml.YAMLError, yaml.MarkedYAMLError) as yaml_error:
|
except (yaml.YAMLError, yaml.MarkedYAMLError) as yaml_error:
|
||||||
raise ConfigError(str(yaml_error))
|
raise ConfigError(message="Error reading YAML file: '{e}'", e=yaml_error)
|
||||||
|
|
||||||
if config is None:
|
if config is None:
|
||||||
log.critical("{} appears to be empty", str(config_path))
|
|
||||||
raise ConfigMissing(missing_item=config_path.name)
|
raise ConfigMissing(missing_item=config_path.name)
|
||||||
|
|
||||||
return config
|
return config
|
||||||
|
|
@ -106,20 +101,25 @@ def _config_optional(config_path: Path) -> Dict:
|
||||||
config = yaml.safe_load(cf) or {}
|
config = yaml.safe_load(cf) or {}
|
||||||
|
|
||||||
except (yaml.YAMLError, yaml.MarkedYAMLError) as yaml_error:
|
except (yaml.YAMLError, yaml.MarkedYAMLError) as yaml_error:
|
||||||
raise ConfigError(error_msg=str(yaml_error))
|
raise ConfigError(message="Error reading YAML file: '{e}'", e=yaml_error)
|
||||||
|
|
||||||
return config
|
return config
|
||||||
|
|
||||||
|
|
||||||
def _get_commands(data: Dict) -> Sequence[Directive]:
|
def _get_commands(data: Dict) -> List[Directive]:
|
||||||
commands = []
|
commands = []
|
||||||
for name, command in data.items():
|
for name, command in data.items():
|
||||||
commands.append(Directive(id=name, **command))
|
try:
|
||||||
|
commands.append(Directive(id=name, **command))
|
||||||
|
except ValidationError as err:
|
||||||
|
raise ConfigError(
|
||||||
|
message="Validation error in command '{c}': '{e}'", c=name, e=err
|
||||||
|
) from err
|
||||||
return commands
|
return commands
|
||||||
|
|
||||||
|
|
||||||
def _device_commands(
|
def _device_commands(
|
||||||
device: Dict, directives: Sequence[Directive]
|
device: Dict, directives: List[Directive]
|
||||||
) -> Generator[Directive, None, None]:
|
) -> Generator[Directive, None, None]:
|
||||||
device_commands = device.get("commands", [])
|
device_commands = device.get("commands", [])
|
||||||
for directive in directives:
|
for directive in directives:
|
||||||
|
|
@ -127,7 +127,7 @@ def _device_commands(
|
||||||
yield directive
|
yield directive
|
||||||
|
|
||||||
|
|
||||||
def _get_devices(data: Sequence[Dict], directives: Sequence[Directive]) -> Devices:
|
def _get_devices(data: List[Dict], directives: List[Directive]) -> Devices:
|
||||||
for device in data:
|
for device in data:
|
||||||
device_commands = list(_device_commands(device, directives))
|
device_commands = list(_device_commands(device, directives))
|
||||||
device["commands"] = device_commands
|
device["commands"] = device_commands
|
||||||
|
|
@ -141,7 +141,7 @@ set_log_level(logger=log, debug=user_config.get("debug", True))
|
||||||
|
|
||||||
# Map imported user configuration to expected schema.
|
# Map imported user configuration to expected schema.
|
||||||
log.debug("Unvalidated configuration from {}: {}", CONFIG_MAIN, user_config)
|
log.debug("Unvalidated configuration from {}: {}", CONFIG_MAIN, user_config)
|
||||||
params = validate_config(config=user_config, importer=Params)
|
params: Params = validate_config(config=user_config, importer=Params)
|
||||||
|
|
||||||
# Re-evaluate debug state after config is validated
|
# Re-evaluate debug state after config is validated
|
||||||
log_level = current_log_level(log)
|
log_level = current_log_level(log)
|
||||||
|
|
@ -159,11 +159,7 @@ commands = _get_commands(_user_commands)
|
||||||
# Map imported user devices to expected schema.
|
# Map imported user devices to expected schema.
|
||||||
_user_devices = _config_required(CONFIG_DEVICES)
|
_user_devices = _config_required(CONFIG_DEVICES)
|
||||||
log.debug("Unvalidated devices from {}: {}", CONFIG_DEVICES, _user_devices)
|
log.debug("Unvalidated devices from {}: {}", CONFIG_DEVICES, _user_devices)
|
||||||
# devices = validate_config(config=_user_devices.get("routers", []), importer=Devices)
|
devices: Devices = _get_devices(_user_devices.get("routers", []), commands)
|
||||||
devices = _get_devices(_user_devices.get("routers", []), commands)
|
|
||||||
|
|
||||||
# Validate commands are both supported and properly mapped.
|
|
||||||
# validate_nos_commands(devices.all_nos, commands)
|
|
||||||
|
|
||||||
# Set cache configurations to environment variables, so they can be
|
# Set cache configurations to environment variables, so they can be
|
||||||
# used without importing this module (Gunicorn, etc).
|
# used without importing this module (Gunicorn, etc).
|
||||||
|
|
@ -223,22 +219,12 @@ def _build_networks() -> List[Dict]:
|
||||||
"name": device.name,
|
"name": device.name,
|
||||||
"network": device.network.display_name,
|
"network": device.network.display_name,
|
||||||
"directives": [c.frontend(params) for c in device.commands],
|
"directives": [c.frontend(params) for c in device.commands],
|
||||||
"vrfs": [
|
|
||||||
{
|
|
||||||
"_id": vrf._id,
|
|
||||||
"display_name": vrf.display_name,
|
|
||||||
"default": vrf.default,
|
|
||||||
"ipv4": True if vrf.ipv4 else False, # noqa: IF100
|
|
||||||
"ipv6": True if vrf.ipv6 else False, # noqa: IF100
|
|
||||||
}
|
|
||||||
for vrf in device.vrfs
|
|
||||||
],
|
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
networks.append(network_def)
|
networks.append(network_def)
|
||||||
|
|
||||||
if not networks:
|
if not networks:
|
||||||
raise ConfigError(error_msg="Unable to build network to device mapping")
|
raise ConfigError(message="Unable to build network to device mapping")
|
||||||
return networks
|
return networks
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -247,51 +233,12 @@ content_params = json.loads(
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def _build_vrf_help() -> Dict:
|
|
||||||
"""Build a dict of vrfs as keys, help content as values."""
|
|
||||||
all_help = {}
|
|
||||||
for vrf in devices.vrf_objects:
|
|
||||||
|
|
||||||
vrf_help = {}
|
|
||||||
for command in SUPPORTED_QUERY_TYPES:
|
|
||||||
cmd = getattr(vrf.info, command)
|
|
||||||
if cmd.enable:
|
|
||||||
help_params = {**content_params, **cmd.params.dict()}
|
|
||||||
|
|
||||||
if help_params["title"] is None:
|
|
||||||
command_params = getattr(params.queries, command)
|
|
||||||
help_params[
|
|
||||||
"title"
|
|
||||||
] = f"{vrf.display_name}: {command_params.display_name}"
|
|
||||||
|
|
||||||
md = get_markdown(
|
|
||||||
config_path=cmd,
|
|
||||||
default=DEFAULT_DETAILS[command],
|
|
||||||
params=help_params,
|
|
||||||
)
|
|
||||||
|
|
||||||
vrf_help.update(
|
|
||||||
{
|
|
||||||
command: {
|
|
||||||
"content": md,
|
|
||||||
"enable": cmd.enable,
|
|
||||||
"params": help_params,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
all_help.update({vrf._id: vrf_help})
|
|
||||||
|
|
||||||
return all_help
|
|
||||||
|
|
||||||
|
|
||||||
content_greeting = get_markdown(
|
content_greeting = get_markdown(
|
||||||
config_path=params.web.greeting,
|
config_path=params.web.greeting,
|
||||||
default="",
|
default="",
|
||||||
params={"title": params.web.greeting.title},
|
params={"title": params.web.greeting.title},
|
||||||
)
|
)
|
||||||
|
|
||||||
content_vrf = _build_vrf_help()
|
|
||||||
|
|
||||||
content_credit = CREDIT.format(version=__version__)
|
content_credit = CREDIT.format(version=__version__)
|
||||||
|
|
||||||
|
|
@ -323,11 +270,7 @@ _frontend_params.update(
|
||||||
"queries": {**params.queries.map, "list": params.queries.list},
|
"queries": {**params.queries.map, "list": params.queries.list},
|
||||||
"networks": networks,
|
"networks": networks,
|
||||||
"parsed_data_fields": PARSED_RESPONSE_FIELDS,
|
"parsed_data_fields": PARSED_RESPONSE_FIELDS,
|
||||||
"content": {
|
"content": {"credit": content_credit, "greeting": content_greeting},
|
||||||
"credit": content_credit,
|
|
||||||
"vrf": content_vrf,
|
|
||||||
"greeting": content_greeting,
|
|
||||||
},
|
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
frontend_params = _frontend_params
|
frontend_params = _frontend_params
|
||||||
|
|
|
||||||
|
|
@ -9,11 +9,10 @@ from typing import Dict, List, Union, Callable
|
||||||
from pydantic import ValidationError
|
from pydantic import ValidationError
|
||||||
|
|
||||||
# Project
|
# Project
|
||||||
from hyperglass.log import log
|
|
||||||
from hyperglass.models import HyperglassModel
|
from hyperglass.models import HyperglassModel
|
||||||
from hyperglass.constants import TRANSPORT_REST, SUPPORTED_STRUCTURED_OUTPUT
|
from hyperglass.constants import TRANSPORT_REST, SUPPORTED_STRUCTURED_OUTPUT
|
||||||
from hyperglass.exceptions import ConfigError, ConfigInvalid
|
|
||||||
from hyperglass.models.commands import Commands
|
from hyperglass.models.commands import Commands
|
||||||
|
from hyperglass.exceptions.private import ConfigError, ConfigInvalid
|
||||||
|
|
||||||
|
|
||||||
def validate_nos_commands(all_nos: List[str], commands: Commands) -> bool:
|
def validate_nos_commands(all_nos: List[str], commands: Commands) -> bool:
|
||||||
|
|
@ -44,7 +43,6 @@ def validate_config(config: Union[Dict, List], importer: Callable) -> Hyperglass
|
||||||
elif isinstance(config, List):
|
elif isinstance(config, List):
|
||||||
validated = importer(config)
|
validated = importer(config)
|
||||||
except ValidationError as err:
|
except ValidationError as err:
|
||||||
log.error(str(err))
|
raise ConfigInvalid(errors=err.errors()) from None
|
||||||
raise ConfigInvalid(err.errors()) from None
|
|
||||||
|
|
||||||
return validated
|
return validated
|
||||||
|
|
|
||||||
|
|
@ -4,14 +4,14 @@
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
|
|
||||||
__name__ = "hyperglass"
|
__name__ = "hyperglass"
|
||||||
__version__ = "1.0.4"
|
__version__ = "2.0.0-dev"
|
||||||
__author__ = "Matt Love"
|
__author__ = "Matt Love"
|
||||||
__copyright__ = f"Copyright {datetime.now().year} Matthew Love"
|
__copyright__ = f"Copyright {datetime.now().year} Matthew Love"
|
||||||
__license__ = "BSD 3-Clause Clear License"
|
__license__ = "BSD 3-Clause Clear License"
|
||||||
|
|
||||||
METADATA = (__name__, __version__, __author__, __copyright__, __license__)
|
METADATA = (__name__, __version__, __author__, __copyright__, __license__)
|
||||||
|
|
||||||
MIN_PYTHON_VERSION = (3, 6)
|
MIN_PYTHON_VERSION = (3, 8)
|
||||||
|
|
||||||
MIN_NODE_VERSION = 14
|
MIN_NODE_VERSION = 14
|
||||||
|
|
||||||
|
|
@ -81,7 +81,8 @@ DRIVER_MAP = {
|
||||||
"cisco_xe": "scrapli",
|
"cisco_xe": "scrapli",
|
||||||
"cisco_xr": "scrapli",
|
"cisco_xr": "scrapli",
|
||||||
"cisco_nxos": "scrapli",
|
"cisco_nxos": "scrapli",
|
||||||
"juniper": "scrapli",
|
# TODO: Troubleshoot Juniper with Scrapli, broken after upgrading to 2021.7.30.
|
||||||
|
# "juniper": "scrapli", # noqa: E800
|
||||||
"tnsr": "scrapli",
|
"tnsr": "scrapli",
|
||||||
"frr": "scrapli",
|
"frr": "scrapli",
|
||||||
"frr_legacy": "hyperglass_agent",
|
"frr_legacy": "hyperglass_agent",
|
||||||
|
|
|
||||||
|
|
@ -6,18 +6,12 @@ import datetime
|
||||||
# Third Party
|
# Third Party
|
||||||
import jwt
|
import jwt
|
||||||
|
|
||||||
# Project
|
|
||||||
from hyperglass.exceptions import RestError
|
|
||||||
|
|
||||||
|
|
||||||
async def jwt_decode(payload: str, secret: str) -> str:
|
async def jwt_decode(payload: str, secret: str) -> str:
|
||||||
"""Decode & validate an encoded JSON Web Token (JWT)."""
|
"""Decode & validate an encoded JSON Web Token (JWT)."""
|
||||||
try:
|
decoded = jwt.decode(payload, secret, algorithm="HS256")
|
||||||
decoded = jwt.decode(payload, secret, algorithm="HS256")
|
decoded = decoded["payload"]
|
||||||
decoded = decoded["payload"]
|
return decoded
|
||||||
return decoded
|
|
||||||
except (KeyError, jwt.PyJWTError) as exp:
|
|
||||||
raise RestError(str(exp)) from None
|
|
||||||
|
|
||||||
|
|
||||||
async def jwt_encode(payload: str, secret: str, duration: int) -> str:
|
async def jwt_encode(payload: str, secret: str, duration: int) -> str:
|
||||||
|
|
|
||||||
10
hyperglass/exceptions/__init__.py
Normal file
10
hyperglass/exceptions/__init__.py
Normal file
|
|
@ -0,0 +1,10 @@
|
||||||
|
"""Custom exceptions for hyperglass."""
|
||||||
|
|
||||||
|
# Local
|
||||||
|
from ._common import HyperglassError, PublicHyperglassError, PrivateHyperglassError
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
|
"HyperglassError",
|
||||||
|
"PublicHyperglassError",
|
||||||
|
"PrivateHyperglassError",
|
||||||
|
)
|
||||||
161
hyperglass/exceptions/_common.py
Normal file
161
hyperglass/exceptions/_common.py
Normal file
|
|
@ -0,0 +1,161 @@
|
||||||
|
"""Custom exceptions for hyperglass."""
|
||||||
|
|
||||||
|
# Standard Library
|
||||||
|
import json as _json
|
||||||
|
from typing import Any, Dict, List, Union, Literal, Optional
|
||||||
|
|
||||||
|
# Project
|
||||||
|
from hyperglass.log import log
|
||||||
|
from hyperglass.util import get_fmt_keys
|
||||||
|
from hyperglass.constants import STATUS_CODE_MAP
|
||||||
|
|
||||||
|
ErrorLevel = Literal["danger", "warning"]
|
||||||
|
|
||||||
|
|
||||||
|
class HyperglassError(Exception):
|
||||||
|
"""hyperglass base exception."""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
message: str = "",
|
||||||
|
level: ErrorLevel = "warning",
|
||||||
|
keywords: Optional[List[str]] = None,
|
||||||
|
) -> None:
|
||||||
|
"""Initialize the hyperglass base exception class."""
|
||||||
|
self._message = message
|
||||||
|
self._level = level
|
||||||
|
self._keywords = keywords or []
|
||||||
|
if self._level == "warning":
|
||||||
|
log.error(repr(self))
|
||||||
|
elif self._level == "danger":
|
||||||
|
log.critical(repr(self))
|
||||||
|
else:
|
||||||
|
log.info(repr(self))
|
||||||
|
|
||||||
|
def __str__(self) -> str:
|
||||||
|
"""Return the instance's error message."""
|
||||||
|
return self._message
|
||||||
|
|
||||||
|
def __repr__(self) -> str:
|
||||||
|
"""Return the instance's severity & error message in a string."""
|
||||||
|
return f"[{self.level.upper()}] {self._message}"
|
||||||
|
|
||||||
|
def dict(self) -> Dict[str, Union[str, List[str]]]:
|
||||||
|
"""Return the instance's attributes as a dictionary."""
|
||||||
|
return {
|
||||||
|
"message": self._message,
|
||||||
|
"level": self._level,
|
||||||
|
"keywords": self._keywords,
|
||||||
|
}
|
||||||
|
|
||||||
|
def json(self) -> str:
|
||||||
|
"""Return the instance's attributes as a JSON object."""
|
||||||
|
return _json.dumps(self.__dict__())
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _safe_format(template: str, **kwargs: Dict[str, str]) -> str:
|
||||||
|
"""Safely format a string template from keyword arguments."""
|
||||||
|
|
||||||
|
keys = get_fmt_keys(template)
|
||||||
|
for key in keys:
|
||||||
|
if key not in kwargs:
|
||||||
|
kwargs.pop(key)
|
||||||
|
else:
|
||||||
|
kwargs[key] = str(kwargs[key])
|
||||||
|
return template.format(**kwargs)
|
||||||
|
|
||||||
|
def _parse_pydantic_errors(*errors: Dict[str, Any]) -> str:
|
||||||
|
|
||||||
|
errs = ("\n",)
|
||||||
|
|
||||||
|
for err in errors:
|
||||||
|
loc = " → ".join(str(loc) for loc in err["loc"])
|
||||||
|
errs += (f'Field: {loc}\n Error: {err["msg"]}\n',)
|
||||||
|
|
||||||
|
return "\n".join(errs)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def message(self) -> str:
|
||||||
|
"""Return the instance's `message` attribute."""
|
||||||
|
return self._message
|
||||||
|
|
||||||
|
@property
|
||||||
|
def level(self) -> str:
|
||||||
|
"""Return the instance's `level` attribute."""
|
||||||
|
return self._level
|
||||||
|
|
||||||
|
@property
|
||||||
|
def keywords(self) -> List[str]:
|
||||||
|
"""Return the instance's `keywords` attribute."""
|
||||||
|
return self._keywords
|
||||||
|
|
||||||
|
@property
|
||||||
|
def status_code(self) -> int:
|
||||||
|
"""Return HTTP status code based on level level."""
|
||||||
|
return STATUS_CODE_MAP.get(self._level, 500)
|
||||||
|
|
||||||
|
|
||||||
|
class PublicHyperglassError(HyperglassError):
|
||||||
|
"""Base exception class for user-facing errors.
|
||||||
|
|
||||||
|
Error text should be defined in
|
||||||
|
`hyperglass.configuration.params.messages` and associated with the
|
||||||
|
exception class at start time.
|
||||||
|
"""
|
||||||
|
|
||||||
|
_level = "warning"
|
||||||
|
_message_template = "Something went wrong."
|
||||||
|
|
||||||
|
def __init_subclass__(
|
||||||
|
cls, *, template: Optional[str] = None, level: Optional[ErrorLevel] = None
|
||||||
|
) -> None:
|
||||||
|
"""Override error attributes from subclass."""
|
||||||
|
|
||||||
|
if template is not None:
|
||||||
|
cls._message_template = template
|
||||||
|
if level is not None:
|
||||||
|
cls._level = level
|
||||||
|
|
||||||
|
def __init__(self, **kwargs: str) -> None:
|
||||||
|
"""Format error message with keyword arguments."""
|
||||||
|
if "error" in kwargs:
|
||||||
|
error = kwargs.pop("error")
|
||||||
|
error = self._safe_format(error, **kwargs)
|
||||||
|
kwargs["error"] = error
|
||||||
|
self._message = self._safe_format(self._message_template, **kwargs)
|
||||||
|
self._keywords = list(kwargs.values())
|
||||||
|
super().__init__(
|
||||||
|
message=self._message, level=self._level, keywords=self._keywords
|
||||||
|
)
|
||||||
|
|
||||||
|
def handle_error(self, error: Any) -> None:
|
||||||
|
"""Add details to the error template, if provided."""
|
||||||
|
|
||||||
|
if error is not None:
|
||||||
|
self._message_template = self._message_template + " ({error})"
|
||||||
|
|
||||||
|
|
||||||
|
class PrivateHyperglassError(HyperglassError):
|
||||||
|
"""Base exception class for internal system errors.
|
||||||
|
|
||||||
|
Error text is dynamic based on the exception being caught.
|
||||||
|
"""
|
||||||
|
|
||||||
|
_level = "warning"
|
||||||
|
|
||||||
|
def __init_subclass__(cls, *, level: Optional[ErrorLevel] = None) -> None:
|
||||||
|
"""Override error attributes from subclass."""
|
||||||
|
if level is not None:
|
||||||
|
cls._level = level
|
||||||
|
|
||||||
|
def __init__(self, message: str, **kwargs: Any) -> None:
|
||||||
|
"""Format error message with keyword arguments."""
|
||||||
|
if "error" in kwargs:
|
||||||
|
error = kwargs.pop("error")
|
||||||
|
error = self._safe_format(error, **kwargs)
|
||||||
|
kwargs["error"] = error
|
||||||
|
self._message = self._safe_format(message, **kwargs)
|
||||||
|
self._keywords = list(kwargs.values())
|
||||||
|
super().__init__(
|
||||||
|
message=self._message, level=self._level, keywords=self._keywords
|
||||||
|
)
|
||||||
94
hyperglass/exceptions/private.py
Normal file
94
hyperglass/exceptions/private.py
Normal file
|
|
@ -0,0 +1,94 @@
|
||||||
|
"""Internal/private exceptions."""
|
||||||
|
|
||||||
|
# Standard Library
|
||||||
|
from typing import Any, Dict, List
|
||||||
|
|
||||||
|
# Local
|
||||||
|
from ._common import ErrorLevel, PrivateHyperglassError
|
||||||
|
|
||||||
|
|
||||||
|
class ExternalError(PrivateHyperglassError):
|
||||||
|
"""Raised when an error during a connection to an external service occurs."""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self, message: str, level: ErrorLevel, **kwargs: Dict[str, Any]
|
||||||
|
) -> None:
|
||||||
|
"""Set level according to level argument."""
|
||||||
|
self._level = level
|
||||||
|
super().__init__(message, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
class UnsupportedDevice(PrivateHyperglassError):
|
||||||
|
"""Raised when an input NOS is not in the supported NOS list."""
|
||||||
|
|
||||||
|
def __init__(self, nos: str) -> None:
|
||||||
|
"""Show the unsupported NOS and a list of supported drivers."""
|
||||||
|
# Third Party
|
||||||
|
from netmiko.ssh_dispatcher import CLASS_MAPPER # type: ignore
|
||||||
|
|
||||||
|
# Project
|
||||||
|
from hyperglass.constants import DRIVER_MAP
|
||||||
|
|
||||||
|
drivers = ("", *[*DRIVER_MAP.keys(), *CLASS_MAPPER.keys()].sort())
|
||||||
|
driver_list = "\n - ".join(drivers)
|
||||||
|
super().__init__(
|
||||||
|
message=f"'{nos}' is not supported. Must be one of:{driver_list}"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class InputValidationError(PrivateHyperglassError):
|
||||||
|
"""Raised when a validation check fails.
|
||||||
|
|
||||||
|
This needs to be separate from `hyperglass.exceptions.public` for
|
||||||
|
circular import reasons.
|
||||||
|
"""
|
||||||
|
|
||||||
|
kwargs: Dict[str, Any]
|
||||||
|
|
||||||
|
def __init__(self, **kwargs: Dict[str, Any]) -> None:
|
||||||
|
"""Set kwargs instance attribute so it can be consumed later.
|
||||||
|
|
||||||
|
`hyperglass.exceptions.public.InputInvalid` will be raised from
|
||||||
|
these kwargs.
|
||||||
|
"""
|
||||||
|
self.kwargs = kwargs
|
||||||
|
super().__init__(message="", **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
class ConfigInvalid(PrivateHyperglassError):
|
||||||
|
"""Raised when a config item fails type or option validation."""
|
||||||
|
|
||||||
|
def __init__(self, errors: List[Dict[str, Any]]) -> None:
|
||||||
|
"""Parse Pydantic ValidationError."""
|
||||||
|
|
||||||
|
super().__init__(message=self._parse_pydantic_errors(*errors))
|
||||||
|
|
||||||
|
|
||||||
|
class ConfigMissing(PrivateHyperglassError):
|
||||||
|
"""Raised when a required config file or item is missing or undefined."""
|
||||||
|
|
||||||
|
def __init__(self, missing_item: Any) -> None:
|
||||||
|
"""Show the missing configuration item."""
|
||||||
|
super().__init__(
|
||||||
|
(
|
||||||
|
"{item} is missing or undefined and is required to start hyperglass. "
|
||||||
|
"Please consult the installation documentation."
|
||||||
|
),
|
||||||
|
item=missing_item,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class ConfigError(PrivateHyperglassError):
|
||||||
|
"""Raised for generic user-config issues."""
|
||||||
|
|
||||||
|
|
||||||
|
class UnsupportedError(PrivateHyperglassError):
|
||||||
|
"""Raised when an unsupported action or request occurs."""
|
||||||
|
|
||||||
|
|
||||||
|
class ParsingError(PrivateHyperglassError):
|
||||||
|
"""Raised when there is a problem parsing a structured response."""
|
||||||
|
|
||||||
|
|
||||||
|
class DependencyError(PrivateHyperglassError):
|
||||||
|
"""Raised when a dependency is missing, not running, or on the wrong version."""
|
||||||
165
hyperglass/exceptions/public.py
Normal file
165
hyperglass/exceptions/public.py
Normal file
|
|
@ -0,0 +1,165 @@
|
||||||
|
"""User-facing/Public exceptions."""
|
||||||
|
|
||||||
|
# Standard Library
|
||||||
|
from typing import Any, Dict, Optional, ForwardRef
|
||||||
|
|
||||||
|
# Project
|
||||||
|
from hyperglass.configuration import params
|
||||||
|
|
||||||
|
# Local
|
||||||
|
from ._common import PublicHyperglassError
|
||||||
|
|
||||||
|
Query = ForwardRef("Query")
|
||||||
|
Device = ForwardRef("Device")
|
||||||
|
|
||||||
|
|
||||||
|
class ScrapeError(
|
||||||
|
PublicHyperglassError, template=params.messages.connection_error, level="danger",
|
||||||
|
):
|
||||||
|
"""Raised when an SSH driver error occurs."""
|
||||||
|
|
||||||
|
def __init__(self, error: BaseException, *, device: Device):
|
||||||
|
"""Initialize parent error."""
|
||||||
|
super().__init__(error=str(error), device=device.name, proxy=device.proxy)
|
||||||
|
|
||||||
|
|
||||||
|
class AuthError(
|
||||||
|
PublicHyperglassError, template=params.messages.authentication_error, level="danger"
|
||||||
|
):
|
||||||
|
"""Raised when authentication to a device fails."""
|
||||||
|
|
||||||
|
def __init__(self, error: BaseException, *, device: Device):
|
||||||
|
"""Initialize parent error."""
|
||||||
|
super().__init__(error=str(error), device=device.name, proxy=device.proxy)
|
||||||
|
|
||||||
|
|
||||||
|
class RestError(
|
||||||
|
PublicHyperglassError, template=params.messages.connection_error, level="danger"
|
||||||
|
):
|
||||||
|
"""Raised upon a rest API client error."""
|
||||||
|
|
||||||
|
def __init__(self, error: BaseException, *, device: Device):
|
||||||
|
"""Initialize parent error."""
|
||||||
|
super().__init__(error=str(error), device=device.name)
|
||||||
|
|
||||||
|
|
||||||
|
class DeviceTimeout(
|
||||||
|
PublicHyperglassError, template=params.messages.request_timeout, level="danger"
|
||||||
|
):
|
||||||
|
"""Raised when the connection to a device times out."""
|
||||||
|
|
||||||
|
def __init__(self, error: BaseException, *, device: Device):
|
||||||
|
"""Initialize parent error."""
|
||||||
|
super().__init__(error=str(error), device=device.name, proxy=device.proxy)
|
||||||
|
|
||||||
|
|
||||||
|
class InvalidQuery(PublicHyperglassError, template=params.messages.invalid_query):
    """Public error raised when input validation of a query fails."""

    def __init__(
        self, error: Optional[str] = None, *, query: "Query", **kwargs: Dict[str, Any]
    ) -> None:
        """Merge query context into the template arguments.

        Caller-supplied kwargs override the derived `query_type`/`target`
        values; when an underlying `error` is given it is also recorded via
        `handle_error` and added to the template context.
        """
        context: Dict[str, Any] = {
            "query_type": query.query_type,
            "target": query.query_target,
        }
        context.update(kwargs)

        if error is not None:
            self.handle_error(error)
            context["error"] = str(error)

        super().__init__(**context)
|
||||||
|
|
||||||
|
|
||||||
|
class NotFound(PublicHyperglassError, template=params.messages.not_found):
    """Raised when an object is not found.

    Base class for the more specific lookup errors; subclasses supply the
    object's `type` label and `name` for the message template.
    """

    def __init__(self, type: str, name: str, **kwargs: Dict[str, str]) -> None:
        """Initialize parent error.

        `type` shadows the builtin, but it is part of the public keyword
        signature, so it is kept as-is. Extra kwargs are forwarded verbatim
        to the message template.
        """
        super().__init__(type=type, name=name, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
class QueryLocationNotFound(NotFound):
    """Public error raised when a query location is not found."""

    def __init__(self, location: Any, **kwargs: Dict[str, Any]) -> None:
        """Build a NotFound error labeled with the configured query-location text."""
        label = params.web.text.query_location
        super().__init__(type=label, name=str(location), **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
class QueryTypeNotFound(NotFound):
    """Public error raised when a query type is not found."""

    def __init__(self, query_type: Any, **kwargs: Dict[str, Any]) -> None:
        """Build a NotFound error labeled with the configured query-type text."""
        label = params.web.text.query_type
        super().__init__(type=label, name=str(query_type), **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
class QueryGroupNotFound(NotFound):
    """Public error raised when a query group is not found."""

    def __init__(self, group: Any, **kwargs: Dict[str, Any]) -> None:
        """Build a NotFound error labeled with the configured query-group text."""
        super().__init__(
            type=params.web.text.query_group,
            name=str(group),
            **kwargs,
        )
|
||||||
|
|
||||||
|
|
||||||
|
class InputInvalid(PublicHyperglassError, template=params.messages.invalid_input):
    """Public error raised when validation of a query target fails."""

    def __init__(
        self, error: Optional[Any] = None, *, target: str, **kwargs: Dict[str, Any]
    ) -> None:
        """Record the offending target and, optionally, the underlying error.

        Caller-supplied kwargs override the derived `target` value; an
        underlying `error` is also registered via `handle_error`.
        """
        context: Dict[str, Any] = {"target": target}
        context.update(kwargs)

        if error is not None:
            self.handle_error(error)
            context["error"] = str(error)

        super().__init__(**context)
|
||||||
|
|
||||||
|
|
||||||
|
class InputNotAllowed(PublicHyperglassError, template=params.messages.acl_not_allowed):
    """Public error raised when input validation fails due to a configured check."""

    def __init__(
        self, error: Optional[str] = None, *, query: Query, **kwargs: Dict[str, Any]
    ) -> None:
        """Merge query context into the template arguments.

        Caller-supplied kwargs override the derived `query_type`/`target`
        values; an underlying `error` is also registered via `handle_error`.
        """
        context: Dict[str, Any] = {
            "query_type": query.query_type,
            "target": query.query_target,
        }
        context.update(kwargs)

        if error is not None:
            self.handle_error(error)
            context["error"] = str(error)

        super().__init__(**context)
|
||||||
|
|
||||||
|
|
||||||
|
class ResponseEmpty(PublicHyperglassError, template=params.messages.no_output):
    """Public error raised when a device responds, but the response is empty."""

    def __init__(
        self, error: Optional[str] = None, *, query: Query, **kwargs: Dict[str, Any]
    ) -> None:
        """Merge query context into the template arguments.

        Caller-supplied kwargs override the derived `query_type`/`target`
        values; an underlying `error` is also registered via `handle_error`.
        """
        context: Dict[str, Any] = {
            "query_type": query.query_type,
            "target": query.query_target,
        }
        context.update(kwargs)

        if error is not None:
            self.handle_error(error)
            context["error"] = str(error)

        super().__init__(**context)
|
||||||
|
|
@ -23,7 +23,7 @@ class Connection:
|
||||||
self.query_data = query_data
|
self.query_data = query_data
|
||||||
self.query_type = self.query_data.query_type
|
self.query_type = self.query_data.query_type
|
||||||
self.query_target = self.query_data.query_target
|
self.query_target = self.query_data.query_target
|
||||||
self._query = Construct(device=self.device, query_data=self.query_data)
|
self._query = Construct(device=self.device, query=self.query_data)
|
||||||
self.query = self._query.queries()
|
self.query = self._query.queries()
|
||||||
|
|
||||||
async def parsed_response( # noqa: C901 ("too complex")
|
async def parsed_response( # noqa: C901 ("too complex")
|
||||||
|
|
|
||||||
|
|
@ -8,27 +8,38 @@ hyperglass API modules.
|
||||||
# Standard Library
|
# Standard Library
|
||||||
import re
|
import re
|
||||||
import json as _json
|
import json as _json
|
||||||
from operator import attrgetter
|
|
||||||
|
|
||||||
# Project
|
# Project
|
||||||
from hyperglass.log import log
|
from hyperglass.log import log
|
||||||
|
from hyperglass.util import get_fmt_keys
|
||||||
from hyperglass.constants import TRANSPORT_REST, TARGET_FORMAT_SPACE
|
from hyperglass.constants import TRANSPORT_REST, TARGET_FORMAT_SPACE
|
||||||
from hyperglass.configuration import commands
|
from hyperglass.models.api.query import Query
|
||||||
|
from hyperglass.exceptions.public import InputInvalid
|
||||||
|
from hyperglass.exceptions.private import ConfigError
|
||||||
|
from hyperglass.models.config.devices import Device
|
||||||
|
from hyperglass.models.commands.generic import Directive
|
||||||
|
|
||||||
|
|
||||||
class Construct:
|
class Construct:
|
||||||
"""Construct SSH commands/REST API parameters from validated query data."""
|
"""Construct SSH commands/REST API parameters from validated query data."""
|
||||||
|
|
||||||
def __init__(self, device, query_data):
|
directive: Directive
|
||||||
|
device: Device
|
||||||
|
query: Query
|
||||||
|
transport: str
|
||||||
|
target: str
|
||||||
|
|
||||||
|
def __init__(self, device, query):
|
||||||
"""Initialize command construction."""
|
"""Initialize command construction."""
|
||||||
log.debug(
|
log.debug(
|
||||||
"Constructing {} query for '{}'",
|
"Constructing '{}' query for '{}'",
|
||||||
query_data.query_type,
|
query.query_type,
|
||||||
str(query_data.query_target),
|
str(query.query_target),
|
||||||
)
|
)
|
||||||
|
self.query = query
|
||||||
self.device = device
|
self.device = device
|
||||||
self.query_data = query_data
|
self.target = self.query.query_target
|
||||||
self.target = self.query_data.query_target
|
self.directive = query.directive
|
||||||
|
|
||||||
# Set transport method based on NOS type
|
# Set transport method based on NOS type
|
||||||
self.transport = "scrape"
|
self.transport = "scrape"
|
||||||
|
|
@ -37,76 +48,55 @@ class Construct:
|
||||||
|
|
||||||
# Remove slashes from target for required platforms
|
# Remove slashes from target for required platforms
|
||||||
if self.device.nos in TARGET_FORMAT_SPACE:
|
if self.device.nos in TARGET_FORMAT_SPACE:
|
||||||
self.target = re.sub(r"\/", r" ", str(self.query_data.query_target))
|
self.target = re.sub(r"\/", r" ", str(self.query.query_target))
|
||||||
|
|
||||||
# Set AFIs for based on query type
|
with Formatter(self.device.nos, self.query.query_type) as formatter:
|
||||||
if self.query_data.query_type in ("bgp_route", "ping", "traceroute"):
|
self.target = formatter(self.query.query_target)
|
||||||
# For IP queries, AFIs are enabled (not null/None) VRF -> AFI definitions
|
|
||||||
# where the IP version matches the IP version of the target.
|
|
||||||
self.afis = [
|
|
||||||
v
|
|
||||||
for v in (
|
|
||||||
self.query_data.query_vrf.ipv4,
|
|
||||||
self.query_data.query_vrf.ipv6,
|
|
||||||
)
|
|
||||||
if v is not None and self.query_data.query_target.version == v.version
|
|
||||||
]
|
|
||||||
elif self.query_data.query_type in ("bgp_aspath", "bgp_community"):
|
|
||||||
# For AS Path/Community queries, AFIs are just enabled VRF -> AFI
|
|
||||||
# definitions, no IP version checking is performed (since there is no IP).
|
|
||||||
self.afis = [
|
|
||||||
v
|
|
||||||
for v in (
|
|
||||||
self.query_data.query_vrf.ipv4,
|
|
||||||
self.query_data.query_vrf.ipv6,
|
|
||||||
)
|
|
||||||
if v is not None
|
|
||||||
]
|
|
||||||
|
|
||||||
with Formatter(self.device.nos, self.query_data.query_type) as formatter:
|
|
||||||
self.target = formatter(self.query_data.query_target)
|
|
||||||
|
|
||||||
def json(self, afi):
|
def json(self, afi):
|
||||||
"""Return JSON version of validated query for REST devices."""
|
"""Return JSON version of validated query for REST devices."""
|
||||||
log.debug("Building JSON query for {q}", q=repr(self.query_data))
|
log.debug("Building JSON query for {q}", q=repr(self.query))
|
||||||
return _json.dumps(
|
return _json.dumps(
|
||||||
{
|
{
|
||||||
"query_type": self.query_data.query_type,
|
"query_type": self.query.query_type,
|
||||||
"vrf": self.query_data.query_vrf.name,
|
"vrf": self.query.query_vrf.name,
|
||||||
"afi": afi.protocol,
|
"afi": afi.protocol,
|
||||||
"source": str(afi.source_address),
|
"source": str(afi.source_address),
|
||||||
"target": str(self.target),
|
"target": str(self.target),
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
def scrape(self, afi):
|
def format(self, command: str) -> str:
|
||||||
"""Return formatted command for 'Scrape' endpoints (SSH)."""
|
"""Return formatted command for 'Scrape' endpoints (SSH)."""
|
||||||
if self.device.structured_output:
|
keys = get_fmt_keys(command)
|
||||||
cmd_paths = (
|
attrs = {k: v for k, v in self.device.attrs.items() if k in keys}
|
||||||
self.device.nos,
|
for key in [k for k in keys if k != "target"]:
|
||||||
"structured",
|
if key not in attrs:
|
||||||
afi.protocol,
|
raise ConfigError(
|
||||||
self.query_data.query_type,
|
(
|
||||||
)
|
"Command '{c}' has attribute '{k}', "
|
||||||
else:
|
"which is missing from device '{d}'"
|
||||||
cmd_paths = (self.device.commands, afi.protocol, self.query_data.query_type)
|
),
|
||||||
|
level="danger",
|
||||||
command = attrgetter(".".join(cmd_paths))(commands)
|
c=self.directive.name,
|
||||||
return command.format(
|
k=key,
|
||||||
target=self.target,
|
d=self.device.name,
|
||||||
source=str(afi.source_address),
|
)
|
||||||
vrf=self.query_data.query_vrf.name,
|
return command.format(target=self.target, **attrs)
|
||||||
)
|
|
||||||
|
|
||||||
def queries(self):
|
def queries(self):
|
||||||
"""Return queries for each enabled AFI."""
|
"""Return queries for each enabled AFI."""
|
||||||
query = []
|
query = []
|
||||||
|
|
||||||
for afi in self.afis:
|
rules = [r for r in self.directive.rules if r._passed is True]
|
||||||
if self.transport == "rest":
|
if len(rules) < 1:
|
||||||
query.append(self.json(afi=afi))
|
raise InputInvalid(
|
||||||
else:
|
error="No validation rules matched target '{target}'", query=self.query
|
||||||
query.append(self.scrape(afi=afi))
|
)
|
||||||
|
|
||||||
|
for rule in [r for r in self.directive.rules if r._passed is True]:
|
||||||
|
for command in rule.commands:
|
||||||
|
query.append(self.format(command))
|
||||||
|
|
||||||
log.debug("Constructed query: {}", query)
|
log.debug("Constructed query: {}", query)
|
||||||
return query
|
return query
|
||||||
|
|
|
||||||
|
|
@ -17,8 +17,8 @@ import httpx
|
||||||
from hyperglass.log import log
|
from hyperglass.log import log
|
||||||
from hyperglass.util import parse_exception
|
from hyperglass.util import parse_exception
|
||||||
from hyperglass.encode import jwt_decode, jwt_encode
|
from hyperglass.encode import jwt_decode, jwt_encode
|
||||||
from hyperglass.exceptions import RestError, ResponseEmpty
|
|
||||||
from hyperglass.configuration import params
|
from hyperglass.configuration import params
|
||||||
|
from hyperglass.exceptions.public import RestError, ResponseEmpty
|
||||||
|
|
||||||
# Local
|
# Local
|
||||||
from ._common import Connection
|
from ._common import Connection
|
||||||
|
|
@ -89,51 +89,29 @@ class AgentConnection(Connection):
|
||||||
responses += (decoded,)
|
responses += (decoded,)
|
||||||
|
|
||||||
elif raw_response.status_code == 204:
|
elif raw_response.status_code == 204:
|
||||||
raise ResponseEmpty(
|
raise ResponseEmpty(query=self.query_data)
|
||||||
params.messages.no_output, device_name=self.device.name,
|
|
||||||
)
|
|
||||||
|
|
||||||
else:
|
else:
|
||||||
log.error(raw_response.text)
|
log.error(raw_response.text)
|
||||||
|
|
||||||
except httpx.exceptions.HTTPError as rest_error:
|
except httpx.exceptions.HTTPError as rest_error:
|
||||||
msg = parse_exception(rest_error)
|
msg = parse_exception(rest_error)
|
||||||
log.error("Error connecting to device {}: {}", self.device.name, msg)
|
raise RestError(error=httpx.exceptions.HTTPError(msg), device=self.device)
|
||||||
raise RestError(
|
|
||||||
params.messages.connection_error,
|
|
||||||
device_name=self.device.name,
|
|
||||||
error=msg,
|
|
||||||
)
|
|
||||||
except OSError as ose:
|
except OSError as ose:
|
||||||
log.critical(str(ose))
|
raise RestError(error=ose, device=self.device)
|
||||||
raise RestError(
|
|
||||||
params.messages.connection_error,
|
|
||||||
device_name=self.device.name,
|
|
||||||
error="System error",
|
|
||||||
)
|
|
||||||
except CertificateError as cert_error:
|
except CertificateError as cert_error:
|
||||||
log.critical(str(cert_error))
|
|
||||||
msg = parse_exception(cert_error)
|
msg = parse_exception(cert_error)
|
||||||
raise RestError(
|
raise RestError(error=CertificateError(cert_error), device=self.device)
|
||||||
params.messages.connection_error,
|
|
||||||
device_name=self.device.name,
|
|
||||||
error=f"{msg}: {cert_error}",
|
|
||||||
)
|
|
||||||
|
|
||||||
if raw_response.status_code != 200:
|
if raw_response.status_code != 200:
|
||||||
log.error("Response code is {}", raw_response.status_code)
|
|
||||||
raise RestError(
|
raise RestError(
|
||||||
params.messages.connection_error,
|
error=ConnectionError(f"Response code {raw_response.status_code}"),
|
||||||
device_name=self.device.name,
|
device=self.device,
|
||||||
error=params.messages.general,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
if not responses:
|
if not responses:
|
||||||
log.error("No response from device {}", self.device.name)
|
raise ResponseEmpty(query=self.query_data)
|
||||||
raise RestError(
|
|
||||||
params.messages.connection_error,
|
|
||||||
device_name=self.device.name,
|
|
||||||
error=params.messages.no_response,
|
|
||||||
)
|
|
||||||
|
|
||||||
return responses
|
return responses
|
||||||
|
|
|
||||||
|
|
@ -5,9 +5,9 @@ from typing import Callable
|
||||||
|
|
||||||
# Project
|
# Project
|
||||||
from hyperglass.log import log
|
from hyperglass.log import log
|
||||||
from hyperglass.exceptions import ScrapeError
|
|
||||||
from hyperglass.configuration import params
|
from hyperglass.configuration import params
|
||||||
from hyperglass.compat._sshtunnel import BaseSSHTunnelForwarderError, open_tunnel
|
from hyperglass.compat._sshtunnel import BaseSSHTunnelForwarderError, open_tunnel
|
||||||
|
from hyperglass.exceptions.public import ScrapeError
|
||||||
|
|
||||||
# Local
|
# Local
|
||||||
from ._common import Connection
|
from ._common import Connection
|
||||||
|
|
@ -52,11 +52,6 @@ class SSHConnection(Connection):
|
||||||
f"Error connecting to device {self.device.name} via "
|
f"Error connecting to device {self.device.name} via "
|
||||||
f"proxy {proxy.name}"
|
f"proxy {proxy.name}"
|
||||||
)
|
)
|
||||||
raise ScrapeError(
|
raise ScrapeError(error=scrape_proxy_error, device=self.device)
|
||||||
params.messages.connection_error,
|
|
||||||
device_name=self.device.name,
|
|
||||||
proxy=proxy.name,
|
|
||||||
error=str(scrape_proxy_error),
|
|
||||||
)
|
|
||||||
|
|
||||||
return opener
|
return opener
|
||||||
|
|
|
||||||
|
|
@ -8,7 +8,7 @@ import math
|
||||||
from typing import Iterable
|
from typing import Iterable
|
||||||
|
|
||||||
# Third Party
|
# Third Party
|
||||||
from netmiko import (
|
from netmiko import ( # type: ignore
|
||||||
ConnectHandler,
|
ConnectHandler,
|
||||||
NetMikoTimeoutException,
|
NetMikoTimeoutException,
|
||||||
NetMikoAuthenticationException,
|
NetMikoAuthenticationException,
|
||||||
|
|
@ -16,8 +16,8 @@ from netmiko import (
|
||||||
|
|
||||||
# Project
|
# Project
|
||||||
from hyperglass.log import log
|
from hyperglass.log import log
|
||||||
from hyperglass.exceptions import AuthError, ScrapeError, DeviceTimeout
|
|
||||||
from hyperglass.configuration import params
|
from hyperglass.configuration import params
|
||||||
|
from hyperglass.exceptions.public import AuthError, DeviceTimeout, ResponseEmpty
|
||||||
|
|
||||||
# Local
|
# Local
|
||||||
from .ssh import SSHConnection
|
from .ssh import SSHConnection
|
||||||
|
|
@ -105,32 +105,12 @@ class NetmikoConnection(SSHConnection):
|
||||||
nm_connect_direct.disconnect()
|
nm_connect_direct.disconnect()
|
||||||
|
|
||||||
except NetMikoTimeoutException as scrape_error:
|
except NetMikoTimeoutException as scrape_error:
|
||||||
log.error(str(scrape_error))
|
raise DeviceTimeout(error=scrape_error, device=self.device)
|
||||||
raise DeviceTimeout(
|
|
||||||
params.messages.connection_error,
|
except NetMikoAuthenticationException as auth_error:
|
||||||
device_name=self.device.name,
|
raise AuthError(error=auth_error, device=self.device)
|
||||||
proxy=None,
|
|
||||||
error=params.messages.request_timeout,
|
|
||||||
)
|
|
||||||
except NetMikoAuthenticationException as auth_error:
|
|
||||||
log.error(
|
|
||||||
"Error authenticating to device {loc}: {e}",
|
|
||||||
loc=self.device.name,
|
|
||||||
e=str(auth_error),
|
|
||||||
)
|
|
||||||
|
|
||||||
raise AuthError(
|
|
||||||
params.messages.connection_error,
|
|
||||||
device_name=self.device.name,
|
|
||||||
proxy=None,
|
|
||||||
error=params.messages.authentication_error,
|
|
||||||
)
|
|
||||||
if not responses:
|
if not responses:
|
||||||
raise ScrapeError(
|
raise ResponseEmpty(query=self.query_data)
|
||||||
params.messages.connection_error,
|
|
||||||
device_name=self.device.name,
|
|
||||||
proxy=None,
|
|
||||||
error=params.messages.no_response,
|
|
||||||
)
|
|
||||||
|
|
||||||
return responses
|
return responses
|
||||||
|
|
|
||||||
|
|
@ -5,7 +5,7 @@ https://github.com/carlmontanari/scrapli
|
||||||
|
|
||||||
# Standard Library
|
# Standard Library
|
||||||
import math
|
import math
|
||||||
from typing import Sequence
|
from typing import Tuple
|
||||||
|
|
||||||
# Third Party
|
# Third Party
|
||||||
from scrapli.driver import AsyncGenericDriver
|
from scrapli.driver import AsyncGenericDriver
|
||||||
|
|
@ -24,13 +24,14 @@ from scrapli.driver.core import (
|
||||||
|
|
||||||
# Project
|
# Project
|
||||||
from hyperglass.log import log
|
from hyperglass.log import log
|
||||||
from hyperglass.exceptions import (
|
from hyperglass.configuration import params
|
||||||
|
from hyperglass.exceptions.public import (
|
||||||
AuthError,
|
AuthError,
|
||||||
ScrapeError,
|
ScrapeError,
|
||||||
DeviceTimeout,
|
DeviceTimeout,
|
||||||
UnsupportedDevice,
|
ResponseEmpty,
|
||||||
)
|
)
|
||||||
from hyperglass.configuration import params
|
from hyperglass.exceptions.private import UnsupportedDevice
|
||||||
|
|
||||||
# Local
|
# Local
|
||||||
from .ssh import SSHConnection
|
from .ssh import SSHConnection
|
||||||
|
|
@ -64,7 +65,7 @@ def _map_driver(nos: str) -> AsyncGenericDriver:
|
||||||
class ScrapliConnection(SSHConnection):
|
class ScrapliConnection(SSHConnection):
|
||||||
"""Handle a device connection via Scrapli."""
|
"""Handle a device connection via Scrapli."""
|
||||||
|
|
||||||
async def collect(self, host: str = None, port: int = None) -> Sequence:
|
async def collect(self, host: str = None, port: int = None) -> Tuple[str, ...]:
|
||||||
"""Connect directly to a device.
|
"""Connect directly to a device.
|
||||||
|
|
||||||
Directly connects to the router via Netmiko library, returns the
|
Directly connects to the router via Netmiko library, returns the
|
||||||
|
|
@ -124,37 +125,15 @@ class ScrapliConnection(SSHConnection):
|
||||||
log.debug(f'Raw response for command "{query}":\n{raw.result}')
|
log.debug(f'Raw response for command "{query}":\n{raw.result}')
|
||||||
|
|
||||||
except ScrapliTimeout as err:
|
except ScrapliTimeout as err:
|
||||||
log.error(err)
|
raise DeviceTimeout(error=err, device=self.device)
|
||||||
raise DeviceTimeout(
|
|
||||||
params.messages.connection_error,
|
except ScrapliAuthenticationFailed as err:
|
||||||
device_name=self.device.name,
|
raise AuthError(error=err, device=self.device)
|
||||||
error=params.messages.request_timeout,
|
|
||||||
)
|
|
||||||
except ScrapliAuthenticationFailed as err:
|
|
||||||
log.error(
|
|
||||||
"Error authenticating to device {loc}: {e}",
|
|
||||||
loc=self.device.name,
|
|
||||||
e=str(err),
|
|
||||||
)
|
|
||||||
|
|
||||||
raise AuthError(
|
|
||||||
params.messages.connection_error,
|
|
||||||
device_name=self.device.name,
|
|
||||||
error=params.messages.authentication_error,
|
|
||||||
)
|
|
||||||
except ScrapliException as err:
|
except ScrapliException as err:
|
||||||
log.error(err)
|
raise ScrapeError(error=err, device=self.device)
|
||||||
raise ScrapeError(
|
|
||||||
params.messages.connection_error,
|
|
||||||
device_name=self.device.name,
|
|
||||||
error=params.messages.no_response,
|
|
||||||
)
|
|
||||||
|
|
||||||
if not responses:
|
if not responses:
|
||||||
raise ScrapeError(
|
raise ResponseEmpty(query=self.query_data)
|
||||||
params.messages.connection_error,
|
|
||||||
device_name=self.device.name,
|
|
||||||
error=params.messages.no_response,
|
|
||||||
)
|
|
||||||
|
|
||||||
return responses
|
return responses
|
||||||
|
|
|
||||||
|
|
@ -12,9 +12,9 @@ from typing import Any, Dict, Union, Callable, Sequence
|
||||||
|
|
||||||
# Project
|
# Project
|
||||||
from hyperglass.log import log
|
from hyperglass.log import log
|
||||||
from hyperglass.exceptions import DeviceTimeout, ResponseEmpty
|
|
||||||
from hyperglass.models.api import Query
|
from hyperglass.models.api import Query
|
||||||
from hyperglass.configuration import params
|
from hyperglass.configuration import params
|
||||||
|
from hyperglass.exceptions.public import DeviceTimeout, ResponseEmpty
|
||||||
|
|
||||||
# Local
|
# Local
|
||||||
from .drivers import Connection, AgentConnection, NetmikoConnection, ScrapliConnection
|
from .drivers import Connection, AgentConnection, NetmikoConnection, ScrapliConnection
|
||||||
|
|
@ -52,16 +52,9 @@ async def execute(query: Query) -> Union[str, Sequence[Dict]]:
|
||||||
mapped_driver = map_driver(query.device.driver)
|
mapped_driver = map_driver(query.device.driver)
|
||||||
driver = mapped_driver(query.device, query)
|
driver = mapped_driver(query.device, query)
|
||||||
|
|
||||||
timeout_args = {
|
signal.signal(
|
||||||
"unformatted_msg": params.messages.connection_error,
|
signal.SIGALRM, handle_timeout(error=TimeoutError(), device=query.device)
|
||||||
"device_name": query.device.name,
|
)
|
||||||
"error": params.messages.request_timeout,
|
|
||||||
}
|
|
||||||
|
|
||||||
if query.device.proxy:
|
|
||||||
timeout_args["proxy"] = query.device.proxy.name
|
|
||||||
|
|
||||||
signal.signal(signal.SIGALRM, handle_timeout(**timeout_args))
|
|
||||||
signal.alarm(params.request_timeout - 1)
|
signal.alarm(params.request_timeout - 1)
|
||||||
|
|
||||||
if query.device.proxy:
|
if query.device.proxy:
|
||||||
|
|
@ -79,16 +72,13 @@ async def execute(query: Query) -> Union[str, Sequence[Dict]]:
|
||||||
# If the output is a string (not structured) and is empty,
|
# If the output is a string (not structured) and is empty,
|
||||||
# produce an error.
|
# produce an error.
|
||||||
if output == "" or output == "\n":
|
if output == "" or output == "\n":
|
||||||
raise ResponseEmpty(
|
raise ResponseEmpty(query=query)
|
||||||
params.messages.no_output, device_name=query.device.name
|
|
||||||
)
|
|
||||||
elif isinstance(output, Dict):
|
elif isinstance(output, Dict):
|
||||||
# If the output an empty dict, responses have data, produce an
|
# If the output an empty dict, responses have data, produce an
|
||||||
# error.
|
# error.
|
||||||
if not output:
|
if not output:
|
||||||
raise ResponseEmpty(
|
raise ResponseEmpty(query=query)
|
||||||
params.messages.no_output, device_name=query.device.name
|
|
||||||
)
|
|
||||||
|
|
||||||
log.debug("Output for query: {}:\n{}", query.json(), repr(output))
|
log.debug("Output for query: {}:\n{}", query.json(), repr(output))
|
||||||
signal.alarm(0)
|
signal.alarm(0)
|
||||||
|
|
|
||||||
4
hyperglass/external/_base.py
vendored
4
hyperglass/external/_base.py
vendored
|
|
@ -15,7 +15,7 @@ from httpx import StatusCode
|
||||||
from hyperglass.log import log
|
from hyperglass.log import log
|
||||||
from hyperglass.util import make_repr, parse_exception
|
from hyperglass.util import make_repr, parse_exception
|
||||||
from hyperglass.constants import __version__
|
from hyperglass.constants import __version__
|
||||||
from hyperglass.exceptions import HyperglassError
|
from hyperglass.exceptions.private import ExternalError
|
||||||
|
|
||||||
|
|
||||||
def _prepare_dict(_dict):
|
def _prepare_dict(_dict):
|
||||||
|
|
@ -101,7 +101,7 @@ class BaseExternal:
|
||||||
if exc is not None:
|
if exc is not None:
|
||||||
message = f"{str(message)}: {str(exc)}"
|
message = f"{str(message)}: {str(exc)}"
|
||||||
|
|
||||||
return HyperglassError(message, str(level), **kwargs)
|
return ExternalError(message=message, level=level, **kwargs)
|
||||||
|
|
||||||
def _parse_response(self, response):
|
def _parse_response(self, response):
|
||||||
if self.parse:
|
if self.parse:
|
||||||
|
|
|
||||||
7
hyperglass/external/webhooks.py
vendored
7
hyperglass/external/webhooks.py
vendored
|
|
@ -1,11 +1,11 @@
|
||||||
"""Convenience functions for webhooks."""
|
"""Convenience functions for webhooks."""
|
||||||
|
|
||||||
# Project
|
# Project
|
||||||
from hyperglass.exceptions import HyperglassError
|
|
||||||
from hyperglass.external._base import BaseExternal
|
from hyperglass.external._base import BaseExternal
|
||||||
from hyperglass.external.slack import SlackHook
|
from hyperglass.external.slack import SlackHook
|
||||||
from hyperglass.external.generic import GenericHook
|
from hyperglass.external.generic import GenericHook
|
||||||
from hyperglass.external.msteams import MSTeams
|
from hyperglass.external.msteams import MSTeams
|
||||||
|
from hyperglass.exceptions.private import UnsupportedError
|
||||||
|
|
||||||
PROVIDER_MAP = {
|
PROVIDER_MAP = {
|
||||||
"generic": GenericHook,
|
"generic": GenericHook,
|
||||||
|
|
@ -23,6 +23,7 @@ class Webhook(BaseExternal):
|
||||||
provider_class = PROVIDER_MAP[config.provider]
|
provider_class = PROVIDER_MAP[config.provider]
|
||||||
return provider_class(config)
|
return provider_class(config)
|
||||||
except KeyError:
|
except KeyError:
|
||||||
raise HyperglassError(
|
raise UnsupportedError(
|
||||||
f"'{config.provider.title()}' is not yet supported as a webhook target."
|
message="{p} is not yet supported as a webhook target.",
|
||||||
|
p=config.provider.title(),
|
||||||
)
|
)
|
||||||
|
|
|
||||||
|
|
@ -8,9 +8,9 @@ import logging
|
||||||
import platform
|
import platform
|
||||||
|
|
||||||
# Third Party
|
# Third Party
|
||||||
from gunicorn.arbiter import Arbiter
|
from gunicorn.arbiter import Arbiter # type: ignore
|
||||||
from gunicorn.app.base import BaseApplication
|
from gunicorn.app.base import BaseApplication # type: ignore
|
||||||
from gunicorn.glogging import Logger
|
from gunicorn.glogging import Logger # type: ignore
|
||||||
|
|
||||||
# Local
|
# Local
|
||||||
from .log import log, setup_lib_logging
|
from .log import log, setup_lib_logging
|
||||||
|
|
|
||||||
|
|
@ -4,68 +4,56 @@
|
||||||
import json
|
import json
|
||||||
import hashlib
|
import hashlib
|
||||||
import secrets
|
import secrets
|
||||||
from datetime import datetime
|
|
||||||
from typing import Optional
|
from typing import Optional
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
# Third Party
|
# Third Party
|
||||||
from pydantic import BaseModel, StrictStr, constr, validator
|
from pydantic import BaseModel, StrictStr, constr, validator
|
||||||
|
|
||||||
# Project
|
# Project
|
||||||
from hyperglass.exceptions import InputInvalid
|
from hyperglass.log import log
|
||||||
|
from hyperglass.util import snake_to_camel
|
||||||
from hyperglass.configuration import params, devices
|
from hyperglass.configuration import params, devices
|
||||||
|
from hyperglass.exceptions.public import (
|
||||||
|
InputInvalid,
|
||||||
|
QueryTypeNotFound,
|
||||||
|
QueryGroupNotFound,
|
||||||
|
QueryLocationNotFound,
|
||||||
|
)
|
||||||
|
from hyperglass.exceptions.private import InputValidationError
|
||||||
|
|
||||||
# Local
|
# Local
|
||||||
from .types import SupportedQuery
|
from ..config.devices import Device
|
||||||
from .validators import (
|
|
||||||
validate_ip,
|
|
||||||
validate_aspath,
|
|
||||||
validate_community_input,
|
|
||||||
validate_community_select,
|
|
||||||
)
|
|
||||||
from ..config.vrf import Vrf
|
|
||||||
from ..commands.generic import Directive
|
from ..commands.generic import Directive
|
||||||
|
|
||||||
|
DIRECTIVE_IDS = [
|
||||||
|
directive.id for device in devices.objects for directive in device.commands
|
||||||
|
]
|
||||||
|
|
||||||
def get_vrf_object(vrf_name: str) -> Vrf:
|
DIRECTIVE_GROUPS = {
|
||||||
"""Match VRF object from VRF name."""
|
group
|
||||||
|
for device in devices.objects
|
||||||
for vrf_obj in devices.vrf_objects:
|
for directive in device.commands
|
||||||
if vrf_name is not None:
|
for group in directive.groups
|
||||||
if vrf_name == vrf_obj._id or vrf_name == vrf_obj.display_name:
|
}
|
||||||
return vrf_obj
|
|
||||||
|
|
||||||
elif vrf_name == "__hyperglass_default" and vrf_obj.default:
|
|
||||||
return vrf_obj
|
|
||||||
elif vrf_name is None:
|
|
||||||
if vrf_obj.default:
|
|
||||||
return vrf_obj
|
|
||||||
|
|
||||||
raise InputInvalid(params.messages.vrf_not_found, vrf_name=vrf_name)
|
|
||||||
|
|
||||||
|
|
||||||
def get_directive(group: str) -> Optional[Directive]:
|
|
||||||
for device in devices.objects:
|
|
||||||
for command in device.commands:
|
|
||||||
if group in command.groups:
|
|
||||||
return command
|
|
||||||
# TODO: Move this to a param
|
|
||||||
# raise InputInvalid("Group {group} not found", group=group)
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
class Query(BaseModel):
|
class Query(BaseModel):
|
||||||
"""Validation model for input query parameters."""
|
"""Validation model for input query parameters."""
|
||||||
|
|
||||||
|
# Device `name` field
|
||||||
query_location: StrictStr
|
query_location: StrictStr
|
||||||
query_type: SupportedQuery
|
# Directive `id` field
|
||||||
# query_vrf: StrictStr
|
query_type: StrictStr
|
||||||
query_group: StrictStr
|
# Directive `groups` member
|
||||||
|
query_group: Optional[StrictStr]
|
||||||
query_target: constr(strip_whitespace=True, min_length=1)
|
query_target: constr(strip_whitespace=True, min_length=1)
|
||||||
|
|
||||||
class Config:
|
class Config:
|
||||||
"""Pydantic model configuration."""
|
"""Pydantic model configuration."""
|
||||||
|
|
||||||
extra = "allow"
|
extra = "allow"
|
||||||
|
alias_generator = snake_to_camel
|
||||||
fields = {
|
fields = {
|
||||||
"query_location": {
|
"query_location": {
|
||||||
"title": params.web.text.query_location,
|
"title": params.web.text.query_location,
|
||||||
|
|
@ -77,13 +65,8 @@ class Query(BaseModel):
|
||||||
"description": "Type of Query to Execute",
|
"description": "Type of Query to Execute",
|
||||||
"example": "bgp_route",
|
"example": "bgp_route",
|
||||||
},
|
},
|
||||||
# "query_vrf": {
|
|
||||||
# "title": params.web.text.query_vrf,
|
|
||||||
# "description": "Routing Table/VRF",
|
|
||||||
# "example": "default",
|
|
||||||
# },
|
|
||||||
"query_group": {
|
"query_group": {
|
||||||
"title": params.web.text.query_vrf,
|
"title": params.web.text.query_group,
|
||||||
"description": "Routing Table/VRF",
|
"description": "Routing Table/VRF",
|
||||||
"example": "default",
|
"example": "default",
|
||||||
},
|
},
|
||||||
|
|
@ -101,13 +84,17 @@ class Query(BaseModel):
|
||||||
"""Initialize the query with a UTC timestamp at initialization time."""
|
"""Initialize the query with a UTC timestamp at initialization time."""
|
||||||
super().__init__(**kwargs)
|
super().__init__(**kwargs)
|
||||||
self.timestamp = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
|
self.timestamp = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
|
||||||
|
try:
|
||||||
|
self.validate_query_target()
|
||||||
|
except InputValidationError as err:
|
||||||
|
raise InputInvalid(**err.kwargs)
|
||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
"""Represent only the query fields."""
|
"""Represent only the query fields."""
|
||||||
return (
|
return (
|
||||||
f"Query(query_location={str(self.query_location)}, "
|
f'Query(query_location="{str(self.query_location)}", '
|
||||||
f"query_type={str(self.query_type)}, query_group={str(self.query_group)}, "
|
f'query_type="{str(self.query_type)}", query_group="{str(self.query_group)}", '
|
||||||
f"query_target={str(self.query_target)})"
|
f'query_target="{str(self.query_target)}")'
|
||||||
)
|
)
|
||||||
|
|
||||||
def digest(self):
|
def digest(self):
|
||||||
|
|
@ -120,6 +107,11 @@ class Query(BaseModel):
|
||||||
secrets.token_bytes(8) + repr(self).encode() + secrets.token_bytes(8)
|
secrets.token_bytes(8) + repr(self).encode() + secrets.token_bytes(8)
|
||||||
).hexdigest()
|
).hexdigest()
|
||||||
|
|
||||||
|
def validate_query_target(self):
|
||||||
|
"""Validate a query target after all fields/relationships havebeen initialized."""
|
||||||
|
self.directive.validate_target(self.query_target)
|
||||||
|
log.debug("Validation passed for query {}", repr(self))
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def summary(self):
|
def summary(self):
|
||||||
"""Create abbreviated representation of instance."""
|
"""Create abbreviated representation of instance."""
|
||||||
|
|
@ -132,14 +124,18 @@ class Query(BaseModel):
|
||||||
return f'Query({", ".join(items)})'
|
return f'Query({", ".join(items)})'
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def device(self):
|
def device(self) -> Device:
|
||||||
"""Get this query's device object by query_location."""
|
"""Get this query's device object by query_location."""
|
||||||
return devices[self.query_location]
|
return devices[self.query_location]
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def query(self):
|
def directive(self) -> Directive:
|
||||||
"""Get this query's configuration object."""
|
"""Get this query's directive."""
|
||||||
return params.queries[self.query_type]
|
|
||||||
|
for command in self.device.commands:
|
||||||
|
if command.id == self.query_type:
|
||||||
|
return command
|
||||||
|
raise QueryTypeNotFound(query_type=self.query_type)
|
||||||
|
|
||||||
def export_dict(self, pretty=False):
|
def export_dict(self, pretty=False):
|
||||||
"""Create dictionary representation of instance."""
|
"""Create dictionary representation of instance."""
|
||||||
|
|
@ -166,18 +162,11 @@ class Query(BaseModel):
|
||||||
|
|
||||||
@validator("query_type")
|
@validator("query_type")
|
||||||
def validate_query_type(cls, value):
|
def validate_query_type(cls, value):
|
||||||
"""Ensure query_type is enabled."""
|
"""Ensure a requested query type exists."""
|
||||||
|
if value in DIRECTIVE_IDS:
|
||||||
|
return value
|
||||||
|
|
||||||
query = params.queries[value]
|
raise QueryTypeNotFound(name=value)
|
||||||
|
|
||||||
if not query.enable:
|
|
||||||
raise InputInvalid(
|
|
||||||
params.messages.feature_not_enabled,
|
|
||||||
level="warning",
|
|
||||||
feature=query.display_name,
|
|
||||||
)
|
|
||||||
|
|
||||||
return value
|
|
||||||
|
|
||||||
@validator("query_location")
|
@validator("query_location")
|
||||||
def validate_query_location(cls, value):
|
def validate_query_location(cls, value):
|
||||||
|
|
@ -187,71 +176,14 @@ class Query(BaseModel):
|
||||||
valid_hostname = value in devices.hostnames
|
valid_hostname = value in devices.hostnames
|
||||||
|
|
||||||
if not any((valid_id, valid_hostname)):
|
if not any((valid_id, valid_hostname)):
|
||||||
raise InputInvalid(
|
raise QueryLocationNotFound(location=value)
|
||||||
params.messages.invalid_field,
|
|
||||||
level="warning",
|
|
||||||
input=value,
|
|
||||||
field=params.web.text.query_location,
|
|
||||||
)
|
|
||||||
return value
|
return value
|
||||||
|
|
||||||
# @validator("query_vrf")
|
@validator("query_group")
|
||||||
# def validate_query_vrf(cls, value, values):
|
def validate_query_group(cls, value):
|
||||||
# """Ensure query_vrf is defined."""
|
"""Ensure query_group is defined."""
|
||||||
|
if value in DIRECTIVE_GROUPS:
|
||||||
|
return value
|
||||||
|
|
||||||
# vrf_object = get_vrf_object(value)
|
raise QueryGroupNotFound(group=value)
|
||||||
# device = devices[values["query_location"]]
|
|
||||||
# device_vrf = None
|
|
||||||
|
|
||||||
# for vrf in device.vrfs:
|
|
||||||
# if vrf == vrf_object:
|
|
||||||
# device_vrf = vrf
|
|
||||||
# break
|
|
||||||
|
|
||||||
# if device_vrf is None:
|
|
||||||
# raise InputInvalid(
|
|
||||||
# params.messages.vrf_not_associated,
|
|
||||||
# vrf_name=vrf_object.display_name,
|
|
||||||
# device_name=device.name,
|
|
||||||
# )
|
|
||||||
# return device_vrf
|
|
||||||
|
|
||||||
# @validator("query_group")
|
|
||||||
# def validate_query_group(cls, value, values):
|
|
||||||
# """Ensure query_vrf is defined."""
|
|
||||||
|
|
||||||
# obj = get_directive(value)
|
|
||||||
# if obj is not None:
|
|
||||||
# ...
|
|
||||||
# return device_vrf
|
|
||||||
|
|
||||||
@validator("query_target")
|
|
||||||
def validate_query_target(cls, value, values):
|
|
||||||
"""Validate query target value based on query_type."""
|
|
||||||
|
|
||||||
query_type = values["query_type"]
|
|
||||||
value = value.strip()
|
|
||||||
|
|
||||||
# Use relevant function based on query_type.
|
|
||||||
validator_map = {
|
|
||||||
"bgp_aspath": validate_aspath,
|
|
||||||
"bgp_community": validate_community_input,
|
|
||||||
"bgp_route": validate_ip,
|
|
||||||
"ping": validate_ip,
|
|
||||||
"traceroute": validate_ip,
|
|
||||||
}
|
|
||||||
validator_args_map = {
|
|
||||||
"bgp_aspath": (value,),
|
|
||||||
"bgp_community": (value,),
|
|
||||||
"bgp_route": (value, values["query_type"], values["query_vrf"]),
|
|
||||||
"ping": (value, values["query_type"], values["query_vrf"]),
|
|
||||||
"traceroute": (value, values["query_type"], values["query_vrf"]),
|
|
||||||
}
|
|
||||||
|
|
||||||
if params.queries.bgp_community.mode == "select":
|
|
||||||
validator_map["bgp_community"] = validate_community_select
|
|
||||||
|
|
||||||
validate_func = validator_map[query_type]
|
|
||||||
validate_args = validator_args_map[query_type]
|
|
||||||
|
|
||||||
return validate_func(*validate_args)
|
|
||||||
|
|
|
||||||
|
|
@ -1,105 +1,261 @@
|
||||||
|
"""Generic command models."""
|
||||||
|
|
||||||
|
# Standard Library
|
||||||
|
import re
|
||||||
import json
|
import json
|
||||||
from ipaddress import IPv4Network, IPv6Network
|
from typing import Dict, List, Union, Literal, Optional
|
||||||
from typing import Optional, Sequence, Union, Dict
|
from ipaddress import IPv4Network, IPv6Network, ip_network
|
||||||
from typing_extensions import Literal
|
|
||||||
from pydantic import StrictStr, PrivateAttr, conint, validator, FilePath
|
# Third Party
|
||||||
|
from pydantic import (
|
||||||
|
Field,
|
||||||
|
FilePath,
|
||||||
|
StrictStr,
|
||||||
|
StrictBool,
|
||||||
|
PrivateAttr,
|
||||||
|
conint,
|
||||||
|
validator,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Project
|
||||||
|
from hyperglass.log import log
|
||||||
|
from hyperglass.exceptions.private import InputValidationError
|
||||||
|
|
||||||
|
# Local
|
||||||
from ..main import HyperglassModel
|
from ..main import HyperglassModel
|
||||||
|
from ..fields import Action
|
||||||
from ..config.params import Params
|
from ..config.params import Params
|
||||||
from hyperglass.configuration.markdown import get_markdown
|
|
||||||
|
|
||||||
IPv4PrefixLength = conint(ge=0, le=32)
|
IPv4PrefixLength = conint(ge=0, le=32)
|
||||||
IPv6PrefixLength = conint(ge=0, le=128)
|
IPv6PrefixLength = conint(ge=0, le=128)
|
||||||
|
IPNetwork = Union[IPv4Network, IPv6Network]
|
||||||
|
StringOrArray = Union[StrictStr, List[StrictStr]]
|
||||||
class Policy(HyperglassModel):
|
Condition = Union[IPv4Network, IPv6Network, StrictStr]
|
||||||
network: Union[IPv4Network, IPv6Network]
|
RuleValidation = Union[Literal["ipv4", "ipv6", "pattern"], None]
|
||||||
action: Literal["permit", "deny"]
|
PassedValidation = Union[bool, None]
|
||||||
|
|
||||||
@validator("ge", check_fields=False)
|
|
||||||
def validate_ge(cls, value: int, values: Dict) -> int:
|
|
||||||
"""Ensure ge is at least the size of the input prefix."""
|
|
||||||
|
|
||||||
network_len = values["network"].prefixlen
|
|
||||||
|
|
||||||
if network_len > value:
|
|
||||||
value = network_len
|
|
||||||
|
|
||||||
return value
|
|
||||||
|
|
||||||
|
|
||||||
class Policy4(Policy):
|
|
||||||
ge: IPv4PrefixLength = 0
|
|
||||||
le: IPv4PrefixLength = 32
|
|
||||||
|
|
||||||
|
|
||||||
class Policy6(Policy):
|
|
||||||
ge: IPv6PrefixLength = 0
|
|
||||||
le: IPv6PrefixLength = 128
|
|
||||||
|
|
||||||
|
|
||||||
class Input(HyperglassModel):
|
class Input(HyperglassModel):
|
||||||
|
"""Base input field."""
|
||||||
|
|
||||||
_type: PrivateAttr
|
_type: PrivateAttr
|
||||||
description: StrictStr
|
description: StrictStr
|
||||||
|
|
||||||
|
@property
|
||||||
def is_select(self) -> bool:
|
def is_select(self) -> bool:
|
||||||
|
"""Determine if this field is a select field."""
|
||||||
return self._type == "select"
|
return self._type == "select"
|
||||||
|
|
||||||
|
@property
|
||||||
def is_text(self) -> bool:
|
def is_text(self) -> bool:
|
||||||
|
"""Determine if this field is an input/text field."""
|
||||||
return self._type == "text"
|
return self._type == "text"
|
||||||
|
|
||||||
def is_ip(self) -> bool:
|
|
||||||
return self._type == "ip"
|
|
||||||
|
|
||||||
|
|
||||||
class Text(Input):
|
class Text(Input):
|
||||||
_type: PrivateAttr = "text"
|
"""Text/input field model."""
|
||||||
|
|
||||||
|
_type: PrivateAttr = PrivateAttr("text")
|
||||||
validation: Optional[StrictStr]
|
validation: Optional[StrictStr]
|
||||||
|
|
||||||
|
|
||||||
class IPInput(Input):
|
|
||||||
_type: PrivateAttr = "ip"
|
|
||||||
validation: Union[Policy4, Policy6]
|
|
||||||
|
|
||||||
|
|
||||||
class Option(HyperglassModel):
|
class Option(HyperglassModel):
|
||||||
|
"""Select option model."""
|
||||||
|
|
||||||
name: Optional[StrictStr]
|
name: Optional[StrictStr]
|
||||||
|
description: Optional[StrictStr]
|
||||||
value: StrictStr
|
value: StrictStr
|
||||||
|
|
||||||
|
|
||||||
class Select(Input):
|
class Select(Input):
|
||||||
_type: PrivateAttr = "select"
|
"""Select field model."""
|
||||||
options: Sequence[Option]
|
|
||||||
|
_type: PrivateAttr = PrivateAttr("select")
|
||||||
|
options: List[Option]
|
||||||
|
|
||||||
|
|
||||||
class Directive(HyperglassModel):
|
class Rule(HyperglassModel, allow_population_by_field_name=True):
|
||||||
id: StrictStr
|
"""Base rule."""
|
||||||
name: StrictStr
|
|
||||||
command: Union[StrictStr, Sequence[StrictStr]]
|
|
||||||
field: Union[Text, Select, IPInput, None]
|
|
||||||
info: Optional[FilePath]
|
|
||||||
attrs: Dict = {}
|
|
||||||
groups: Sequence[
|
|
||||||
StrictStr
|
|
||||||
] = [] # TODO: Flesh this out. Replace VRFs, but use same logic in React to filter available commands for multi-device queries.
|
|
||||||
|
|
||||||
@validator("command")
|
_validation: RuleValidation = PrivateAttr()
|
||||||
def validate_command(cls, value: Union[str, Sequence[str]]) -> Sequence[str]:
|
_passed: PassedValidation = PrivateAttr(None)
|
||||||
|
condition: Condition
|
||||||
|
action: Action = Action("permit")
|
||||||
|
commands: List[str] = Field([], alias="command")
|
||||||
|
|
||||||
|
@validator("commands", pre=True, allow_reuse=True)
|
||||||
|
def validate_commands(cls, value: Union[str, List[str]]) -> List[str]:
|
||||||
|
"""Ensure commands is a list."""
|
||||||
if isinstance(value, str):
|
if isinstance(value, str):
|
||||||
return [value]
|
return [value]
|
||||||
return value
|
return value
|
||||||
|
|
||||||
def get_commands(self, target: str) -> Sequence[str]:
|
def validate_target(self, target: str) -> bool:
|
||||||
return [s.format(target=target, **self.attrs) for s in self.command]
|
"""Validate a query target (Placeholder signature)."""
|
||||||
|
raise NotImplementedError(
|
||||||
|
f"{self._validation} rule does not implement a 'validate_target()' method"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class RuleWithIP(Rule):
|
||||||
|
"""Base IP-based rule."""
|
||||||
|
|
||||||
|
_family: PrivateAttr
|
||||||
|
condition: IPNetwork
|
||||||
|
allow_reserved: StrictBool = False
|
||||||
|
allow_unspecified: StrictBool = False
|
||||||
|
allow_loopback: StrictBool = False
|
||||||
|
ge: int
|
||||||
|
le: int
|
||||||
|
|
||||||
|
def membership(self, target: IPNetwork, network: IPNetwork) -> bool:
|
||||||
|
"""Check if IP address belongs to network."""
|
||||||
|
log.debug("Checking membership of {} for {}", str(target), str(network))
|
||||||
|
if (
|
||||||
|
network.network_address <= target.network_address
|
||||||
|
and network.broadcast_address >= target.broadcast_address
|
||||||
|
):
|
||||||
|
log.debug("{} is a member of {}", target, network)
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
def in_range(self, target: IPNetwork) -> bool:
|
||||||
|
"""Verify if target prefix length is within ge/le threshold."""
|
||||||
|
if target.prefixlen <= self.le and target.prefixlen >= self.ge:
|
||||||
|
log.debug("{} is in range {}-{}", target, self.ge, self.le)
|
||||||
|
return True
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
def validate_target(self, target: str) -> bool:
|
||||||
|
"""Validate an IP address target against this rule's conditions."""
|
||||||
|
try:
|
||||||
|
# Attempt to use IP object factory to create an IP address object
|
||||||
|
valid_target = ip_network(target)
|
||||||
|
|
||||||
|
except ValueError as err:
|
||||||
|
raise InputValidationError(error=str(err), target=target)
|
||||||
|
|
||||||
|
is_member = self.membership(valid_target, self.condition)
|
||||||
|
in_range = self.in_range(valid_target)
|
||||||
|
|
||||||
|
if all((is_member, in_range, self.action == "permit")):
|
||||||
|
self._passed = True
|
||||||
|
return True
|
||||||
|
|
||||||
|
elif is_member and not in_range:
|
||||||
|
self._passed = False
|
||||||
|
raise InputValidationError(
|
||||||
|
error="Prefix-length is not within range {ge}-{le}",
|
||||||
|
target=target,
|
||||||
|
ge=self.ge,
|
||||||
|
le=self.le,
|
||||||
|
)
|
||||||
|
|
||||||
|
elif is_member and self.action == "deny":
|
||||||
|
self._passed = False
|
||||||
|
raise InputValidationError(
|
||||||
|
error="Member of denied network '{network}'",
|
||||||
|
target=target,
|
||||||
|
network=str(self.condition),
|
||||||
|
)
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
class RuleWithIPv4(RuleWithIP):
|
||||||
|
"""A rule by which to evaluate an IPv4 target."""
|
||||||
|
|
||||||
|
_family: PrivateAttr = PrivateAttr("ipv4")
|
||||||
|
_validation: RuleValidation = PrivateAttr("ipv4")
|
||||||
|
condition: IPv4Network
|
||||||
|
ge: IPv4PrefixLength = 0
|
||||||
|
le: IPv4PrefixLength = 32
|
||||||
|
|
||||||
|
|
||||||
|
class RuleWithIPv6(RuleWithIP):
|
||||||
|
"""A rule by which to evaluate an IPv6 target."""
|
||||||
|
|
||||||
|
_family: PrivateAttr = PrivateAttr("ipv6")
|
||||||
|
_validation: RuleValidation = PrivateAttr("ipv6")
|
||||||
|
condition: IPv6Network
|
||||||
|
ge: IPv6PrefixLength = 0
|
||||||
|
le: IPv6PrefixLength = 128
|
||||||
|
|
||||||
|
|
||||||
|
class RuleWithPattern(Rule):
|
||||||
|
"""A rule validated by a regular expression pattern."""
|
||||||
|
|
||||||
|
_validation: RuleValidation = PrivateAttr("pattern")
|
||||||
|
condition: StrictStr
|
||||||
|
|
||||||
|
def validate_target(self, target: str) -> str:
|
||||||
|
"""Validate a string target against configured regex patterns."""
|
||||||
|
|
||||||
|
if self.condition == "*":
|
||||||
|
pattern = re.compile(".+", re.IGNORECASE)
|
||||||
|
else:
|
||||||
|
pattern = re.compile(self.condition, re.IGNORECASE)
|
||||||
|
|
||||||
|
is_match = pattern.match(target)
|
||||||
|
if is_match and self.action == "permit":
|
||||||
|
self._passed = True
|
||||||
|
return True
|
||||||
|
elif is_match and self.action == "deny":
|
||||||
|
self._passed = False
|
||||||
|
raise InputValidationError(target=target, error="Denied")
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
class RuleWithoutValidation(Rule):
|
||||||
|
"""A rule with no validation."""
|
||||||
|
|
||||||
|
_validation: RuleValidation = PrivateAttr(None)
|
||||||
|
condition: None
|
||||||
|
|
||||||
|
def validate_target(self, target: str) -> Literal[True]:
|
||||||
|
"""Don't validate a target. Always returns `True`."""
|
||||||
|
self._passed = True
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
Rules = Union[RuleWithIPv4, RuleWithIPv6, RuleWithPattern, RuleWithoutValidation]
|
||||||
|
|
||||||
|
|
||||||
|
class Directive(HyperglassModel):
|
||||||
|
"""A directive contains commands that can be run on a device, as long as defined rules are met."""
|
||||||
|
|
||||||
|
id: StrictStr
|
||||||
|
name: StrictStr
|
||||||
|
rules: List[Rules]
|
||||||
|
field: Union[Text, Select, None]
|
||||||
|
info: Optional[FilePath]
|
||||||
|
groups: List[
|
||||||
|
StrictStr
|
||||||
|
] = [] # TODO: Flesh this out. Replace VRFs, but use same logic in React to filter available commands for multi-device queries.
|
||||||
|
|
||||||
|
def validate_target(self, target: str) -> bool:
|
||||||
|
"""Validate a target against all configured rules."""
|
||||||
|
for rule in self.rules:
|
||||||
|
valid = rule.validate_target(target)
|
||||||
|
if valid is True:
|
||||||
|
return True
|
||||||
|
continue
|
||||||
|
raise InputValidationError(error="No matched validation rules", target=target)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def field_type(self) -> Literal["text", "select", None]:
|
def field_type(self) -> Literal["text", "select", None]:
|
||||||
if self.field.is_select():
|
"""Get the linked field type."""
|
||||||
|
|
||||||
|
if self.field.is_select:
|
||||||
return "select"
|
return "select"
|
||||||
elif self.field.is_text() or self.field.is_ip():
|
elif self.field.is_text or self.field.is_ip:
|
||||||
return "text"
|
return "text"
|
||||||
return None
|
return None
|
||||||
|
|
||||||
def frontend(self, params: Params) -> Dict:
|
def frontend(self, params: Params) -> Dict:
|
||||||
|
"""Prepare a representation of the directive for the UI."""
|
||||||
|
|
||||||
value = {
|
value = {
|
||||||
"id": self.id,
|
"id": self.id,
|
||||||
|
|
@ -128,7 +284,9 @@ class Directive(HyperglassModel):
|
||||||
"content": md.read(),
|
"content": md.read(),
|
||||||
}
|
}
|
||||||
|
|
||||||
if self.field_type == "select":
|
if self.field.is_select:
|
||||||
value["options"]: [o.export_dict() for o in self.field.options]
|
value["options"] = [
|
||||||
|
o.export_dict() for o in self.field.options if o is not None
|
||||||
|
]
|
||||||
|
|
||||||
return value
|
return value
|
||||||
|
|
|
||||||
|
|
@ -3,7 +3,7 @@
|
||||||
# Standard Library
|
# Standard Library
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
from typing import Any, Dict, List, Tuple, Union, Optional, Sequence
|
from typing import Any, Dict, List, Tuple, Union, Optional
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from ipaddress import IPv4Address, IPv6Address
|
from ipaddress import IPv4Address, IPv6Address
|
||||||
|
|
||||||
|
|
@ -19,66 +19,21 @@ from pydantic import (
|
||||||
|
|
||||||
# Project
|
# Project
|
||||||
from hyperglass.log import log
|
from hyperglass.log import log
|
||||||
from hyperglass.util import get_driver, validate_nos, resolve_hostname
|
from hyperglass.util import get_driver, get_fmt_keys, validate_nos, resolve_hostname
|
||||||
from hyperglass.constants import SCRAPE_HELPERS, SUPPORTED_STRUCTURED_OUTPUT
|
from hyperglass.constants import SCRAPE_HELPERS, SUPPORTED_STRUCTURED_OUTPUT
|
||||||
from hyperglass.exceptions import ConfigError, UnsupportedDevice
|
from hyperglass.exceptions.private import ConfigError, UnsupportedDevice
|
||||||
from hyperglass.models.commands.generic import Directive
|
from hyperglass.models.commands.generic import Directive
|
||||||
|
|
||||||
|
|
||||||
# Local
|
# Local
|
||||||
from .ssl import Ssl
|
from .ssl import Ssl
|
||||||
from .vrf import Vrf, Info
|
|
||||||
from ..main import HyperglassModel, HyperglassModelExtra
|
from ..main import HyperglassModel, HyperglassModelExtra
|
||||||
from .proxy import Proxy
|
from .proxy import Proxy
|
||||||
from ..fields import SupportedDriver
|
from ..fields import SupportedDriver
|
||||||
from .network import Network
|
from .network import Network
|
||||||
from .credential import Credential
|
from .credential import Credential
|
||||||
|
|
||||||
_default_vrf = {
|
|
||||||
"name": "default",
|
|
||||||
"display_name": "Global",
|
|
||||||
"info": Info(),
|
|
||||||
"ipv4": {
|
|
||||||
"source_address": None,
|
|
||||||
"access_list": [
|
|
||||||
{"network": "0.0.0.0/0", "action": "permit", "ge": 0, "le": 32}
|
|
||||||
],
|
|
||||||
},
|
|
||||||
"ipv6": {
|
|
||||||
"source_address": None,
|
|
||||||
"access_list": [{"network": "::/0", "action": "permit", "ge": 0, "le": 128}],
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
|
class Device(HyperglassModelExtra):
|
||||||
def find_device_id(values: Dict) -> Tuple[str, Dict]:
|
|
||||||
"""Generate device id & handle legacy display_name field."""
|
|
||||||
|
|
||||||
def generate_id(name: str) -> str:
|
|
||||||
scrubbed = re.sub(r"[^A-Za-z0-9\_\-\s]", "", name)
|
|
||||||
return "_".join(scrubbed.split()).lower()
|
|
||||||
|
|
||||||
name = values.pop("name", None)
|
|
||||||
|
|
||||||
if name is None:
|
|
||||||
raise ValueError("name is required.")
|
|
||||||
|
|
||||||
legacy_display_name = values.pop("display_name", None)
|
|
||||||
|
|
||||||
if legacy_display_name is not None:
|
|
||||||
log.warning(
|
|
||||||
"The 'display_name' field is deprecated. Use the 'name' field instead."
|
|
||||||
)
|
|
||||||
device_id = generate_id(legacy_display_name)
|
|
||||||
display_name = legacy_display_name
|
|
||||||
else:
|
|
||||||
device_id = generate_id(name)
|
|
||||||
display_name = name
|
|
||||||
|
|
||||||
return device_id, {"name": display_name, "display_name": None, **values}
|
|
||||||
|
|
||||||
|
|
||||||
class Device(HyperglassModel):
|
|
||||||
"""Validation model for per-router config in devices.yaml."""
|
"""Validation model for per-router config in devices.yaml."""
|
||||||
|
|
||||||
_id: StrictStr = PrivateAttr()
|
_id: StrictStr = PrivateAttr()
|
||||||
|
|
@ -91,16 +46,17 @@ class Device(HyperglassModel):
|
||||||
port: StrictInt = 22
|
port: StrictInt = 22
|
||||||
ssl: Optional[Ssl]
|
ssl: Optional[Ssl]
|
||||||
nos: StrictStr
|
nos: StrictStr
|
||||||
commands: Sequence[Directive]
|
commands: List[Directive]
|
||||||
vrfs: List[Vrf] = [_default_vrf]
|
|
||||||
structured_output: Optional[StrictBool]
|
structured_output: Optional[StrictBool]
|
||||||
driver: Optional[SupportedDriver]
|
driver: Optional[SupportedDriver]
|
||||||
|
attrs: Dict[str, str] = {}
|
||||||
|
|
||||||
def __init__(self, **kwargs) -> None:
|
def __init__(self, **kwargs) -> None:
|
||||||
"""Set the device ID."""
|
"""Set the device ID."""
|
||||||
_id, values = find_device_id(kwargs)
|
_id, values = self._generate_id(kwargs)
|
||||||
super().__init__(**values)
|
super().__init__(**values)
|
||||||
self._id = _id
|
self._id = _id
|
||||||
|
self._validate_directive_attrs()
|
||||||
|
|
||||||
def __hash__(self) -> int:
|
def __hash__(self) -> int:
|
||||||
"""Make device object hashable so the object can be deduplicated with set()."""
|
"""Make device object hashable so the object can be deduplicated with set()."""
|
||||||
|
|
@ -119,9 +75,66 @@ class Device(HyperglassModel):
|
||||||
def _target(self):
|
def _target(self):
|
||||||
return str(self.address)
|
return str(self.address)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _generate_id(values: Dict) -> Tuple[str, Dict]:
|
||||||
|
"""Generate device id & handle legacy display_name field."""
|
||||||
|
|
||||||
|
def generate_id(name: str) -> str:
|
||||||
|
scrubbed = re.sub(r"[^A-Za-z0-9\_\-\s]", "", name)
|
||||||
|
return "_".join(scrubbed.split()).lower()
|
||||||
|
|
||||||
|
name = values.pop("name", None)
|
||||||
|
|
||||||
|
if name is None:
|
||||||
|
raise ValueError("name is required.")
|
||||||
|
|
||||||
|
legacy_display_name = values.pop("display_name", None)
|
||||||
|
|
||||||
|
if legacy_display_name is not None:
|
||||||
|
log.warning(
|
||||||
|
"The 'display_name' field is deprecated. Use the 'name' field instead."
|
||||||
|
)
|
||||||
|
device_id = generate_id(legacy_display_name)
|
||||||
|
display_name = legacy_display_name
|
||||||
|
else:
|
||||||
|
device_id = generate_id(name)
|
||||||
|
display_name = name
|
||||||
|
|
||||||
|
return device_id, {"name": display_name, "display_name": None, **values}
|
||||||
|
|
||||||
|
def _validate_directive_attrs(self) -> None:
|
||||||
|
|
||||||
|
# Get all commands associated with the device.
|
||||||
|
commands = [
|
||||||
|
command
|
||||||
|
for directive in self.commands
|
||||||
|
for rule in directive.rules
|
||||||
|
for command in rule.commands
|
||||||
|
]
|
||||||
|
|
||||||
|
# Set of all keys except for built-in key `target`.
|
||||||
|
keys = {
|
||||||
|
key
|
||||||
|
for group in [get_fmt_keys(command) for command in commands]
|
||||||
|
for key in group
|
||||||
|
if key != "target"
|
||||||
|
}
|
||||||
|
|
||||||
|
attrs = {k: v for k, v in self.attrs.items() if k in keys}
|
||||||
|
|
||||||
|
# Verify all keys in associated commands contain values in device's `attrs`.
|
||||||
|
for key in keys:
|
||||||
|
if key not in attrs:
|
||||||
|
raise ConfigError(
|
||||||
|
"Device '{d}' has a command that references attribute '{a}', but '{a}' is missing from device attributes",
|
||||||
|
d=self.name,
|
||||||
|
a=key,
|
||||||
|
)
|
||||||
|
|
||||||
@validator("address")
|
@validator("address")
|
||||||
def validate_address(cls, value, values):
|
def validate_address(cls, value, values):
|
||||||
"""Ensure a hostname is resolvable."""
|
"""Ensure a hostname is resolvable."""
|
||||||
|
|
||||||
if not isinstance(value, (IPv4Address, IPv6Address)):
|
if not isinstance(value, (IPv4Address, IPv6Address)):
|
||||||
if not any(resolve_hostname(value)):
|
if not any(resolve_hostname(value)):
|
||||||
raise ConfigError(
|
raise ConfigError(
|
||||||
|
|
@ -152,15 +165,8 @@ class Device(HyperglassModel):
|
||||||
|
|
||||||
@validator("ssl")
|
@validator("ssl")
|
||||||
def validate_ssl(cls, value, values):
|
def validate_ssl(cls, value, values):
|
||||||
"""Set default cert file location if undefined.
|
"""Set default cert file location if undefined."""
|
||||||
|
|
||||||
Arguments:
|
|
||||||
value {object} -- SSL object
|
|
||||||
values {dict} -- Other already-validated fields
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
{object} -- SSL configuration
|
|
||||||
"""
|
|
||||||
if value is not None:
|
if value is not None:
|
||||||
if value.enable and value.cert is None:
|
if value.enable and value.cert is None:
|
||||||
app_path = Path(os.environ["hyperglass_directory"])
|
app_path = Path(os.environ["hyperglass_directory"])
|
||||||
|
|
@ -179,7 +185,7 @@ class Device(HyperglassModel):
|
||||||
if not nos:
|
if not nos:
|
||||||
# Ensure nos is defined.
|
# Ensure nos is defined.
|
||||||
raise ValueError(
|
raise ValueError(
|
||||||
f'Device {values["name"]} is missing a `nos` (Network Operating System).'
|
f"Device {values['name']} is missing a 'nos' (Network Operating System) property."
|
||||||
)
|
)
|
||||||
|
|
||||||
if nos in SCRAPE_HELPERS.keys():
|
if nos in SCRAPE_HELPERS.keys():
|
||||||
|
|
@ -189,7 +195,7 @@ class Device(HyperglassModel):
|
||||||
# Verify NOS is supported by hyperglass.
|
# Verify NOS is supported by hyperglass.
|
||||||
supported, _ = validate_nos(nos)
|
supported, _ = validate_nos(nos)
|
||||||
if not supported:
|
if not supported:
|
||||||
raise UnsupportedDevice('"{nos}" is not supported.', nos=nos)
|
raise UnsupportedDevice(nos=nos)
|
||||||
|
|
||||||
values["nos"] = nos
|
values["nos"] = nos
|
||||||
|
|
||||||
|
|
@ -209,73 +215,6 @@ class Device(HyperglassModel):
|
||||||
|
|
||||||
return values
|
return values
|
||||||
|
|
||||||
@validator("vrfs", pre=True)
|
|
||||||
def validate_vrfs(cls, value, values):
|
|
||||||
"""Validate VRF definitions.
|
|
||||||
|
|
||||||
- Ensures source IP addresses are set for the default VRF
|
|
||||||
(global routing table).
|
|
||||||
- Initializes the default VRF with the DefaultVRF() class so
|
|
||||||
that specific defaults can be set for the global routing
|
|
||||||
table.
|
|
||||||
- If the 'display_name' is not set for a non-default VRF, try
|
|
||||||
to make one that looks pretty based on the 'name'.
|
|
||||||
|
|
||||||
Arguments:
|
|
||||||
value {list} -- List of VRFs
|
|
||||||
values {dict} -- Other already-validated fields
|
|
||||||
|
|
||||||
Raises:
|
|
||||||
ConfigError: Raised if the VRF is missing a source address
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
{list} -- List of valid VRFs
|
|
||||||
"""
|
|
||||||
vrfs = []
|
|
||||||
for vrf in value:
|
|
||||||
vrf_default = vrf.get("default", False)
|
|
||||||
|
|
||||||
for afi in ("ipv4", "ipv6"):
|
|
||||||
vrf_afi = vrf.get(afi)
|
|
||||||
|
|
||||||
# If AFI is actually defined (enabled), and if the
|
|
||||||
# source_address field is not set, raise an error
|
|
||||||
if vrf_afi is not None and vrf_afi.get("source_address") is None:
|
|
||||||
raise ConfigError(
|
|
||||||
(
|
|
||||||
"VRF '{vrf}' in router '{router}' is missing a source "
|
|
||||||
"{afi} address."
|
|
||||||
),
|
|
||||||
vrf=vrf.get("name"),
|
|
||||||
router=values.get("name"),
|
|
||||||
afi=afi.replace("ip", "IP"),
|
|
||||||
)
|
|
||||||
|
|
||||||
# If no display_name is set for a non-default VRF, try
|
|
||||||
# to make one by replacing non-alphanumeric characters
|
|
||||||
# with whitespaces and using str.title() to make each
|
|
||||||
# word look "pretty".
|
|
||||||
if not vrf_default and not isinstance(vrf.get("display_name"), str):
|
|
||||||
new_name = vrf["name"]
|
|
||||||
new_name = re.sub(r"[^a-zA-Z0-9]", " ", new_name)
|
|
||||||
new_name = re.split(" ", new_name)
|
|
||||||
vrf["display_name"] = " ".join([w.title() for w in new_name])
|
|
||||||
|
|
||||||
log.debug(
|
|
||||||
f'Field "display_name" for VRF "{vrf["name"]}" was not set. '
|
|
||||||
f"Generated '{vrf['display_name']}'"
|
|
||||||
)
|
|
||||||
|
|
||||||
elif vrf_default and vrf.get("display_name") is None:
|
|
||||||
vrf["display_name"] = "Global"
|
|
||||||
|
|
||||||
# Validate the non-default VRF against the standard
|
|
||||||
# Vrf() class.
|
|
||||||
vrf = Vrf(**vrf)
|
|
||||||
|
|
||||||
vrfs.append(vrf)
|
|
||||||
return vrfs
|
|
||||||
|
|
||||||
@validator("driver")
|
@validator("driver")
|
||||||
def validate_driver(cls, value: Optional[str], values: Dict) -> Dict:
|
def validate_driver(cls, value: Optional[str], values: Dict) -> Dict:
|
||||||
"""Set the correct driver and override if supported."""
|
"""Set the correct driver and override if supported."""
|
||||||
|
|
@ -287,11 +226,8 @@ class Devices(HyperglassModelExtra):
|
||||||
|
|
||||||
_ids: List[StrictStr] = []
|
_ids: List[StrictStr] = []
|
||||||
hostnames: List[StrictStr] = []
|
hostnames: List[StrictStr] = []
|
||||||
vrfs: List[StrictStr] = []
|
|
||||||
vrf_objects: List[Vrf] = []
|
|
||||||
objects: List[Device] = []
|
objects: List[Device] = []
|
||||||
all_nos: List[StrictStr] = []
|
all_nos: List[StrictStr] = []
|
||||||
default_vrf: Vrf = Vrf(name="default", display_name="Global")
|
|
||||||
|
|
||||||
def __init__(self, input_params: List[Dict]) -> None:
|
def __init__(self, input_params: List[Dict]) -> None:
|
||||||
"""Import loaded YAML, initialize per-network definitions.
|
"""Import loaded YAML, initialize per-network definitions.
|
||||||
|
|
@ -300,8 +236,6 @@ class Devices(HyperglassModelExtra):
|
||||||
set attributes for the devices class. Builds lists of common
|
set attributes for the devices class. Builds lists of common
|
||||||
attributes for easy access in other modules.
|
attributes for easy access in other modules.
|
||||||
"""
|
"""
|
||||||
vrfs = set()
|
|
||||||
vrf_objects = set()
|
|
||||||
all_nos = set()
|
all_nos = set()
|
||||||
objects = set()
|
objects = set()
|
||||||
hostnames = set()
|
hostnames = set()
|
||||||
|
|
@ -322,38 +256,11 @@ class Devices(HyperglassModelExtra):
|
||||||
objects.add(device)
|
objects.add(device)
|
||||||
all_nos.add(device.nos)
|
all_nos.add(device.nos)
|
||||||
|
|
||||||
for vrf in device.vrfs:
|
|
||||||
|
|
||||||
# For each configured router VRF, add its name and
|
|
||||||
# display_name to a class set (for automatic de-duping).
|
|
||||||
vrfs.add(vrf.name)
|
|
||||||
|
|
||||||
# Add a 'default_vrf' attribute to the devices class
|
|
||||||
# which contains the configured default VRF display name.
|
|
||||||
if vrf.name == "default" and not hasattr(self, "default_vrf"):
|
|
||||||
init_kwargs["default_vrf"] = Vrf(
|
|
||||||
name=vrf.name, display_name=vrf.display_name
|
|
||||||
)
|
|
||||||
|
|
||||||
# Add the native VRF objects to a set (for automatic
|
|
||||||
# de-duping), but exlcude device-specific fields.
|
|
||||||
vrf_objects.add(
|
|
||||||
vrf.copy(
|
|
||||||
deep=True,
|
|
||||||
exclude={
|
|
||||||
"ipv4": {"source_address"},
|
|
||||||
"ipv6": {"source_address"},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
# Convert the de-duplicated sets to a standard list, add lists
|
# Convert the de-duplicated sets to a standard list, add lists
|
||||||
# as class attributes. Sort router list by router name attribute
|
# as class attributes. Sort router list by router name attribute
|
||||||
init_kwargs["_ids"] = list(_ids)
|
init_kwargs["_ids"] = list(_ids)
|
||||||
init_kwargs["hostnames"] = list(hostnames)
|
init_kwargs["hostnames"] = list(hostnames)
|
||||||
init_kwargs["all_nos"] = list(all_nos)
|
init_kwargs["all_nos"] = list(all_nos)
|
||||||
init_kwargs["vrfs"] = list(vrfs)
|
|
||||||
init_kwargs["vrf_objects"] = list(vrf_objects)
|
|
||||||
init_kwargs["objects"] = sorted(objects, key=lambda x: x.name)
|
init_kwargs["objects"] = sorted(objects, key=lambda x: x.name)
|
||||||
|
|
||||||
super().__init__(**init_kwargs)
|
super().__init__(**init_kwargs)
|
||||||
|
|
|
||||||
|
|
@ -31,9 +31,14 @@ class Messages(HyperglassModel):
|
||||||
description="Displayed when a query type is submitted that is not supported or disabled. The hyperglass UI performs validation of supported query types prior to submitting any requests, so this is primarily relevant to the hyperglass API. `{feature}` may be used to display the disabled feature.",
|
description="Displayed when a query type is submitted that is not supported or disabled. The hyperglass UI performs validation of supported query types prior to submitting any requests, so this is primarily relevant to the hyperglass API. `{feature}` may be used to display the disabled feature.",
|
||||||
)
|
)
|
||||||
invalid_input: StrictStr = Field(
|
invalid_input: StrictStr = Field(
|
||||||
"{target} is not a valid {query_type} target.",
|
"{target} is not valid.",
|
||||||
title="Invalid Input",
|
title="Invalid Input",
|
||||||
description="Displayed when a query target's value is invalid in relation to the corresponding query type. `{target}` and `{query_type}` maybe used to display the invalid target and corresponding query type.",
|
description="Displayed when a query target's value is invalid in relation to the corresponding query type. `{target}` may be used to display the invalid target.",
|
||||||
|
)
|
||||||
|
invalid_query: StrictStr = Field(
|
||||||
|
"{target} is not a valid {query_type} target.",
|
||||||
|
title="Invalid Query",
|
||||||
|
description="Displayed when a query target's value is invalid in relation to the corresponding query type. `{target}` and `{query_type}` may be used to display the invalid target and corresponding query type.",
|
||||||
)
|
)
|
||||||
invalid_field: StrictStr = Field(
|
invalid_field: StrictStr = Field(
|
||||||
"{input} is an invalid {field}.",
|
"{input} is an invalid {field}.",
|
||||||
|
|
@ -45,6 +50,11 @@ class Messages(HyperglassModel):
|
||||||
title="General Error",
|
title="General Error",
|
||||||
description="Displayed when generalized errors occur. Seeing this error message may indicate a bug in hyperglass, as most other errors produced are highly contextual. If you see this in the wild, try enabling [debug mode](/fixme) and review the logs to pinpoint the source of the error.",
|
description="Displayed when generalized errors occur. Seeing this error message may indicate a bug in hyperglass, as most other errors produced are highly contextual. If you see this in the wild, try enabling [debug mode](/fixme) and review the logs to pinpoint the source of the error.",
|
||||||
)
|
)
|
||||||
|
not_found: StrictStr = Field(
|
||||||
|
"{type} '{name}' not found.",
|
||||||
|
title="Not Found",
|
||||||
|
description="Displayed when an object property does not exist in the configuration. `{type}` corresponds to a user-friendly name of the object type (for example, 'Device'), `{name}` corresponds to the object name that was not found.",
|
||||||
|
)
|
||||||
request_timeout: StrictStr = Field(
|
request_timeout: StrictStr = Field(
|
||||||
"Request timed out.",
|
"Request timed out.",
|
||||||
title="Request Timeout",
|
title="Request Timeout",
|
||||||
|
|
|
||||||
|
|
@ -9,7 +9,7 @@ from pydantic import StrictInt, StrictStr, validator
|
||||||
|
|
||||||
# Project
|
# Project
|
||||||
from hyperglass.util import resolve_hostname
|
from hyperglass.util import resolve_hostname
|
||||||
from hyperglass.exceptions import ConfigError, UnsupportedDevice
|
from hyperglass.exceptions.private import ConfigError, UnsupportedDevice
|
||||||
|
|
||||||
# Local
|
# Local
|
||||||
from ..main import HyperglassModel
|
from ..main import HyperglassModel
|
||||||
|
|
@ -32,6 +32,7 @@ class Proxy(HyperglassModel):
|
||||||
@validator("address")
|
@validator("address")
|
||||||
def validate_address(cls, value, values):
|
def validate_address(cls, value, values):
|
||||||
"""Ensure a hostname is resolvable."""
|
"""Ensure a hostname is resolvable."""
|
||||||
|
|
||||||
if not isinstance(value, (IPv4Address, IPv6Address)):
|
if not isinstance(value, (IPv4Address, IPv6Address)):
|
||||||
if not any(resolve_hostname(value)):
|
if not any(resolve_hostname(value)):
|
||||||
raise ConfigError(
|
raise ConfigError(
|
||||||
|
|
@ -43,16 +44,12 @@ class Proxy(HyperglassModel):
|
||||||
|
|
||||||
@validator("nos")
|
@validator("nos")
|
||||||
def supported_nos(cls, value, values):
|
def supported_nos(cls, value, values):
|
||||||
"""Verify NOS is supported by hyperglass.
|
"""Verify NOS is supported by hyperglass."""
|
||||||
|
|
||||||
Raises:
|
|
||||||
UnsupportedDevice: Raised if NOS is not supported.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
{str} -- Valid NOS name
|
|
||||||
"""
|
|
||||||
if not value == "linux_ssh":
|
if not value == "linux_ssh":
|
||||||
raise UnsupportedDevice(
|
raise UnsupportedDevice(
|
||||||
f"Proxy '{values['name']}' uses NOS '{value}', which is currently unsupported."
|
"Proxy '{p}' uses NOS '{n}', which is currently unsupported.",
|
||||||
|
p=values["name"],
|
||||||
|
n=value,
|
||||||
)
|
)
|
||||||
return value
|
return value
|
||||||
|
|
|
||||||
|
|
@ -2,7 +2,7 @@
|
||||||
|
|
||||||
# Standard Library
|
# Standard Library
|
||||||
import re
|
import re
|
||||||
from typing import Dict, List, Union, Optional
|
from typing import Dict, List, Union, Literal, Optional
|
||||||
from ipaddress import IPv4Address, IPv4Network, IPv6Address, IPv6Network
|
from ipaddress import IPv4Address, IPv4Network, IPv6Address, IPv6Network
|
||||||
|
|
||||||
# Third Party
|
# Third Party
|
||||||
|
|
@ -17,7 +17,6 @@ from pydantic import (
|
||||||
validator,
|
validator,
|
||||||
root_validator,
|
root_validator,
|
||||||
)
|
)
|
||||||
from typing_extensions import Literal
|
|
||||||
|
|
||||||
# Project
|
# Project
|
||||||
from hyperglass.log import log
|
from hyperglass.log import log
|
||||||
|
|
|
||||||
|
|
@ -134,7 +134,7 @@ class Text(HyperglassModel):
|
||||||
query_location: StrictStr = "Location"
|
query_location: StrictStr = "Location"
|
||||||
query_type: StrictStr = "Query Type"
|
query_type: StrictStr = "Query Type"
|
||||||
query_target: StrictStr = "Target"
|
query_target: StrictStr = "Target"
|
||||||
query_vrf: StrictStr = "Routing Table"
|
query_group: StrictStr = "Routing Table"
|
||||||
fqdn_tooltip: StrictStr = "Use {protocol}" # Formatted by Javascript
|
fqdn_tooltip: StrictStr = "Use {protocol}" # Formatted by Javascript
|
||||||
fqdn_message: StrictStr = "Your browser has resolved {fqdn} to" # Formatted by Javascript
|
fqdn_message: StrictStr = "Your browser has resolved {fqdn} to" # Formatted by Javascript
|
||||||
fqdn_error: StrictStr = "Unable to resolve {fqdn}" # Formatted by Javascript
|
fqdn_error: StrictStr = "Unable to resolve {fqdn}" # Formatted by Javascript
|
||||||
|
|
|
||||||
|
|
@ -60,7 +60,7 @@ class AnyUri(str):
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def __get_validators__(cls):
|
def __get_validators__(cls):
|
||||||
"""Pydantic custim field method."""
|
"""Pydantic custom field method."""
|
||||||
yield cls.validate
|
yield cls.validate
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
|
|
@ -79,3 +79,35 @@ class AnyUri(str):
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
"""Stringify custom field representation."""
|
"""Stringify custom field representation."""
|
||||||
return f"AnyUri({super().__repr__()})"
|
return f"AnyUri({super().__repr__()})"
|
||||||
|
|
||||||
|
|
||||||
|
class Action(str):
|
||||||
|
"""Custom field type for policy actions."""
|
||||||
|
|
||||||
|
permits = ("permit", "allow", "accept")
|
||||||
|
denies = ("deny", "block", "reject")
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def __get_validators__(cls):
|
||||||
|
"""Pydantic custom field method."""
|
||||||
|
yield cls.validate
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def validate(cls, value: str):
|
||||||
|
"""Ensure action is an allowed value or acceptable alias."""
|
||||||
|
if not isinstance(value, str):
|
||||||
|
raise TypeError("Action type must be a string")
|
||||||
|
value = value.strip().lower()
|
||||||
|
|
||||||
|
if value in cls.permits:
|
||||||
|
return cls("permit")
|
||||||
|
elif value in cls.denies:
|
||||||
|
return cls("deny")
|
||||||
|
|
||||||
|
raise ValueError(
|
||||||
|
"Action must be one of '{}'".format(", ".join((*cls.permits, *cls.denies)))
|
||||||
|
)
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
"""Stringify custom field representation."""
|
||||||
|
return f"Action({super().__repr__()})"
|
||||||
|
|
|
||||||
|
|
@ -9,7 +9,7 @@ from pydantic import ValidationError
|
||||||
|
|
||||||
# Project
|
# Project
|
||||||
from hyperglass.log import log
|
from hyperglass.log import log
|
||||||
from hyperglass.exceptions import ParsingError
|
from hyperglass.exceptions.private import ParsingError
|
||||||
from hyperglass.models.parsing.arista_eos import AristaRoute
|
from hyperglass.models.parsing.arista_eos import AristaRoute
|
||||||
|
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -5,12 +5,12 @@ import re
|
||||||
from typing import Dict, List, Sequence, Generator
|
from typing import Dict, List, Sequence, Generator
|
||||||
|
|
||||||
# Third Party
|
# Third Party
|
||||||
import xmltodict
|
import xmltodict # type:ignore
|
||||||
from pydantic import ValidationError
|
from pydantic import ValidationError
|
||||||
|
|
||||||
# Project
|
# Project
|
||||||
from hyperglass.log import log
|
from hyperglass.log import log
|
||||||
from hyperglass.exceptions import ParsingError
|
from hyperglass.exceptions.private import ParsingError
|
||||||
from hyperglass.models.parsing.juniper import JuniperRoute
|
from hyperglass.models.parsing.juniper import JuniperRoute
|
||||||
|
|
||||||
REMOVE_PATTERNS = (
|
REMOVE_PATTERNS = (
|
||||||
|
|
|
||||||
|
|
@ -7,7 +7,7 @@
|
||||||
import re
|
import re
|
||||||
|
|
||||||
# Project
|
# Project
|
||||||
from hyperglass.exceptions import ParsingError
|
from hyperglass.exceptions.private import ParsingError
|
||||||
|
|
||||||
|
|
||||||
def _process_numbers(numbers):
|
def _process_numbers(numbers):
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,5 @@
|
||||||
export * from './row';
|
export * from './row';
|
||||||
export * from './field';
|
export * from './field';
|
||||||
export * from './queryVrf';
|
|
||||||
export * from './queryType';
|
export * from './queryType';
|
||||||
export * from './queryGroup';
|
export * from './queryGroup';
|
||||||
export * from './queryTarget';
|
export * from './queryTarget';
|
||||||
|
|
|
||||||
|
|
@ -2,46 +2,14 @@ import { useMemo } from 'react';
|
||||||
import { Select } from '~/components';
|
import { Select } from '~/components';
|
||||||
import { useLGMethods, useLGState } from '~/hooks';
|
import { useLGMethods, useLGState } from '~/hooks';
|
||||||
|
|
||||||
import type { TNetwork, TSelectOption } from '~/types';
|
import type { TSelectOption } from '~/types';
|
||||||
import type { TQueryGroup } from './types';
|
import type { TQueryGroup } from './types';
|
||||||
|
|
||||||
// function buildOptions(queryVrfs: TDeviceVrf[]): TSelectOption[] {
|
|
||||||
// return queryVrfs.map(q => ({ value: q._id, label: q.display_name }));
|
|
||||||
// }
|
|
||||||
|
|
||||||
type QueryGroups = Record<string, string[]>;
|
|
||||||
|
|
||||||
function buildOptions(networks: TNetwork[]): QueryGroups {
|
|
||||||
const options = {} as QueryGroups;
|
|
||||||
for (const net of networks) {
|
|
||||||
for (const loc of net.locations) {
|
|
||||||
for (const directive of loc.directives) {
|
|
||||||
for (const group of directive.groups) {
|
|
||||||
if (Object.keys(options).includes(group)) {
|
|
||||||
options[group] = [...options[group], loc.name];
|
|
||||||
} else {
|
|
||||||
options[group] = [loc.name];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return options;
|
|
||||||
}
|
|
||||||
|
|
||||||
export const QueryGroup: React.FC<TQueryGroup> = (props: TQueryGroup) => {
|
export const QueryGroup: React.FC<TQueryGroup> = (props: TQueryGroup) => {
|
||||||
const { groups, onChange, label } = props;
|
const { onChange, label } = props;
|
||||||
const { selections, availableGroups, queryLocation, queryGroup } = useLGState();
|
const { selections, availableGroups, queryLocation } = useLGState();
|
||||||
const { exportState } = useLGMethods();
|
const { exportState } = useLGMethods();
|
||||||
|
|
||||||
// const groups = useMemo(() => buildOptions(networks), []);
|
|
||||||
// const options = useMemo<TSelectOption[]>(
|
|
||||||
// () => Object.keys(groups).map(key => ({ label: key, value: key })),
|
|
||||||
// [groups],
|
|
||||||
// );
|
|
||||||
// const options = useMemo<TSelectOption[]>(() => groups.map(g => ({ label: g, value: g })), [
|
|
||||||
// groups,
|
|
||||||
// ]);
|
|
||||||
const options = useMemo<TSelectOption[]>(
|
const options = useMemo<TSelectOption[]>(
|
||||||
() => availableGroups.map(g => ({ label: g.value, value: g.value })),
|
() => availableGroups.map(g => ({ label: g.value, value: g.value })),
|
||||||
[availableGroups.length, queryLocation.length],
|
[availableGroups.length, queryLocation.length],
|
||||||
|
|
|
||||||
|
|
@ -2,19 +2,23 @@ import { useMemo } from 'react';
|
||||||
import { Input, Text } from '@chakra-ui/react';
|
import { Input, Text } from '@chakra-ui/react';
|
||||||
import { components } from 'react-select';
|
import { components } from 'react-select';
|
||||||
import { If, Select } from '~/components';
|
import { If, Select } from '~/components';
|
||||||
import { useConfig, useColorValue } from '~/context';
|
import { useColorValue } from '~/context';
|
||||||
import { useLGState } from '~/hooks';
|
import { useLGState, useDirective } from '~/hooks';
|
||||||
|
import { isSelectDirective } from '~/types';
|
||||||
|
|
||||||
import type { OptionProps } from 'react-select';
|
import type { OptionProps } from 'react-select';
|
||||||
import type { TBGPCommunity, TSelectOption } from '~/types';
|
import type { TSelectOption, TDirective } from '~/types';
|
||||||
import type { TQueryTarget } from './types';
|
import type { TQueryTarget } from './types';
|
||||||
|
|
||||||
function buildOptions(communities: TBGPCommunity[]): TSelectOption[] {
|
function buildOptions(directive: Nullable<TDirective>): TSelectOption[] {
|
||||||
return communities.map(c => ({
|
if (directive !== null && isSelectDirective(directive)) {
|
||||||
value: c.community,
|
return directive.options.map(o => ({
|
||||||
label: c.display_name,
|
value: o.value,
|
||||||
description: c.description,
|
label: o.name,
|
||||||
}));
|
description: o.description,
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
return [];
|
||||||
}
|
}
|
||||||
|
|
||||||
const Option = (props: OptionProps<Dict, false>) => {
|
const Option = (props: OptionProps<Dict, false>) => {
|
||||||
|
|
@ -38,11 +42,10 @@ export const QueryTarget: React.FC<TQueryTarget> = (props: TQueryTarget) => {
|
||||||
const border = useColorValue('gray.100', 'whiteAlpha.50');
|
const border = useColorValue('gray.100', 'whiteAlpha.50');
|
||||||
const placeholderColor = useColorValue('gray.600', 'whiteAlpha.700');
|
const placeholderColor = useColorValue('gray.600', 'whiteAlpha.700');
|
||||||
|
|
||||||
const { queryType, queryTarget, displayTarget } = useLGState();
|
const { queryTarget, displayTarget } = useLGState();
|
||||||
|
const directive = useDirective();
|
||||||
|
|
||||||
const { queries } = useConfig();
|
const options = useMemo(() => buildOptions(directive), [directive, buildOptions]);
|
||||||
|
|
||||||
const options = useMemo(() => buildOptions(queries.bgp_community.communities), []);
|
|
||||||
|
|
||||||
function handleInputChange(e: React.ChangeEvent<HTMLInputElement>): void {
|
function handleInputChange(e: React.ChangeEvent<HTMLInputElement>): void {
|
||||||
displayTarget.set(e.target.value);
|
displayTarget.set(e.target.value);
|
||||||
|
|
@ -58,8 +61,8 @@ export const QueryTarget: React.FC<TQueryTarget> = (props: TQueryTarget) => {
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<>
|
<>
|
||||||
<input {...register} hidden readOnly value={queryTarget.value} />
|
<input {...register('query_target')} hidden readOnly value={queryTarget.value} />
|
||||||
<If c={queryType.value === 'bgp_community' && queries.bgp_community.mode === 'select'}>
|
<If c={directive !== null && isSelectDirective(directive)}>
|
||||||
<Select
|
<Select
|
||||||
size="lg"
|
size="lg"
|
||||||
name={name}
|
name={name}
|
||||||
|
|
@ -69,7 +72,7 @@ export const QueryTarget: React.FC<TQueryTarget> = (props: TQueryTarget) => {
|
||||||
onChange={handleSelectChange}
|
onChange={handleSelectChange}
|
||||||
/>
|
/>
|
||||||
</If>
|
</If>
|
||||||
<If c={!(queryType.value === 'bgp_community' && queries.bgp_community.mode === 'select')}>
|
<If c={directive === null || !isSelectDirective(directive)}>
|
||||||
<Input
|
<Input
|
||||||
bg={bg}
|
bg={bg}
|
||||||
size="lg"
|
size="lg"
|
||||||
|
|
|
||||||
|
|
@ -1,51 +1,23 @@
|
||||||
import { useMemo } from 'react';
|
import { useMemo } from 'react';
|
||||||
import { useFormContext } from 'react-hook-form';
|
import { useFormContext } from 'react-hook-form';
|
||||||
import { uniqBy } from 'lodash';
|
|
||||||
import { Select } from '~/components';
|
import { Select } from '~/components';
|
||||||
import { useConfig } from '~/context';
|
|
||||||
import { useLGState, useLGMethods } from '~/hooks';
|
import { useLGState, useLGMethods } from '~/hooks';
|
||||||
|
|
||||||
import type { TNetwork, TSelectOption } from '~/types';
|
import type { TSelectOption } from '~/types';
|
||||||
import type { TQuerySelectField } from './types';
|
import type { TQuerySelectField } from './types';
|
||||||
|
|
||||||
// function buildOptions(queryTypes: TQuery[]): TSelectOption[] {
|
|
||||||
// return queryTypes
|
|
||||||
// .filter(q => q.enable === true)
|
|
||||||
// .map(q => ({ value: q.name, label: q.display_name }));
|
|
||||||
// }
|
|
||||||
|
|
||||||
// function* buildOptions(networks: TNetwork[]): Generator<TSelectOption> {
|
|
||||||
// for (const net of networks) {
|
|
||||||
// for (const loc of net.locations) {
|
|
||||||
// for (const directive of loc.directives) {
|
|
||||||
// const { name } = directive;
|
|
||||||
// yield { value: name, label: name };
|
|
||||||
// }
|
|
||||||
// }
|
|
||||||
// }
|
|
||||||
// }
|
|
||||||
|
|
||||||
export const QueryType: React.FC<TQuerySelectField> = (props: TQuerySelectField) => {
|
export const QueryType: React.FC<TQuerySelectField> = (props: TQuerySelectField) => {
|
||||||
const { onChange, label } = props;
|
const { onChange, label } = props;
|
||||||
// const {
|
|
||||||
// queries,
|
|
||||||
// networks,
|
|
||||||
// } = useConfig();
|
|
||||||
const {
|
const {
|
||||||
formState: { errors },
|
formState: { errors },
|
||||||
} = useFormContext();
|
} = useFormContext();
|
||||||
const { selections, availableTypes, queryType } = useLGState();
|
const { selections, availableTypes, queryType } = useLGState();
|
||||||
const { exportState } = useLGMethods();
|
const { exportState } = useLGMethods();
|
||||||
|
|
||||||
// const options = useMemo(() => buildOptions(queries.list), [queries.list.length]);
|
const options = useMemo(
|
||||||
// const options = useMemo(() => Array.from(buildOptions(networks)), []);
|
() => availableTypes.map(t => ({ label: t.name.value, value: t.id.value })),
|
||||||
// const options = useMemo(
|
[availableTypes.length],
|
||||||
// () => uniqBy<TSelectOption>(Array.from(buildOptions(networks)), opt => opt?.label),
|
);
|
||||||
// [],
|
|
||||||
// );
|
|
||||||
const options = useMemo(() => availableTypes.map(t => ({ label: t.value, value: t.value })), [
|
|
||||||
availableTypes.length,
|
|
||||||
]);
|
|
||||||
|
|
||||||
function handleChange(e: TSelectOption | TSelectOption[]): void {
|
function handleChange(e: TSelectOption | TSelectOption[]): void {
|
||||||
let value = '';
|
let value = '';
|
||||||
|
|
@ -67,7 +39,6 @@ export const QueryType: React.FC<TQuerySelectField> = (props: TQuerySelectField)
|
||||||
aria-label={label}
|
aria-label={label}
|
||||||
onChange={handleChange}
|
onChange={handleChange}
|
||||||
value={exportState(selections.queryType.value)}
|
value={exportState(selections.queryType.value)}
|
||||||
// isError={typeof errors.query_type !== 'undefined'}
|
|
||||||
isError={'query_type' in errors}
|
isError={'query_type' in errors}
|
||||||
/>
|
/>
|
||||||
);
|
);
|
||||||
|
|
|
||||||
|
|
@ -1,38 +0,0 @@
|
||||||
import { useMemo } from 'react';
|
|
||||||
import { Select } from '~/components';
|
|
||||||
import { useLGMethods, useLGState } from '~/hooks';
|
|
||||||
|
|
||||||
import type { TDeviceVrf, TSelectOption } from '~/types';
|
|
||||||
import type { TQueryVrf } from './types';
|
|
||||||
|
|
||||||
function buildOptions(queryVrfs: TDeviceVrf[]): TSelectOption[] {
|
|
||||||
return queryVrfs.map(q => ({ value: q._id, label: q.display_name }));
|
|
||||||
}
|
|
||||||
|
|
||||||
export const QueryVrf: React.FC<TQueryVrf> = (props: TQueryVrf) => {
|
|
||||||
const { vrfs, onChange, label } = props;
|
|
||||||
const { selections } = useLGState();
|
|
||||||
const { exportState } = useLGMethods();
|
|
||||||
|
|
||||||
const options = useMemo(() => buildOptions(vrfs), [vrfs.length]);
|
|
||||||
|
|
||||||
function handleChange(e: TSelectOption | TSelectOption[]): void {
|
|
||||||
if (!Array.isArray(e) && e !== null) {
|
|
||||||
selections.queryVrf.set(e);
|
|
||||||
onChange({ field: 'query_vrf', value: e.value });
|
|
||||||
} else {
|
|
||||||
selections.queryVrf.set(null);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return (
|
|
||||||
<Select
|
|
||||||
size="lg"
|
|
||||||
name="query_vrf"
|
|
||||||
options={options}
|
|
||||||
aria-label={label}
|
|
||||||
onChange={handleChange}
|
|
||||||
value={exportState(selections.queryVrf.value)}
|
|
||||||
/>
|
|
||||||
);
|
|
||||||
};
|
|
||||||
|
|
@ -1,6 +1,6 @@
|
||||||
import type { FormControlProps } from '@chakra-ui/react';
|
import type { FormControlProps } from '@chakra-ui/react';
|
||||||
import type { UseFormRegister } from 'react-hook-form';
|
import type { UseFormRegister } from 'react-hook-form';
|
||||||
import type { TDeviceVrf, TBGPCommunity, OnChangeArgs, TFormData } from '~/types';
|
import type { TBGPCommunity, OnChangeArgs, TFormData } from '~/types';
|
||||||
|
|
||||||
export interface TField extends FormControlProps {
|
export interface TField extends FormControlProps {
|
||||||
name: string;
|
name: string;
|
||||||
|
|
@ -17,10 +17,6 @@ export interface TQuerySelectField {
|
||||||
label: string;
|
label: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface TQueryVrf extends TQuerySelectField {
|
|
||||||
vrfs: TDeviceVrf[];
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface TQueryGroup extends TQuerySelectField {
|
export interface TQueryGroup extends TQuerySelectField {
|
||||||
groups: string[];
|
groups: string[];
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -19,9 +19,10 @@ import {
|
||||||
} from '~/components';
|
} from '~/components';
|
||||||
import { useConfig } from '~/context';
|
import { useConfig } from '~/context';
|
||||||
import { useStrf, useGreeting, useDevice, useLGState, useLGMethods } from '~/hooks';
|
import { useStrf, useGreeting, useDevice, useLGState, useLGMethods } from '~/hooks';
|
||||||
import { isQueryType, isQueryContent, isString, isQueryField, TDirective } from '~/types';
|
import { dedupObjectArray } from '~/util';
|
||||||
|
import { isString, isQueryField, TDirective } from '~/types';
|
||||||
|
|
||||||
import type { TFormData, TDeviceVrf, OnChangeArgs } from '~/types';
|
import type { TFormData, OnChangeArgs } from '~/types';
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Don't set the global flag on this.
|
* Don't set the global flag on this.
|
||||||
|
|
@ -105,16 +106,12 @@ export const LookingGlass: React.FC = () => {
|
||||||
if (queryType.value === '') {
|
if (queryType.value === '') {
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
for (const loc of queryLocation) {
|
const directive = getDirective(queryType.value);
|
||||||
const device = getDevice(loc.value);
|
if (directive !== null) {
|
||||||
for (const directive of device.directives) {
|
return directive;
|
||||||
if (directive.name === queryType.value) {
|
|
||||||
return directive;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
return null;
|
return null;
|
||||||
}, [queryType.value]);
|
}, [queryType.value, queryGroup.value]);
|
||||||
|
|
||||||
function submitHandler() {
|
function submitHandler() {
|
||||||
console.table({
|
console.table({
|
||||||
|
|
@ -159,7 +156,6 @@ export const LookingGlass: React.FC = () => {
|
||||||
|
|
||||||
function handleLocChange(locations: string[]): void {
|
function handleLocChange(locations: string[]): void {
|
||||||
clearErrors('query_location');
|
clearErrors('query_location');
|
||||||
const allVrfs = [] as TDeviceVrf[][];
|
|
||||||
const locationNames = [] as string[];
|
const locationNames = [] as string[];
|
||||||
const allGroups = [] as string[][];
|
const allGroups = [] as string[][];
|
||||||
const allTypes = [] as TDirective[][];
|
const allTypes = [] as TDirective[][];
|
||||||
|
|
@ -171,7 +167,6 @@ export const LookingGlass: React.FC = () => {
|
||||||
for (const loc of locations) {
|
for (const loc of locations) {
|
||||||
const device = getDevice(loc);
|
const device = getDevice(loc);
|
||||||
locationNames.push(device.name);
|
locationNames.push(device.name);
|
||||||
allVrfs.push(device.vrfs);
|
|
||||||
allDevices.push(device);
|
allDevices.push(device);
|
||||||
const groups = new Set<string>();
|
const groups = new Set<string>();
|
||||||
for (const directive of device.directives) {
|
for (const directive of device.directives) {
|
||||||
|
|
@ -231,18 +226,19 @@ export const LookingGlass: React.FC = () => {
|
||||||
|
|
||||||
function handleGroupChange(group: string): void {
|
function handleGroupChange(group: string): void {
|
||||||
queryGroup.set(group);
|
queryGroup.set(group);
|
||||||
const availTypes = new Set<string>();
|
let availTypes = new Array<TDirective>();
|
||||||
for (const loc of queryLocation) {
|
for (const loc of queryLocation) {
|
||||||
const device = getDevice(loc.value);
|
const device = getDevice(loc.value);
|
||||||
for (const directive of device.directives) {
|
for (const directive of device.directives) {
|
||||||
if (directive.groups.includes(group)) {
|
if (directive.groups.includes(group)) {
|
||||||
availTypes.add(directive.name);
|
availTypes.push(directive);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
availableTypes.set(Array.from(availTypes));
|
availTypes = dedupObjectArray<TDirective>(availTypes, 'id');
|
||||||
|
availableTypes.set(availTypes);
|
||||||
if (availableTypes.length === 1) {
|
if (availableTypes.length === 1) {
|
||||||
queryType.set(availableTypes[0].value);
|
queryType.set(availableTypes[0].name.value);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -277,7 +273,7 @@ export const LookingGlass: React.FC = () => {
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
register('query_location', { required: true });
|
register('query_location', { required: true });
|
||||||
register('query_target', { required: true });
|
// register('query_target', { required: true });
|
||||||
register('query_type', { required: true });
|
register('query_type', { required: true });
|
||||||
register('query_group');
|
register('query_group');
|
||||||
}, [register]);
|
}, [register]);
|
||||||
|
|
@ -303,9 +299,9 @@ export const LookingGlass: React.FC = () => {
|
||||||
<QueryLocation onChange={handleChange} label={web.text.query_location} />
|
<QueryLocation onChange={handleChange} label={web.text.query_location} />
|
||||||
</FormField>
|
</FormField>
|
||||||
<If c={availableGroups.length > 1}>
|
<If c={availableGroups.length > 1}>
|
||||||
<FormField label={web.text.query_vrf} name="query_group">
|
<FormField label={web.text.query_group} name="query_group">
|
||||||
<QueryGroup
|
<QueryGroup
|
||||||
label={web.text.query_vrf}
|
label={web.text.query_group}
|
||||||
groups={availableGroups.value}
|
groups={availableGroups.value}
|
||||||
onChange={handleChange}
|
onChange={handleChange}
|
||||||
/>
|
/>
|
||||||
|
|
@ -319,8 +315,8 @@ export const LookingGlass: React.FC = () => {
|
||||||
label={web.text.query_type}
|
label={web.text.query_type}
|
||||||
labelAddOn={
|
labelAddOn={
|
||||||
<HelpModal
|
<HelpModal
|
||||||
visible={selectedDirective?.info !== null}
|
visible={selectedDirective?.info.value !== null}
|
||||||
item={selectedDirective?.info ?? null}
|
item={selectedDirective?.info.value ?? null}
|
||||||
name="query_type"
|
name="query_type"
|
||||||
/>
|
/>
|
||||||
}
|
}
|
||||||
|
|
@ -335,7 +331,7 @@ export const LookingGlass: React.FC = () => {
|
||||||
name="query_target"
|
name="query_target"
|
||||||
register={register}
|
register={register}
|
||||||
onChange={handleChange}
|
onChange={handleChange}
|
||||||
placeholder={selectedDirective.description}
|
placeholder={selectedDirective.description.value}
|
||||||
/>
|
/>
|
||||||
</FormField>
|
</FormField>
|
||||||
)}
|
)}
|
||||||
|
|
|
||||||
|
|
@ -1,17 +1,29 @@
|
||||||
|
import { useMemo } from 'react';
|
||||||
import { Box, Stack, useToken } from '@chakra-ui/react';
|
import { Box, Stack, useToken } from '@chakra-ui/react';
|
||||||
import { motion, AnimatePresence } from 'framer-motion';
|
import { motion, AnimatePresence } from 'framer-motion';
|
||||||
import { Label } from '~/components';
|
import { Label } from '~/components';
|
||||||
import { useConfig, useBreakpointValue } from '~/context';
|
import { useConfig, useBreakpointValue } from '~/context';
|
||||||
import { useLGState, useVrf } from '~/hooks';
|
import { useLGState, useLGMethods } from '~/hooks';
|
||||||
import { isQueryType } from '~/types';
|
|
||||||
|
|
||||||
import type { Transition } from 'framer-motion';
|
import type { Transition } from 'framer-motion';
|
||||||
|
|
||||||
const transition = { duration: 0.3, delay: 0.5 } as Transition;
|
const transition = { duration: 0.3, delay: 0.5 } as Transition;
|
||||||
|
|
||||||
export const Tags: React.FC = () => {
|
export const Tags: React.FC = () => {
|
||||||
const { queries, web } = useConfig();
|
const { web } = useConfig();
|
||||||
const { queryLocation, queryTarget, queryType, queryVrf } = useLGState();
|
const { queryLocation, queryTarget, queryType, queryGroup } = useLGState();
|
||||||
|
const { getDirective } = useLGMethods();
|
||||||
|
|
||||||
|
const selectedDirective = useMemo(() => {
|
||||||
|
if (queryType.value === '') {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
const directive = getDirective(queryType.value);
|
||||||
|
if (directive !== null) {
|
||||||
|
return directive;
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
}, [queryType.value, queryGroup.value]);
|
||||||
|
|
||||||
const targetBg = useToken('colors', 'teal.600');
|
const targetBg = useToken('colors', 'teal.600');
|
||||||
const queryBg = useToken('colors', 'cyan.500');
|
const queryBg = useToken('colors', 'cyan.500');
|
||||||
|
|
@ -59,14 +71,6 @@ export const Tags: React.FC = () => {
|
||||||
xl: { opacity: 0, x: '100%' },
|
xl: { opacity: 0, x: '100%' },
|
||||||
});
|
});
|
||||||
|
|
||||||
let queryTypeLabel = '';
|
|
||||||
if (isQueryType(queryType.value)) {
|
|
||||||
queryTypeLabel = queries[queryType.value].display_name;
|
|
||||||
}
|
|
||||||
|
|
||||||
// const getVrf = useVrf();
|
|
||||||
// const vrf = getVrf(queryVrf.value);
|
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<Box
|
<Box
|
||||||
p={0}
|
p={0}
|
||||||
|
|
@ -90,7 +94,7 @@ export const Tags: React.FC = () => {
|
||||||
bg={queryBg}
|
bg={queryBg}
|
||||||
label={web.text.query_type}
|
label={web.text.query_type}
|
||||||
fontSize={{ base: 'xs', md: 'sm' }}
|
fontSize={{ base: 'xs', md: 'sm' }}
|
||||||
value={queryTypeLabel}
|
value={selectedDirective?.value.name ?? 'None'}
|
||||||
/>
|
/>
|
||||||
</motion.div>
|
</motion.div>
|
||||||
<motion.div
|
<motion.div
|
||||||
|
|
@ -114,9 +118,8 @@ export const Tags: React.FC = () => {
|
||||||
>
|
>
|
||||||
<Label
|
<Label
|
||||||
bg={vrfBg}
|
bg={vrfBg}
|
||||||
label={web.text.query_vrf}
|
label={web.text.query_group}
|
||||||
// value={vrf.display_name}
|
value={queryGroup.value}
|
||||||
value="fix me"
|
|
||||||
fontSize={{ base: 'xs', md: 'sm' }}
|
fontSize={{ base: 'xs', md: 'sm' }}
|
||||||
/>
|
/>
|
||||||
</motion.div>
|
</motion.div>
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,7 @@
|
||||||
export * from './useASNDetail';
|
export * from './useASNDetail';
|
||||||
export * from './useBooleanValue';
|
export * from './useBooleanValue';
|
||||||
export * from './useDevice';
|
export * from './useDevice';
|
||||||
|
export * from './useDirective';
|
||||||
export * from './useDNSQuery';
|
export * from './useDNSQuery';
|
||||||
export * from './useGoogleAnalytics';
|
export * from './useGoogleAnalytics';
|
||||||
export * from './useGreeting';
|
export * from './useGreeting';
|
||||||
|
|
@ -9,4 +10,3 @@ export * from './useLGState';
|
||||||
export * from './useOpposingColor';
|
export * from './useOpposingColor';
|
||||||
export * from './useStrf';
|
export * from './useStrf';
|
||||||
export * from './useTableToString';
|
export * from './useTableToString';
|
||||||
export * from './useVrf';
|
|
||||||
|
|
|
||||||
|
|
@ -1,14 +1,6 @@
|
||||||
import type { State } from '@hookstate/core';
|
import type { State } from '@hookstate/core';
|
||||||
import type * as ReactGA from 'react-ga';
|
import type * as ReactGA from 'react-ga';
|
||||||
import type {
|
import type { TDevice, Families, TFormQuery, TDeviceVrf, TSelectOption, TDirective } from '~/types';
|
||||||
TDevice,
|
|
||||||
Families,
|
|
||||||
TFormQuery,
|
|
||||||
TDeviceVrf,
|
|
||||||
TQueryTypes,
|
|
||||||
TSelectOption,
|
|
||||||
TDirective,
|
|
||||||
} from '~/types';
|
|
||||||
|
|
||||||
export type LGQueryKey = [string, TFormQuery];
|
export type LGQueryKey = [string, TFormQuery];
|
||||||
export type DNSQueryKey = [string, { target: string | null; family: 4 | 6 }];
|
export type DNSQueryKey = [string, { target: string | null; family: 4 | 6 }];
|
||||||
|
|
|
||||||
20
hyperglass/ui/hooks/useDirective.ts
Normal file
20
hyperglass/ui/hooks/useDirective.ts
Normal file
|
|
@ -0,0 +1,20 @@
|
||||||
|
import { useMemo } from 'react';
|
||||||
|
import { useLGMethods, useLGState } from './useLGState';
|
||||||
|
|
||||||
|
import type { TDirective } from '~/types';
|
||||||
|
|
||||||
|
export function useDirective(): Nullable<TDirective> {
|
||||||
|
const { queryType, queryGroup } = useLGState();
|
||||||
|
const { getDirective } = useLGMethods();
|
||||||
|
|
||||||
|
return useMemo((): Nullable<TDirective> => {
|
||||||
|
if (queryType.value === '') {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
const directive = getDirective(queryType.value);
|
||||||
|
if (directive !== null) {
|
||||||
|
return directive.value;
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
}, [queryType.value, queryGroup.value]);
|
||||||
|
}
|
||||||
|
|
@ -23,24 +23,24 @@ export function useLGQuery(query: TFormQuery): QueryObserverResult<TQueryRespons
|
||||||
dimension1: query.queryLocation,
|
dimension1: query.queryLocation,
|
||||||
dimension2: query.queryTarget,
|
dimension2: query.queryTarget,
|
||||||
dimension3: query.queryType,
|
dimension3: query.queryType,
|
||||||
dimension4: query.queryVrf,
|
dimension4: query.queryGroup,
|
||||||
});
|
});
|
||||||
|
|
||||||
const runQuery: QueryFunction<TQueryResponse, LGQueryKey> = async (
|
const runQuery: QueryFunction<TQueryResponse, LGQueryKey> = async (
|
||||||
ctx: QueryFunctionContext<LGQueryKey>,
|
ctx: QueryFunctionContext<LGQueryKey>,
|
||||||
): Promise<TQueryResponse> => {
|
): Promise<TQueryResponse> => {
|
||||||
const [url, data] = ctx.queryKey;
|
const [url, data] = ctx.queryKey;
|
||||||
const { queryLocation, queryTarget, queryType, queryVrf } = data;
|
const { queryLocation, queryTarget, queryType, queryGroup } = data;
|
||||||
const res = await fetchWithTimeout(
|
const res = await fetchWithTimeout(
|
||||||
url,
|
url,
|
||||||
{
|
{
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
headers: { 'content-type': 'application/json' },
|
headers: { 'content-type': 'application/json' },
|
||||||
body: JSON.stringify({
|
body: JSON.stringify({
|
||||||
query_location: queryLocation,
|
queryLocation,
|
||||||
query_target: queryTarget,
|
queryTarget,
|
||||||
query_type: queryType,
|
queryType,
|
||||||
query_vrf: queryVrf,
|
queryGroup,
|
||||||
}),
|
}),
|
||||||
mode: 'cors',
|
mode: 'cors',
|
||||||
},
|
},
|
||||||
|
|
|
||||||
|
|
@ -39,7 +39,7 @@ class MethodsInstance {
|
||||||
}
|
}
|
||||||
|
|
||||||
public getDirective(state: State<TLGState>, name: string): Nullable<State<TDirective>> {
|
public getDirective(state: State<TLGState>, name: string): Nullable<State<TDirective>> {
|
||||||
const [directive] = state.availableTypes.filter(t => t.name.value === name);
|
const [directive] = state.availableTypes.filter(t => t.id.value === name);
|
||||||
if (typeof directive !== 'undefined') {
|
if (typeof directive !== 'undefined') {
|
||||||
return directive;
|
return directive;
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,33 +0,0 @@
|
||||||
import { useCallback, useMemo } from 'react';
|
|
||||||
import { useConfig } from '~/context';
|
|
||||||
|
|
||||||
import type { TDeviceVrf } from '~/types';
|
|
||||||
import type { TUseVrf } from './types';
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get a VRF configuration from the global configuration context based on its name.
|
|
||||||
*/
|
|
||||||
export function useVrf(): TUseVrf {
|
|
||||||
const { networks } = useConfig();
|
|
||||||
|
|
||||||
const vrfs = useMemo(() => networks.map(n => n.locations.map(l => l.vrfs).flat()).flat(), []);
|
|
||||||
|
|
||||||
function getVrf(id: string): TDeviceVrf {
|
|
||||||
const matching = vrfs.find(vrf => vrf._id === id);
|
|
||||||
if (typeof matching === 'undefined') {
|
|
||||||
if (id === '__hyperglass_default') {
|
|
||||||
const anyDefault = vrfs.find(vrf => vrf.default === true);
|
|
||||||
if (typeof anyDefault !== 'undefined') {
|
|
||||||
return anyDefault;
|
|
||||||
} else {
|
|
||||||
throw new Error(`No matching VRF found for '${id}'`);
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
throw new Error(`No matching VRF found for '${id}'`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return matching;
|
|
||||||
}
|
|
||||||
|
|
||||||
return useCallback(getVrf, []);
|
|
||||||
}
|
|
||||||
3
hyperglass/ui/package.json
vendored
3
hyperglass/ui/package.json
vendored
|
|
@ -11,9 +11,6 @@
|
||||||
"start": "next start",
|
"start": "next start",
|
||||||
"typecheck": "tsc --noEmit",
|
"typecheck": "tsc --noEmit",
|
||||||
"format": "prettier -c .",
|
"format": "prettier -c .",
|
||||||
"clean": "rimraf --no-glob ./.next ./out",
|
|
||||||
"check:es:export": "es-check es5 './out/**/*.js' -v",
|
|
||||||
"check:es:build": "es-check es5 './.next/static/**/*.js' -v",
|
|
||||||
"build": "next build && next export -o ../hyperglass/static/ui"
|
"build": "next build && next export -o ../hyperglass/static/ui"
|
||||||
},
|
},
|
||||||
"browserslist": "> 0.25%, not dead",
|
"browserslist": "> 0.25%, not dead",
|
||||||
|
|
|
||||||
|
|
@ -16,8 +16,6 @@ export interface IConfigMessages {
|
||||||
connection_error: string;
|
connection_error: string;
|
||||||
authentication_error: string;
|
authentication_error: string;
|
||||||
no_response: string;
|
no_response: string;
|
||||||
vrf_not_associated: string;
|
|
||||||
vrf_not_found: string;
|
|
||||||
no_output: string;
|
no_output: string;
|
||||||
parsing_error: string;
|
parsing_error: string;
|
||||||
}
|
}
|
||||||
|
|
@ -35,7 +33,7 @@ export interface IConfigWebText {
|
||||||
query_location: string;
|
query_location: string;
|
||||||
query_type: string;
|
query_type: string;
|
||||||
query_target: string;
|
query_target: string;
|
||||||
query_vrf: string;
|
query_group: string;
|
||||||
fqdn_tooltip: string;
|
fqdn_tooltip: string;
|
||||||
fqdn_message: string;
|
fqdn_message: string;
|
||||||
fqdn_error: string;
|
fqdn_error: string;
|
||||||
|
|
@ -133,40 +131,35 @@ export interface TDeviceVrf extends TDeviceVrfBase {
|
||||||
ipv6: boolean;
|
ipv6: boolean;
|
||||||
}
|
}
|
||||||
|
|
||||||
interface TDirectiveBase {
|
type TDirectiveBase = {
|
||||||
id: string;
|
id: string;
|
||||||
name: string;
|
name: string;
|
||||||
field_type: 'text' | 'select' | null;
|
field_type: 'text' | 'select' | null;
|
||||||
description: string;
|
description: string;
|
||||||
groups: string[];
|
groups: string[];
|
||||||
info: TQueryContent | null;
|
info: TQueryContent | null;
|
||||||
}
|
};
|
||||||
|
|
||||||
interface TDirectiveOption {
|
export type TDirectiveOption = {
|
||||||
name: string;
|
name: string;
|
||||||
value: string;
|
value: string;
|
||||||
}
|
description: string | null;
|
||||||
|
};
|
||||||
|
|
||||||
interface TDirectiveSelect extends TDirectiveBase {
|
export type TDirectiveSelect = TDirectiveBase & {
|
||||||
options: TDirectiveOption[];
|
options: TDirectiveOption[];
|
||||||
}
|
};
|
||||||
|
|
||||||
export type TDirective = TDirectiveBase | TDirectiveSelect;
|
export type TDirective = TDirectiveBase | TDirectiveSelect;
|
||||||
|
|
||||||
interface TDeviceBase {
|
export interface TDevice {
|
||||||
_id: string;
|
_id: string;
|
||||||
name: string;
|
name: string;
|
||||||
network: string;
|
network: string;
|
||||||
directives: TDirective[];
|
directives: TDirective[];
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface TDevice extends TDeviceBase {
|
export interface TNetworkLocation extends TDevice {}
|
||||||
vrfs: TDeviceVrf[];
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface TNetworkLocation extends TDeviceBase {
|
|
||||||
vrfs: TDeviceVrf[];
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface TNetwork {
|
export interface TNetwork {
|
||||||
display_name: string;
|
display_name: string;
|
||||||
|
|
@ -190,15 +183,6 @@ export interface TQueryContent {
|
||||||
export interface IConfigContent {
|
export interface IConfigContent {
|
||||||
credit: string;
|
credit: string;
|
||||||
greeting: string;
|
greeting: string;
|
||||||
vrf: {
|
|
||||||
[k: string]: {
|
|
||||||
bgp_route: TQueryContent;
|
|
||||||
bgp_community: TQueryContent;
|
|
||||||
bgp_aspath: TQueryContent;
|
|
||||||
ping: TQueryContent;
|
|
||||||
traceroute: TQueryContent;
|
|
||||||
};
|
|
||||||
};
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface IConfig {
|
export interface IConfig {
|
||||||
|
|
@ -218,7 +202,6 @@ export interface IConfig {
|
||||||
queries: TConfigQueries;
|
queries: TConfigQueries;
|
||||||
devices: TDevice[];
|
devices: TDevice[];
|
||||||
networks: TNetwork[];
|
networks: TNetwork[];
|
||||||
vrfs: TDeviceVrfBase[];
|
|
||||||
parsed_data_fields: TParsedDataField[];
|
parsed_data_fields: TParsedDataField[];
|
||||||
content: IConfigContent;
|
content: IConfigContent;
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -11,7 +11,7 @@ export interface TFormData {
|
||||||
|
|
||||||
export interface TFormState {
|
export interface TFormState {
|
||||||
queryLocation: string[];
|
queryLocation: string[];
|
||||||
queryType: TQueryTypes;
|
queryType: string;
|
||||||
queryVrf: string;
|
queryVrf: string;
|
||||||
queryTarget: string;
|
queryTarget: string;
|
||||||
queryGroup: string;
|
queryGroup: string;
|
||||||
|
|
|
||||||
|
|
@ -1,61 +1,52 @@
|
||||||
/* eslint @typescript-eslint/explicit-module-boundary-types: off */
|
|
||||||
/* eslint @typescript-eslint/no-explicit-any: off */
|
|
||||||
import type { State } from '@hookstate/core';
|
import type { State } from '@hookstate/core';
|
||||||
import type { TFormData, TValidQueryTypes, TStringTableData, TQueryResponseString } from './data';
|
import type { TFormData, TStringTableData, TQueryResponseString } from './data';
|
||||||
import type { TSelectOption } from './common';
|
import type { TSelectOption } from './common';
|
||||||
import type { TQueryContent } from './config';
|
import type { TQueryContent, TDirectiveSelect, TDirective } from './config';
|
||||||
|
|
||||||
export function isQueryType(q: unknown): q is TValidQueryTypes {
|
|
||||||
let result = false;
|
|
||||||
if (
|
|
||||||
typeof q === 'string' &&
|
|
||||||
['bgp_route', 'bgp_community', 'bgp_aspath', 'ping', 'traceroute'].includes(q)
|
|
||||||
) {
|
|
||||||
result = true;
|
|
||||||
}
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
|
|
||||||
export function isString(a: unknown): a is string {
|
export function isString(a: unknown): a is string {
|
||||||
return typeof a === 'string';
|
return typeof a === 'string';
|
||||||
}
|
}
|
||||||
|
|
||||||
export function isStructuredOutput(data: any): data is TStringTableData {
|
/**
|
||||||
return typeof data !== 'undefined' && 'output' in data && typeof data.output !== 'string';
|
* Type Guard to determine if an argument is an object, e.g. `{}` (`Record<string, unknown>`).
|
||||||
|
* Maintains type of object if a type argument is provided.
|
||||||
|
*/
|
||||||
|
export function isObject<T extends unknown = unknown>(
|
||||||
|
obj: unknown,
|
||||||
|
): obj is { [P in keyof T]: T[P] } {
|
||||||
|
return typeof obj === 'object' && obj !== null && !Array.isArray(obj);
|
||||||
}
|
}
|
||||||
|
|
||||||
export function isStringOutput(data: any): data is TQueryResponseString {
|
export function isStructuredOutput(data: unknown): data is TStringTableData {
|
||||||
return typeof data !== 'undefined' && 'output' in data && typeof data.output === 'string';
|
return isObject(data) && 'output' in data;
|
||||||
}
|
}
|
||||||
|
|
||||||
export function isQueryContent(c: any): c is TQueryContent {
|
export function isStringOutput(data: unknown): data is TQueryResponseString {
|
||||||
return typeof c !== 'undefined' && c !== null && 'content' in c;
|
return (
|
||||||
|
isObject(data) && 'output' in data && typeof (data as { output: unknown }).output === 'string'
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
export function isQueryContent(content: unknown): content is TQueryContent {
|
||||||
|
return isObject(content) && 'content' in content;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Determine if an object is a Select option.
|
* Determine if an object is a Select option.
|
||||||
*/
|
*/
|
||||||
export function isSelectOption(a: any): a is NonNullable<TSelectOption> {
|
export function isSelectOption(a: unknown): a is NonNullable<TSelectOption> {
|
||||||
return typeof a !== 'undefined' && a !== null && 'label' in a && 'value' in a;
|
return isObject(a) && 'label' in a && 'value' in a;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Determine if an object is a HookState Proxy.
|
* Determine if an object is a HookState Proxy.
|
||||||
*/
|
*/
|
||||||
export function isState<S>(a: any): a is State<NonNullable<S>> {
|
export function isState<S>(a: unknown): a is State<NonNullable<S>> {
|
||||||
let result = false;
|
if (isObject(a) && 'get' in a && 'set' in a && 'promised' in a) {
|
||||||
if (typeof a !== 'undefined' && a !== null) {
|
const obj = a as { get: never; set: never; promised: never };
|
||||||
if (
|
return typeof obj.get === 'function' && typeof obj.set === 'function';
|
||||||
'get' in a &&
|
|
||||||
typeof a.get === 'function' &&
|
|
||||||
'set' in a &&
|
|
||||||
typeof a.set === 'function' &&
|
|
||||||
'promised' in a
|
|
||||||
) {
|
|
||||||
result = true;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
return result;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|
@ -64,3 +55,10 @@ export function isState<S>(a: any): a is State<NonNullable<S>> {
|
||||||
export function isQueryField(field: string): field is keyof TFormData {
|
export function isQueryField(field: string): field is keyof TFormData {
|
||||||
return ['query_location', 'query_type', 'query_group', 'query_target'].includes(field);
|
return ['query_location', 'query_type', 'query_group', 'query_target'].includes(field);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Determine if a directive is a select directive.
|
||||||
|
*/
|
||||||
|
export function isSelectDirective(directive: TDirective): directive is TDirectiveSelect {
|
||||||
|
return directive.field_type === 'select';
|
||||||
|
}
|
||||||
|
|
|
||||||
|
|
@ -99,3 +99,17 @@ export async function fetchWithTimeout(
|
||||||
}, timeout);
|
}, timeout);
|
||||||
return await fetch(uri, config);
|
return await fetch(uri, config);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export function dedupObjectArray<E extends Record<string, unknown>, P extends keyof E = keyof E>(
|
||||||
|
arr: E[],
|
||||||
|
property: P,
|
||||||
|
): E[] {
|
||||||
|
return arr.reduce((acc: E[], current: E) => {
|
||||||
|
const x = acc.find(item => item[property] === current[property]);
|
||||||
|
if (!x) {
|
||||||
|
return acc.concat([current]);
|
||||||
|
} else {
|
||||||
|
return acc;
|
||||||
|
}
|
||||||
|
}, []);
|
||||||
|
}
|
||||||
|
|
|
||||||
|
|
@ -4,16 +4,17 @@
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
import json
|
import json
|
||||||
|
import string
|
||||||
import platform
|
import platform
|
||||||
from queue import Queue
|
from queue import Queue
|
||||||
from typing import Dict, Union, Optional, Generator
|
from typing import Dict, Union, Optional, Sequence, Generator
|
||||||
from asyncio import iscoroutine
|
from asyncio import iscoroutine
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from ipaddress import IPv4Address, IPv6Address, ip_address
|
from ipaddress import IPv4Address, IPv6Address, ip_address
|
||||||
|
|
||||||
# Third Party
|
# Third Party
|
||||||
from loguru._logger import Logger as LoguruLogger
|
from loguru._logger import Logger as LoguruLogger
|
||||||
from netmiko.ssh_dispatcher import CLASS_MAPPER
|
from netmiko.ssh_dispatcher import CLASS_MAPPER # type: ignore
|
||||||
|
|
||||||
# Project
|
# Project
|
||||||
from hyperglass.log import log
|
from hyperglass.log import log
|
||||||
|
|
@ -62,7 +63,7 @@ async def write_env(variables: Dict) -> str:
|
||||||
async def clear_redis_cache(db: int, config: Dict) -> bool:
|
async def clear_redis_cache(db: int, config: Dict) -> bool:
|
||||||
"""Clear the Redis cache."""
|
"""Clear the Redis cache."""
|
||||||
# Third Party
|
# Third Party
|
||||||
import aredis
|
import aredis # type: ignore
|
||||||
|
|
||||||
try:
|
try:
|
||||||
redis_instance = aredis.StrictRedis(db=db, **config)
|
redis_instance = aredis.StrictRedis(db=db, **config)
|
||||||
|
|
@ -316,3 +317,24 @@ def resolve_hostname(hostname: str) -> Generator:
|
||||||
|
|
||||||
yield ip4
|
yield ip4
|
||||||
yield ip6
|
yield ip6
|
||||||
|
|
||||||
|
|
||||||
|
def snake_to_camel(value: str) -> str:
|
||||||
|
"""Convert a string from snake_case to camelCase."""
|
||||||
|
parts = value.split("_")
|
||||||
|
humps = (hump.capitalize() for hump in parts[1:])
|
||||||
|
return "".join((parts[0], *humps))
|
||||||
|
|
||||||
|
|
||||||
|
def get_fmt_keys(template: str) -> Sequence[str]:
|
||||||
|
"""Get a list of str.format keys.
|
||||||
|
|
||||||
|
For example, string `"The value of {key} is {value}"` returns
|
||||||
|
`["key", "value"]`.
|
||||||
|
"""
|
||||||
|
keys = []
|
||||||
|
for block in string.Formatter.parse("", template):
|
||||||
|
key = block[1]
|
||||||
|
if key:
|
||||||
|
keys.append(key)
|
||||||
|
return keys
|
||||||
|
|
|
||||||
|
|
@ -7,7 +7,7 @@ from typing import Dict, Tuple, Union
|
||||||
|
|
||||||
# Third Party
|
# Third Party
|
||||||
import psutil as _psutil
|
import psutil as _psutil
|
||||||
from cpuinfo import get_cpu_info as _get_cpu_info
|
from cpuinfo import get_cpu_info as _get_cpu_info # type: ignore
|
||||||
|
|
||||||
# Project
|
# Project
|
||||||
from hyperglass.constants import __version__
|
from hyperglass.constants import __version__
|
||||||
|
|
|
||||||
80
poetry.lock
generated
80
poetry.lock
generated
|
|
@ -65,7 +65,7 @@ python-versions = ">=3.5"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "asyncssh"
|
name = "asyncssh"
|
||||||
version = "2.5.0"
|
version = "2.7.0"
|
||||||
description = "AsyncSSH: Asynchronous SSHv2 client and server library"
|
description = "AsyncSSH: Asynchronous SSHv2 client and server library"
|
||||||
category = "main"
|
category = "main"
|
||||||
optional = false
|
optional = false
|
||||||
|
|
@ -76,7 +76,7 @@ cryptography = ">=2.8"
|
||||||
|
|
||||||
[package.extras]
|
[package.extras]
|
||||||
bcrypt = ["bcrypt (>=3.1.3)"]
|
bcrypt = ["bcrypt (>=3.1.3)"]
|
||||||
fido2 = ["fido2 (>=0.8.1)"]
|
fido2 = ["fido2 (==0.9.1)"]
|
||||||
gssapi = ["gssapi (>=1.2.0)"]
|
gssapi = ["gssapi (>=1.2.0)"]
|
||||||
libnacl = ["libnacl (>=1.4.2)"]
|
libnacl = ["libnacl (>=1.4.2)"]
|
||||||
pkcs11 = ["python-pkcs11 (>=0.7.0)"]
|
pkcs11 = ["python-pkcs11 (>=0.7.0)"]
|
||||||
|
|
@ -774,7 +774,7 @@ test = ["pyyaml (>=5.1.2)", "pytest (>=5.1.2)"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "nodeenv"
|
name = "nodeenv"
|
||||||
version = "1.5.0"
|
version = "1.6.0"
|
||||||
description = "Node.js virtual environment builder"
|
description = "Node.js virtual environment builder"
|
||||||
category = "dev"
|
category = "dev"
|
||||||
optional = false
|
optional = false
|
||||||
|
|
@ -902,7 +902,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pydantic"
|
name = "pydantic"
|
||||||
version = "1.8.1"
|
version = "1.8.2"
|
||||||
description = "Data validation and settings management using python 3.6 type hinting"
|
description = "Data validation and settings management using python 3.6 type hinting"
|
||||||
category = "main"
|
category = "main"
|
||||||
optional = false
|
optional = false
|
||||||
|
|
@ -1051,7 +1051,7 @@ pillow = ">=4.0.0"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "rfc3986"
|
name = "rfc3986"
|
||||||
version = "1.4.0"
|
version = "1.5.0"
|
||||||
description = "Validating URI References per RFC 3986"
|
description = "Validating URI References per RFC 3986"
|
||||||
category = "main"
|
category = "main"
|
||||||
optional = false
|
optional = false
|
||||||
|
|
@ -1094,7 +1094,7 @@ paramiko = "*"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "scrapli"
|
name = "scrapli"
|
||||||
version = "2021.1.30"
|
version = "2021.7.30"
|
||||||
description = "Fast, flexible, sync/async, Python 3.6+ screen scraping client specifically for network devices"
|
description = "Fast, flexible, sync/async, Python 3.6+ screen scraping client specifically for network devices"
|
||||||
category = "main"
|
category = "main"
|
||||||
optional = false
|
optional = false
|
||||||
|
|
@ -1107,12 +1107,12 @@ dataclasses = {version = ">=0.7,<1.0", markers = "python_version < \"3.7\""}
|
||||||
|
|
||||||
[package.extras]
|
[package.extras]
|
||||||
asyncssh = ["asyncssh (>=2.2.1,<3.0.0)"]
|
asyncssh = ["asyncssh (>=2.2.1,<3.0.0)"]
|
||||||
community = ["scrapli-community (>=2021.01.30a1)"]
|
community = ["scrapli-community (>=2021.01.30)"]
|
||||||
full = ["textfsm (>=1.1.0,<2.0.0)", "ntc-templates (>=1.1.0,<2.0.0)", "ttp (>=0.5.0,<1.0.0)", "paramiko (>=2.6.0,<3.0.0)", "ssh2-python (>=0.23.0,<1.0.0)", "asyncssh (>=2.2.1,<3.0.0)", "scrapli-community (>=2021.01.30a1)", "genie (>=20.2)", "pyats (>=20.2)"]
|
full = ["ntc-templates (>=1.1.0,<3.0.0)", "textfsm (>=1.1.0,<2.0.0)", "ttp (>=0.5.0,<1.0.0)", "paramiko (>=2.6.0,<3.0.0)", "asyncssh (>=2.2.1,<3.0.0)", "scrapli-community (>=2021.01.30)", "ssh2-python (>=0.23.0,<1.0.0)", "genie (>=20.2)", "pyats (>=20.2)"]
|
||||||
genie = ["genie (>=20.2)", "pyats (>=20.2)"]
|
genie = ["genie (>=20.2)", "pyats (>=20.2)"]
|
||||||
paramiko = ["paramiko (>=2.6.0,<3.0.0)"]
|
paramiko = ["paramiko (>=2.6.0,<3.0.0)"]
|
||||||
ssh2 = ["ssh2-python (>=0.23.0,<1.0.0)"]
|
ssh2 = ["ssh2-python (>=0.23.0,<1.0.0)"]
|
||||||
textfsm = ["textfsm (>=1.1.0,<2.0.0)", "ntc-templates (>=1.1.0,<2.0.0)"]
|
textfsm = ["ntc-templates (>=1.1.0,<3.0.0)", "textfsm (>=1.1.0,<2.0.0)"]
|
||||||
ttp = ["ttp (>=0.5.0,<1.0.0)"]
|
ttp = ["ttp (>=0.5.0,<1.0.0)"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
|
|
@ -1407,7 +1407,7 @@ testing = ["pytest (>=3.5,!=3.7.3)", "pytest-checkdocs (>=1.2.3)", "pytest-flake
|
||||||
[metadata]
|
[metadata]
|
||||||
lock-version = "1.1"
|
lock-version = "1.1"
|
||||||
python-versions = ">=3.6.1,<4.0"
|
python-versions = ">=3.6.1,<4.0"
|
||||||
content-hash = "39564830e6fe6f4ba7253c516dd9d0dc0089e60512cd0c94ae798a4464be4505"
|
content-hash = "c36e22b0981b31fb48f071ac413e8919ad946ef9ff08628b813370ae0f6b1cfd"
|
||||||
|
|
||||||
[metadata.files]
|
[metadata.files]
|
||||||
aiocontextvars = [
|
aiocontextvars = [
|
||||||
|
|
@ -1438,8 +1438,8 @@ async-generator = [
|
||||||
{file = "async_generator-1.10.tar.gz", hash = "sha256:6ebb3d106c12920aaae42ccb6f787ef5eefdcdd166ea3d628fa8476abe712144"},
|
{file = "async_generator-1.10.tar.gz", hash = "sha256:6ebb3d106c12920aaae42ccb6f787ef5eefdcdd166ea3d628fa8476abe712144"},
|
||||||
]
|
]
|
||||||
asyncssh = [
|
asyncssh = [
|
||||||
{file = "asyncssh-2.5.0-py3-none-any.whl", hash = "sha256:5bbb313e1d2f181c1598c4722673670b4ea8840b725b2b261fa5a1da8fa38886"},
|
{file = "asyncssh-2.7.0-py3-none-any.whl", hash = "sha256:ccc62a1b311c71d4bf8e4bc3ac141eb00ebb28b324e375aed1d0a03232893ca1"},
|
||||||
{file = "asyncssh-2.5.0.tar.gz", hash = "sha256:0b65e2af73a2e39a271bd627abbe4f7e4b0345486ed403e65987d79c72fcb70b"},
|
{file = "asyncssh-2.7.0.tar.gz", hash = "sha256:185013d8e67747c3c0f01b72416b8bd78417da1df48c71f76da53c607ef541b6"},
|
||||||
]
|
]
|
||||||
attrs = [
|
attrs = [
|
||||||
{file = "attrs-19.3.0-py2.py3-none-any.whl", hash = "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c"},
|
{file = "attrs-19.3.0-py2.py3-none-any.whl", hash = "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c"},
|
||||||
|
|
@ -1768,8 +1768,8 @@ netmiko = [
|
||||||
{file = "netmiko-3.4.0.tar.gz", hash = "sha256:acadb9dd97864ee848e2032f1f0e301c7b31e7a4153757d98f5c8ba1b9614993"},
|
{file = "netmiko-3.4.0.tar.gz", hash = "sha256:acadb9dd97864ee848e2032f1f0e301c7b31e7a4153757d98f5c8ba1b9614993"},
|
||||||
]
|
]
|
||||||
nodeenv = [
|
nodeenv = [
|
||||||
{file = "nodeenv-1.5.0-py2.py3-none-any.whl", hash = "sha256:5304d424c529c997bc888453aeaa6362d242b6b4631e90f3d4bf1b290f1c84a9"},
|
{file = "nodeenv-1.6.0-py2.py3-none-any.whl", hash = "sha256:621e6b7076565ddcacd2db0294c0381e01fd28945ab36bcf00f41c5daf63bef7"},
|
||||||
{file = "nodeenv-1.5.0.tar.gz", hash = "sha256:ab45090ae383b716c4ef89e690c41ff8c2b257b85b309f01f3654df3d084bd7c"},
|
{file = "nodeenv-1.6.0.tar.gz", hash = "sha256:3ef13ff90291ba2a4a7a4ff9a979b63ffdd00a464dbe04acf0ea6471517a4c2b"},
|
||||||
]
|
]
|
||||||
ntc-templates = [
|
ntc-templates = [
|
||||||
{file = "ntc_templates-2.0.0-py3-none-any.whl", hash = "sha256:6617f36aaa842179e94d8b8e6527e652baf4a18a5b2f94b26b6505e5722fbc95"},
|
{file = "ntc_templates-2.0.0-py3-none-any.whl", hash = "sha256:6617f36aaa842179e94d8b8e6527e652baf4a18a5b2f94b26b6505e5722fbc95"},
|
||||||
|
|
@ -1850,28 +1850,28 @@ pycparser = [
|
||||||
{file = "pycparser-2.20.tar.gz", hash = "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0"},
|
{file = "pycparser-2.20.tar.gz", hash = "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0"},
|
||||||
]
|
]
|
||||||
pydantic = [
|
pydantic = [
|
||||||
{file = "pydantic-1.8.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0c40162796fc8d0aa744875b60e4dc36834db9f2a25dbf9ba9664b1915a23850"},
|
{file = "pydantic-1.8.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:05ddfd37c1720c392f4e0d43c484217b7521558302e7069ce8d318438d297739"},
|
||||||
{file = "pydantic-1.8.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:fff29fe54ec419338c522b908154a2efabeee4f483e48990f87e189661f31ce3"},
|
{file = "pydantic-1.8.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:a7c6002203fe2c5a1b5cbb141bb85060cbff88c2d78eccbc72d97eb7022c43e4"},
|
||||||
{file = "pydantic-1.8.1-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:fbfb608febde1afd4743c6822c19060a8dbdd3eb30f98e36061ba4973308059e"},
|
{file = "pydantic-1.8.2-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:589eb6cd6361e8ac341db97602eb7f354551482368a37f4fd086c0733548308e"},
|
||||||
{file = "pydantic-1.8.1-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:eb8ccf12295113ce0de38f80b25f736d62f0a8d87c6b88aca645f168f9c78771"},
|
{file = "pydantic-1.8.2-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:10e5622224245941efc193ad1d159887872776df7a8fd592ed746aa25d071840"},
|
||||||
{file = "pydantic-1.8.1-cp36-cp36m-win_amd64.whl", hash = "sha256:20d42f1be7c7acc352b3d09b0cf505a9fab9deb93125061b376fbe1f06a5459f"},
|
{file = "pydantic-1.8.2-cp36-cp36m-win_amd64.whl", hash = "sha256:99a9fc39470010c45c161a1dc584997f1feb13f689ecf645f59bb4ba623e586b"},
|
||||||
{file = "pydantic-1.8.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dde4ca368e82791de97c2ec019681ffb437728090c0ff0c3852708cf923e0c7d"},
|
{file = "pydantic-1.8.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a83db7205f60c6a86f2c44a61791d993dff4b73135df1973ecd9eed5ea0bda20"},
|
||||||
{file = "pydantic-1.8.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:3bbd023c981cbe26e6e21c8d2ce78485f85c2e77f7bab5ec15b7d2a1f491918f"},
|
{file = "pydantic-1.8.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:41b542c0b3c42dc17da70554bc6f38cbc30d7066d2c2815a94499b5684582ecb"},
|
||||||
{file = "pydantic-1.8.1-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:830ef1a148012b640186bf4d9789a206c56071ff38f2460a32ae67ca21880eb8"},
|
{file = "pydantic-1.8.2-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:ea5cb40a3b23b3265f6325727ddfc45141b08ed665458be8c6285e7b85bd73a1"},
|
||||||
{file = "pydantic-1.8.1-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:fb77f7a7e111db1832ae3f8f44203691e15b1fa7e5a1cb9691d4e2659aee41c4"},
|
{file = "pydantic-1.8.2-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:18b5ea242dd3e62dbf89b2b0ec9ba6c7b5abaf6af85b95a97b00279f65845a23"},
|
||||||
{file = "pydantic-1.8.1-cp37-cp37m-win_amd64.whl", hash = "sha256:3bcb9d7e1f9849a6bdbd027aabb3a06414abd6068cb3b21c49427956cce5038a"},
|
{file = "pydantic-1.8.2-cp37-cp37m-win_amd64.whl", hash = "sha256:234a6c19f1c14e25e362cb05c68afb7f183eb931dd3cd4605eafff055ebbf287"},
|
||||||
{file = "pydantic-1.8.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2287ebff0018eec3cc69b1d09d4b7cebf277726fa1bd96b45806283c1d808683"},
|
{file = "pydantic-1.8.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:021ea0e4133e8c824775a0cfe098677acf6fa5a3cbf9206a376eed3fc09302cd"},
|
||||||
{file = "pydantic-1.8.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:4bbc47cf7925c86a345d03b07086696ed916c7663cb76aa409edaa54546e53e2"},
|
{file = "pydantic-1.8.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e710876437bc07bd414ff453ac8ec63d219e7690128d925c6e82889d674bb505"},
|
||||||
{file = "pydantic-1.8.1-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:6388ef4ef1435364c8cc9a8192238aed030595e873d8462447ccef2e17387125"},
|
{file = "pydantic-1.8.2-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:ac8eed4ca3bd3aadc58a13c2aa93cd8a884bcf21cb019f8cfecaae3b6ce3746e"},
|
||||||
{file = "pydantic-1.8.1-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:dd4888b300769ecec194ca8f2699415f5f7760365ddbe243d4fd6581485fa5f0"},
|
{file = "pydantic-1.8.2-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:4a03cbbe743e9c7247ceae6f0d8898f7a64bb65800a45cbdc52d65e370570820"},
|
||||||
{file = "pydantic-1.8.1-cp38-cp38-win_amd64.whl", hash = "sha256:8fbb677e4e89c8ab3d450df7b1d9caed23f254072e8597c33279460eeae59b99"},
|
{file = "pydantic-1.8.2-cp38-cp38-win_amd64.whl", hash = "sha256:8621559dcf5afacf0069ed194278f35c255dc1a1385c28b32dd6c110fd6531b3"},
|
||||||
{file = "pydantic-1.8.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2f2736d9a996b976cfdfe52455ad27462308c9d3d0ae21a2aa8b4cd1a78f47b9"},
|
{file = "pydantic-1.8.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8b223557f9510cf0bfd8b01316bf6dd281cf41826607eada99662f5e4963f316"},
|
||||||
{file = "pydantic-1.8.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:3114d74329873af0a0e8004627f5389f3bb27f956b965ddd3e355fe984a1789c"},
|
{file = "pydantic-1.8.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:244ad78eeb388a43b0c927e74d3af78008e944074b7d0f4f696ddd5b2af43c62"},
|
||||||
{file = "pydantic-1.8.1-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:258576f2d997ee4573469633592e8b99aa13bda182fcc28e875f866016c8e07e"},
|
{file = "pydantic-1.8.2-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:05ef5246a7ffd2ce12a619cbb29f3307b7c4509307b1b49f456657b43529dc6f"},
|
||||||
{file = "pydantic-1.8.1-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:c17a0b35c854049e67c68b48d55e026c84f35593c66d69b278b8b49e2484346f"},
|
{file = "pydantic-1.8.2-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:54cd5121383f4a461ff7644c7ca20c0419d58052db70d8791eacbbe31528916b"},
|
||||||
{file = "pydantic-1.8.1-cp39-cp39-win_amd64.whl", hash = "sha256:e8bc082afef97c5fd3903d05c6f7bb3a6af9fc18631b4cc9fedeb4720efb0c58"},
|
{file = "pydantic-1.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:4be75bebf676a5f0f87937c6ddb061fa39cbea067240d98e298508c1bda6f3f3"},
|
||||||
{file = "pydantic-1.8.1-py3-none-any.whl", hash = "sha256:e3f8790c47ac42549dc8b045a67b0ca371c7f66e73040d0197ce6172b385e520"},
|
{file = "pydantic-1.8.2-py3-none-any.whl", hash = "sha256:fec866a0b59f372b7e776f2d7308511784dace622e0992a0b59ea3ccee0ae833"},
|
||||||
{file = "pydantic-1.8.1.tar.gz", hash = "sha256:26cf3cb2e68ec6c0cfcb6293e69fb3450c5fd1ace87f46b64f678b0d29eac4c3"},
|
{file = "pydantic-1.8.2.tar.gz", hash = "sha256:26464e57ccaafe72b7ad156fdaa4e9b9ef051f69e175dbbb463283000c05ab7b"},
|
||||||
]
|
]
|
||||||
pydocstyle = [
|
pydocstyle = [
|
||||||
{file = "pydocstyle-5.1.1-py3-none-any.whl", hash = "sha256:aca749e190a01726a4fb472dd4ef23b5c9da7b9205c0a7857c06533de13fd678"},
|
{file = "pydocstyle-5.1.1-py3-none-any.whl", hash = "sha256:aca749e190a01726a4fb472dd4ef23b5c9da7b9205c0a7857c06533de13fd678"},
|
||||||
|
|
@ -2029,8 +2029,8 @@ reportlab = [
|
||||||
{file = "reportlab-3.5.53.tar.gz", hash = "sha256:49e32586d3a814a5f77407c0590504a72743ca278518b3c0f90182430f2d87af"},
|
{file = "reportlab-3.5.53.tar.gz", hash = "sha256:49e32586d3a814a5f77407c0590504a72743ca278518b3c0f90182430f2d87af"},
|
||||||
]
|
]
|
||||||
rfc3986 = [
|
rfc3986 = [
|
||||||
{file = "rfc3986-1.4.0-py2.py3-none-any.whl", hash = "sha256:af9147e9aceda37c91a05f4deb128d4b4b49d6b199775fd2d2927768abdc8f50"},
|
{file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"},
|
||||||
{file = "rfc3986-1.4.0.tar.gz", hash = "sha256:112398da31a3344dc25dbf477d8df6cb34f9278a94fee2625d89e4514be8bb9d"},
|
{file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"},
|
||||||
]
|
]
|
||||||
rich = [
|
rich = [
|
||||||
{file = "rich-8.0.0-py3-none-any.whl", hash = "sha256:3c5e4bb1e48c647bc75bc4ae7c125d399bec5b6ed2a319f0d447361635f02a9a"},
|
{file = "rich-8.0.0-py3-none-any.whl", hash = "sha256:3c5e4bb1e48c647bc75bc4ae7c125d399bec5b6ed2a319f0d447361635f02a9a"},
|
||||||
|
|
@ -2041,8 +2041,8 @@ scp = [
|
||||||
{file = "scp-0.13.3.tar.gz", hash = "sha256:8bd748293d7362073169b96ce4b8c4f93bcc62cfc5f7e1d949e01e406a025bd4"},
|
{file = "scp-0.13.3.tar.gz", hash = "sha256:8bd748293d7362073169b96ce4b8c4f93bcc62cfc5f7e1d949e01e406a025bd4"},
|
||||||
]
|
]
|
||||||
scrapli = [
|
scrapli = [
|
||||||
{file = "scrapli-2021.1.30-py3-none-any.whl", hash = "sha256:31a35daa75212953efb8cf7d7ff582f93aae12d2b957056c9ec185d4f6f5e586"},
|
{file = "scrapli-2021.7.30-py3-none-any.whl", hash = "sha256:7bdf482a79d0a3d24a9a776b8d82686bc201a4c828fd14a917453177c0008d98"},
|
||||||
{file = "scrapli-2021.1.30.tar.gz", hash = "sha256:aac7e8ae764f098a77d8d14fa4bda1cd886318b7293507e56a05f007d3e2e6c4"},
|
{file = "scrapli-2021.7.30.tar.gz", hash = "sha256:fa1e27a7f6281e6ea8ae8bb096b637b2f5b0ecf37251160b839577a1c0cef40f"},
|
||||||
]
|
]
|
||||||
six = [
|
six = [
|
||||||
{file = "six-1.15.0-py2.py3-none-any.whl", hash = "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"},
|
{file = "six-1.15.0-py2.py3-none-any.whl", hash = "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"},
|
||||||
|
|
|
||||||
|
|
@ -23,7 +23,7 @@ license = "BSD-3-Clause-Clear"
|
||||||
name = "hyperglass"
|
name = "hyperglass"
|
||||||
readme = "README.md"
|
readme = "README.md"
|
||||||
repository = "https://github.com/thatmattlove/hyperglass"
|
repository = "https://github.com/thatmattlove/hyperglass"
|
||||||
version = "1.0.4"
|
version = "2.0.0-dev"
|
||||||
|
|
||||||
[tool.poetry.scripts]
|
[tool.poetry.scripts]
|
||||||
hyperglass = "hyperglass.console:CLI"
|
hyperglass = "hyperglass.console:CLI"
|
||||||
|
|
@ -47,14 +47,14 @@ netmiko = "^3.4.0"
|
||||||
paramiko = "^2.7.2"
|
paramiko = "^2.7.2"
|
||||||
psutil = "^5.7.2"
|
psutil = "^5.7.2"
|
||||||
py-cpuinfo = "^7.0.0"
|
py-cpuinfo = "^7.0.0"
|
||||||
pydantic = "^1.8.1"
|
pydantic = "1.8.2"
|
||||||
python = ">=3.6.1,<4.0"
|
python = ">=3.8.1,<4.0"
|
||||||
redis = "^3.5.3"
|
redis = "^3.5.3"
|
||||||
scrapli = {extras = ["asyncssh"], version = "^2021.1.30"}
|
scrapli = {version = "2021.07.30", extras = ["asyncssh"]}
|
||||||
|
typing-extensions = "^3.7.4"
|
||||||
uvicorn = {extras = ["standard"], version = "^0.13.4"}
|
uvicorn = {extras = ["standard"], version = "^0.13.4"}
|
||||||
uvloop = "^0.14.0"
|
uvloop = "^0.14.0"
|
||||||
xmltodict = "^0.12.0"
|
xmltodict = "^0.12.0"
|
||||||
typing-extensions = "^3.7.4"
|
|
||||||
|
|
||||||
[tool.poetry.dev-dependencies]
|
[tool.poetry.dev-dependencies]
|
||||||
bandit = "^1.6.2"
|
bandit = "^1.6.2"
|
||||||
|
|
@ -83,3 +83,14 @@ stackprinter = "^0.2.3"
|
||||||
|
|
||||||
[tool.black]
|
[tool.black]
|
||||||
line-length = 88
|
line-length = 88
|
||||||
|
|
||||||
|
[tool.pyright]
|
||||||
|
exclude = [
|
||||||
|
"**/node_modules",
|
||||||
|
"**/ui",
|
||||||
|
"**/__pycache__",
|
||||||
|
]
|
||||||
|
include = ["hyperglass"]
|
||||||
|
pythonVersion = "3.6"
|
||||||
|
reportMissingImports = true
|
||||||
|
reportMissingTypeStubs = true
|
||||||
|
|
|
||||||
Loading…
Add table
Reference in a new issue