Implement: Pydantic, Custom exceptions, constants, YAML
|
|
@ -1,7 +1,6 @@
|
|||
# https://github.com/checktheroads/hyperglass
|
||||
"""
|
||||
Constructs SSH commands or API call parameters based on front end input, executes the
|
||||
commands/calls, returns the output to front end
|
||||
Constructs SSH commands or API call parameters based on front end
|
||||
input, executes the commands/calls, returns the output to front end.
|
||||
"""
|
||||
from hyperglass.command import execute
|
||||
from hyperglass.command import construct
|
||||
@ -5,8 +5,7 @@ API call parameters for hyperglass-frr
|
|||
"""
|
||||
# Standard Imports
|
||||
import json
|
||||
import inspect
|
||||
import logging
|
||||
import operator
|
||||
|
||||
# Module Imports
|
||||
import logzero
|
||||
|
|
@ -18,48 +17,37 @@ from netaddr import IPNetwork, IPAddress # pylint: disable=unused-import
|
|||
# IPAddress("192.0.2.1"), so I do actually need this import. <3, -ML
|
||||
|
||||
# Project Imports
|
||||
from hyperglass import configuration
|
||||
|
||||
# Configuration Imports
|
||||
codes = configuration.codes()
|
||||
|
||||
# Logzero Configuration
|
||||
if configuration.debug_state():
|
||||
logzero.loglevel(logging.DEBUG)
|
||||
else:
|
||||
logzero.loglevel(logging.INFO)
|
||||
|
||||
|
||||
def current_function():
|
||||
"""Returns name of current function"""
|
||||
this_function = inspect.stack()[1][3]
|
||||
return this_function
|
||||
from hyperglass.configuration import params, commands, logzero_config
|
||||
|
||||
|
||||
class Construct:
|
||||
"""Constructor for FRRouting API"""
|
||||
"""
|
||||
Constructs SSH commands or REST API queries based on validated
|
||||
input parameters.
|
||||
"""
|
||||
|
||||
def __init__(self, device):
|
||||
self.device = device
|
||||
self.d_address = self.device["address"]
|
||||
self.d_src_addr_ipv4 = self.device["src_addr_ipv4"]
|
||||
self.d_src_addr_ipv6 = self.device["src_addr_ipv6"]
|
||||
self.d_type = self.device["type"]
|
||||
self.command = configuration.command(self.d_type)
|
||||
|
||||
def get_src(self, ver):
|
||||
"""Returns source IP based on IP version."""
|
||||
"""
|
||||
Returns source IP based on IP version of query destination.
|
||||
"""
|
||||
src = None
|
||||
if ver == 4:
|
||||
src = self.d_src_addr_ipv4
|
||||
src = self.device.src_addr_ipv4.exploded
|
||||
if ver == 6:
|
||||
src = self.d_src_addr_ipv6
|
||||
logger.debug(f"Source IPv{ver}: {src}")
|
||||
src = self.device.src_addr_ipv6.exploded
|
||||
logger.debug(f"IPv{ver} Source: {src}")
|
||||
return src
|
||||
|
||||
def device_commands(self, nos, afi, query_type):
|
||||
cmd_path = f"{nos}.{afi}.{query_type}"
|
||||
return operator.attrgetter(cmd_path)(commands)
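The new device_commands() helper resolves the nested nos -> afi -> query_type path against the commands object imported from hyperglass.configuration, using operator.attrgetter for the dotted lookup. A minimal sketch of the mechanism, with a SimpleNamespace stand-in for the real Commands model (the IOS ping template is taken from the command definitions later in this change):

import operator
from types import SimpleNamespace

# Stand-in for the Commands model built from commands.yaml (illustrative only)
commands = SimpleNamespace(
    cisco_ios=SimpleNamespace(
        ipv4=SimpleNamespace(ping="ping {target} repeat 5 source {source}")
    )
)

cmd_path = "cisco_ios.ipv4.ping"              # f"{nos}.{afi}.{query_type}"
template = operator.attrgetter(cmd_path)(commands)
print(template.format(target="192.0.2.1", source="192.0.2.254"))
# -> ping 192.0.2.1 repeat 5 source 192.0.2.254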
|
||||
|
||||
def ping(self, transport, target):
|
||||
"""Constructs ping query parameters from pre-validated input"""
|
||||
query_type = current_function()
|
||||
query_type = "ping"
|
||||
logger.debug(f"Constructing {query_type} query for {target} via {transport}...")
|
||||
query = None
|
||||
ip_version = IPNetwork(target).ip.version
|
||||
|
|
@ -74,16 +62,18 @@ class Construct:
|
|||
"target": target,
|
||||
}
|
||||
)
|
||||
if transport == "scrape":
|
||||
conf_command = self.command[afi][query_type]
|
||||
elif transport == "scrape":
|
||||
conf_command = self.device_commands(self.device.nos, afi, query_type)
|
||||
fmt_command = conf_command.format(target=target, source=source)
|
||||
query = (self.d_address, self.d_type, fmt_command)
|
||||
query = (self.device.address.exploded, self.device.nos, fmt_command)
|
||||
logger.debug(f"Constructed query: {query}")
|
||||
return query
|
||||
|
||||
def traceroute(self, transport, target):
|
||||
"""Constructs traceroute query parameters from pre-validated input"""
|
||||
query_type = current_function()
|
||||
"""
|
||||
Constructs traceroute query parameters from pre-validated input.
|
||||
"""
|
||||
query_type = "traceroute"
|
||||
logger.debug(f"Constructing {query_type} query for {target} via {transport}...")
|
||||
query = None
|
||||
ip_version = IPNetwork(target).ip.version
|
||||
|
|
@ -99,16 +89,18 @@ class Construct:
|
|||
}
|
||||
)
|
||||
|
||||
if transport == "scrape":
|
||||
conf_command = self.command[afi][query_type]
|
||||
elif transport == "scrape":
|
||||
conf_command = self.device_commands(self.device.nos, afi, query_type)
|
||||
fmt_command = conf_command.format(target=target, source=source)
|
||||
query = (self.d_address, self.d_type, fmt_command)
|
||||
query = (self.device.address.exploded, self.device.nos, fmt_command)
|
||||
logger.debug(f"Constructed query: {query}")
|
||||
return query
|
||||
|
||||
def bgp_route(self, transport, target):
|
||||
"""Constructs bgp_route query parameters from pre-validated input"""
|
||||
query_type = current_function()
|
||||
"""
|
||||
Constructs bgp_route query parameters from pre-validated input.
|
||||
"""
|
||||
query_type = "bgp_route"
|
||||
logger.debug(f"Constructing {query_type} query for {target} via {transport}...")
|
||||
query = None
|
||||
ip_version = IPNetwork(target).ip.version
|
||||
|
|
@ -116,38 +108,43 @@ class Construct:
|
|||
if transport == "rest":
|
||||
query = json.dumps({"query_type": query_type, "afi": afi, "target": target})
|
||||
if transport == "scrape":
|
||||
conf_command = self.command[afi][query_type]
|
||||
conf_command = self.device_commands(self.device.nos, afi, query_type)
|
||||
fmt_command = conf_command.format(target=target)
|
||||
query = (self.d_address, self.d_type, fmt_command)
|
||||
query = (self.device.address.exploded, self.device.nos, fmt_command)
|
||||
logger.debug(f"Constructed query: {query}")
|
||||
return query
|
||||
|
||||
def bgp_community(self, transport, target):
|
||||
"""Constructs bgp_community query parameters from pre-validated input"""
|
||||
query_type = current_function()
|
||||
"""
|
||||
Constructs bgp_community query parameters from pre-validated
|
||||
input.
|
||||
"""
|
||||
query_type = "bgp_community"
|
||||
logger.debug(f"Constructing {query_type} query for {target} via {transport}...")
|
||||
afi = "dual"
|
||||
query = None
|
||||
if transport == "rest":
|
||||
query = json.dumps({"query_type": query_type, "afi": afi, "target": target})
|
||||
if transport == "scrape":
|
||||
conf_command = self.command[afi][query_type]
|
||||
conf_command = self.device_commands(self.device.nos, afi, query_type)
|
||||
fmt_command = conf_command.format(target=target)
|
||||
query = (self.d_address, self.d_type, fmt_command)
|
||||
query = (self.device.address.exploded, self.device.nos, fmt_command)
|
||||
logger.debug(f"Constructed query: {query}")
|
||||
return query
|
||||
|
||||
def bgp_aspath(self, transport, target):
|
||||
"""Constructs bgp_aspath query parameters from pre-validated input"""
|
||||
query_type = current_function()
|
||||
"""
|
||||
Constructs bgp_aspath query parameters from pre-validated input.
|
||||
"""
|
||||
query_type = "bgp_aspath"
|
||||
logger.debug(f"Constructing {query_type} query for {target} via {transport}...")
|
||||
afi = "dual"
|
||||
query = None
|
||||
if transport == "rest":
|
||||
query = json.dumps({"query_type": query_type, "afi": afi, "target": target})
|
||||
if transport == "scrape":
|
||||
conf_command = self.command[afi][query_type]
|
||||
conf_command = self.device_commands(self.device.nos, afi, query_type)
|
||||
fmt_command = conf_command.format(target=target)
|
||||
query = (self.d_address, self.d_type, fmt_command)
|
||||
query = (self.device.address.exploded, self.device.nos, fmt_command)
|
||||
logger.debug(f"Constructed query: {query}")
|
||||
return query
|
||||
@ -7,7 +7,6 @@ connections or hyperglass-frr API calls, returns the output back to the front en
|
|||
# Standard Imports
|
||||
import json
|
||||
import time
|
||||
import logging
|
||||
|
||||
# Module Imports
|
||||
import requests
|
||||
|
|
@ -24,19 +23,16 @@ from netmiko import (
|
|||
)
|
||||
|
||||
# Project Imports
|
||||
from hyperglass import configuration
|
||||
from hyperglass.constants import code, Supported
|
||||
from hyperglass.command.construct import Construct
|
||||
from hyperglass.command.validate import Validate
|
||||
|
||||
codes = configuration.codes()
|
||||
config = configuration.params()
|
||||
# config = configuration.general()
|
||||
|
||||
# Logzero Configuration
|
||||
if configuration.debug_state():
|
||||
logzero.loglevel(logging.DEBUG)
|
||||
else:
|
||||
logzero.loglevel(logging.INFO)
|
||||
from hyperglass.configuration import (
|
||||
params,
|
||||
devices,
|
||||
credentials,
|
||||
proxies,
|
||||
logzero_config,
|
||||
)
|
||||
|
||||
|
||||
class Rest:
|
||||
|
|
@ -50,7 +46,7 @@ class Rest:
|
|||
self.device = device
|
||||
self.query_type = query_type
|
||||
self.target = target
|
||||
self.cred = configuration.credential(self.device["credential"])
|
||||
self.cred = getattr(credentials, self.device.credential)
|
||||
self.query = getattr(Construct(self.device), self.query_type)(
|
||||
self.transport, self.target
|
||||
)
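Credential lookups are now attribute access on the imported credentials model, and passwords are Pydantic SecretStr values, which is why the request headers and Netmiko host dictionaries below call .get_secret_value(). A small sketch of that behavior, using a hypothetical Credential model rather than the real one from configuration.models:

from pydantic import BaseModel, SecretStr

class Credential(BaseModel):          # hypothetical shape, for illustration only
    username: str
    password: SecretStr

cred = Credential(username="hyperglass", password="s3cr3t")
print(cred.password)                     # ********** (masked in repr and logs)
print(cred.password.get_secret_value())  # s3cr3t (explicit unmasking only)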
|
||||
|
|
@ -64,30 +60,30 @@ class Rest:
|
|||
try:
|
||||
headers = {
|
||||
"Content-Type": "application/json",
|
||||
"X-API-Key": self.cred["password"],
|
||||
"X-API-Key": self.cred.password.get_secret_value(),
|
||||
}
|
||||
json_query = json.dumps(self.query)
|
||||
frr_endpoint = f'http://{self.device["address"]}:{self.device["port"]}/frr'
|
||||
# Debug
|
||||
logger.debug(f"HTTP Headers:\n{headers}")
|
||||
logger.debug(f"JSON query:\n{json_query}")
|
||||
frr_endpoint = (
|
||||
f"http://{self.device.address.exploded}:{self.device.port}/frr"
|
||||
)
|
||||
logger.debug(f"HTTP Headers: {headers}")
|
||||
logger.debug(f"JSON query: {json_query}")
|
||||
logger.debug(f"FRR endpoint: {frr_endpoint}")
|
||||
# End Debug
|
||||
frr_response = requests.post(
|
||||
frr_endpoint, headers=headers, data=json_query, timeout=7
|
||||
)
|
||||
response = frr_response.text
|
||||
status = frr_response.status_code
|
||||
# Debug
|
||||
logger.debug(f"FRR response text:\n{response}")
|
||||
logger.debug(f"FRR status code: {status}")
|
||||
logger.debug(f"FRR response text:\n{response}")
|
||||
# End Debug
|
||||
except requests.exceptions.RequestException as requests_exception:
|
||||
except requests.exceptions.RequestException as rest_error:
|
||||
logger.error(
|
||||
f"Error connecting to device {self.device}: {requests_exception}"
|
||||
f"Error connecting to device {self.device.location}: {rest_error}"
|
||||
)
|
||||
response = config["messages"]["general"]
|
||||
status = codes["danger"]
|
||||
response = params.messages.general
|
||||
status = code.invalid
|
||||
return response, status
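For reference, the request body sent to the hyperglass-frr /frr endpoint is simply the JSON produced by Construct for a REST transport; a bgp_route query is shown because its body is fully visible in construct.py above (target value illustrative):

import json

query = json.dumps({"query_type": "bgp_route", "afi": "ipv4", "target": "203.0.113.0/24"})
headers = {"Content-Type": "application/json", "X-API-Key": "<agent api key>"}
# requests.post(f"http://{device.address}:{device.port}/frr", headers=headers, data=query, timeout=7)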
|
||||
|
||||
def bird(self):
|
||||
|
|
@ -99,32 +95,30 @@ class Rest:
|
|||
try:
|
||||
headers = {
|
||||
"Content-Type": "application/json",
|
||||
"X-API-Key": self.cred["password"],
|
||||
"X-API-Key": self.cred.password.get_secret_value(),
|
||||
}
|
||||
json_query = json.dumps(self.query)
|
||||
bird_endpoint = (
|
||||
f'http://{self.device["address"]}:{self.device["port"]}/bird'
|
||||
f"http://{self.device.address.exploded}:{self.device.port}/bird"
|
||||
)
|
||||
# Debug
|
||||
logger.debug(f"HTTP Headers:\n{headers}")
|
||||
logger.debug(f"JSON query:\n{json_query}")
|
||||
logger.debug(f"HTTP Headers: {headers}")
|
||||
logger.debug(f"JSON query: {json_query}")
|
||||
logger.debug(f"BIRD endpoint: {bird_endpoint}")
|
||||
# End Debug
|
||||
bird_response = requests.post(
|
||||
bird_endpoint, headers=headers, data=json_query, timeout=7
|
||||
)
|
||||
response = bird_response.text
|
||||
status = bird_response.status_code
|
||||
# Debug
|
||||
logger.debug(f"BIRD response text:\n{response}")
|
||||
logger.debug(f"BIRD status code: {status}")
|
||||
logger.debug(f"BIRD response text:\n{response}")
|
||||
# End Debug
|
||||
except requests.exceptions.RequestException as requests_exception:
|
||||
logger.error(
|
||||
f"Error connecting to device {self.device}: {requests_exception}"
|
||||
)
|
||||
response = config["messages"]["general"]
|
||||
status = codes["danger"]
|
||||
response = params.messages.general
|
||||
status = code.invalid
|
||||
return response, status
|
||||
|
||||
|
||||
|
|
@ -137,61 +131,57 @@ class Netmiko:
|
|||
def __init__(self, transport, device, query_type, target):
|
||||
self.device = device
|
||||
self.target = target
|
||||
self.cred = configuration.credential(self.device["credential"])
|
||||
self.params = getattr(Construct(device), query_type)(transport, target)
|
||||
self.location = self.params[0]
|
||||
self.nos = self.params[1]
|
||||
self.command = self.params[2]
|
||||
self.cred = getattr(credentials, self.device.credential)
|
||||
self.location, self.nos, self.command = getattr(Construct(device), query_type)(
|
||||
transport, target
|
||||
)
|
||||
self.nm_host = {
|
||||
"host": self.location,
|
||||
"device_type": self.nos,
|
||||
"username": self.cred["username"],
|
||||
"password": self.cred["password"],
|
||||
"username": self.cred.username,
|
||||
"password": self.cred.password.get_secret_value(),
|
||||
"global_delay_factor": 0.5,
|
||||
}
|
||||
|
||||
def direct(self):
|
||||
"""Connects to the router via netmiko library, return the command output"""
|
||||
# Debug
|
||||
logger.debug(f"Netmiko host: {self.nm_host}")
|
||||
logger.debug(f"Connecting to host via Netmiko library...")
|
||||
# End Debug
|
||||
"""
|
||||
Connects to the router via netmiko library, return the command
|
||||
output.
|
||||
"""
|
||||
logger.debug(f"Connecting to {self.device.location} via Netmiko library...")
|
||||
try:
|
||||
nm_connect_direct = ConnectHandler(**self.nm_host)
|
||||
response = nm_connect_direct.send_command(self.command)
|
||||
status = codes["success"]
|
||||
logger.debug(
|
||||
f"Response for direction connection with command {self.command}:\n{response}"
|
||||
)
|
||||
status = code.valid
|
||||
logger.debug(f"Response for direct command {self.command}:\n{response}")
|
||||
except (
|
||||
NetMikoAuthenticationException,
|
||||
NetMikoTimeoutException,
|
||||
NetmikoAuthError,
|
||||
NetmikoTimeoutError,
|
||||
) as netmiko_exception:
|
||||
response = config["messages"]["general"]
|
||||
status = codes["danger"]
|
||||
response = params.messages.general
|
||||
status = code.invalid
|
||||
logger.error(f"{netmiko_exception}, {status}")
|
||||
return response, status
|
||||
|
||||
def proxied(self):
|
||||
"""
|
||||
Connects to the proxy server via netmiko library, then logs into the router via \
|
||||
standard SSH
|
||||
Connects to the proxy server via netmiko library, then logs
|
||||
into the router via SSH.
|
||||
"""
|
||||
proxy_name = self.device["proxy"]
|
||||
device_proxy = configuration.proxy(proxy_name)
|
||||
device_proxy = getattr(proxies, self.device.proxy)
|
||||
nm_proxy = {
|
||||
"host": device_proxy["address"],
|
||||
"username": device_proxy["username"],
|
||||
"password": device_proxy["password"],
|
||||
"device_type": device_proxy["type"],
|
||||
"host": device_proxy.address.exploded,
|
||||
"username": device_proxy.username,
|
||||
"password": device_proxy.password.get_secret_value(),
|
||||
"device_type": device_proxy.nos,
|
||||
"global_delay_factor": 0.5,
|
||||
}
|
||||
nm_connect_proxied = ConnectHandler(**nm_proxy)
|
||||
nm_ssh_command = device_proxy["ssh_command"].format(**self.nm_host) + "\n"
|
||||
nm_ssh_command = device_proxy.ssh_command.format(**self.nm_host) + "\n"
|
||||
# Debug
|
||||
logger.debug(f"Netmiko proxy {proxy_name}:\n{nm_proxy}")
|
||||
logger.debug(f"Netmiko proxy {self.device.proxy}")
|
||||
logger.debug(f"Proxy SSH command: {nm_ssh_command}")
|
||||
# End Debug
|
||||
nm_connect_proxied.write_channel(nm_ssh_command)
|
||||
|
|
@ -215,7 +205,7 @@ class Netmiko:
|
|||
)
|
||||
redispatch(nm_connect_proxied, self.nm_host["device_type"])
|
||||
response = nm_connect_proxied.send_command(self.command)
|
||||
status = codes["success"]
|
||||
status = code.valid
|
||||
logger.debug(f"Netmiko proxied response:\n{response}")
|
||||
except (
|
||||
NetMikoAuthenticationException,
|
||||
|
|
@ -223,18 +213,17 @@ class Netmiko:
|
|||
NetmikoAuthError,
|
||||
NetmikoTimeoutError,
|
||||
) as netmiko_exception:
|
||||
response = config["messages"]["general"]
|
||||
status = codes["danger"]
|
||||
logger.error(
|
||||
f'{netmiko_exception}, {status},Proxy: {self.nm_host["proxy"]}'
|
||||
)
|
||||
response = params.messages.general
|
||||
status = code.invalid
|
||||
logger.error(f"{netmiko_exception}, {status},Proxy: {self.device.proxy}")
|
||||
return response, status
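The proxied path opens an SSH session to the proxy host, writes a plain ssh command into the channel to hop to the router, then uses Netmiko's redispatch() to re-type the live session as the router's NOS so send_command() behaves normally. A condensed sketch of that pattern with hypothetical host values (the real code also waits for and answers the router's password prompt):

from netmiko import ConnectHandler, redispatch

proxy = ConnectHandler(
    host="proxy.example.net", device_type="linux",
    username="lg", password="proxy-secret",
)
proxy.write_channel("ssh -l lg 192.0.2.10\n")   # hop from the proxy to the router
# ... read the channel and answer the router's password prompt here ...
redispatch(proxy, "cisco_ios")                  # re-type the channel as the router's NOS
output = proxy.send_command("show bgp ipv4 unicast 198.51.100.0/24")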
|
||||
|
||||
|
||||
class Execute:
|
||||
"""
|
||||
Ingests user input, runs blacklist check, runs prefix length check (if enabled), pulls all \
|
||||
configuration variables for the input router.
|
||||
Ingests user input, runs blacklist check, runs prefix length check
|
||||
(if enabled), pulls all configuration variables for the input
|
||||
router.
|
||||
"""
|
||||
|
||||
def __init__(self, lg_data):
|
||||
|
|
@ -244,18 +233,20 @@ class Execute:
|
|||
self.input_target = self.input_data["target"]
|
||||
|
||||
def parse(self, output, nos):
|
||||
"""Splits BGP output by AFI, returns only IPv4 & IPv6 output for protocol-agnostic \
|
||||
commands (Community & AS_PATH Lookups)"""
|
||||
logger.debug(f"Parsing output...")
|
||||
"""
|
||||
Splits BGP output by AFI, returns only IPv4 & IPv6 output for
|
||||
protocol-agnostic commands (Community & AS_PATH Lookups).
|
||||
"""
|
||||
logger.debug("Parsing output...")
|
||||
parsed = output
|
||||
if self.input_type in ["bgp_community", "bgp_aspath"]:
|
||||
if nos in ["cisco_ios"]:
|
||||
if self.input_type in ("bgp_community", "bgp_aspath"):
|
||||
if nos in ("cisco_ios",):
|
||||
logger.debug(f"Parsing output for device type {nos}")
|
||||
delimiter = "For address family: "
|
||||
parsed_ipv4 = output.split(delimiter)[1]
|
||||
parsed_ipv6 = output.split(delimiter)[2]
|
||||
parsed = delimiter + parsed_ipv4 + delimiter + parsed_ipv6
|
||||
if nos in ["cisco_xr"]:
|
||||
elif nos in ("cisco_xr",):
|
||||
logger.debug(f"Parsing output for device type {nos}")
|
||||
delimiter = "Address Family: "
|
||||
parsed_ipv4 = output.split(delimiter)[1]
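In both branches the raw output is split on the platform's address-family banner and only the first two sections, assumed to be IPv4 and IPv6, are stitched back together. A tiny worked example of the IOS case with illustrative output:

output = (
    "For address family: IPv4 Unicast\n<ipv4 bgp paths>\n"
    "For address family: IPv6 Unicast\n<ipv6 bgp paths>\n"
)
delimiter = "For address family: "
chunks = output.split(delimiter)   # ['', 'IPv4 Unicast\n...', 'IPv6 Unicast\n...']
parsed = delimiter + chunks[1] + delimiter + chunks[2]   # only the v4 and v6 sections remain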
|
||||
|
|
@ -265,49 +256,40 @@ class Execute:
|
|||
|
||||
def response(self):
|
||||
"""
|
||||
Initializes Execute.filter(), if input fails to pass filter, returns errors to front end. \
|
||||
Otherwise, executes queries.
|
||||
Initializes Execute.filter(), if input fails to pass filter,
|
||||
returns errors to front end. Otherwise, executes queries.
|
||||
"""
|
||||
device_config = configuration.device(self.input_location)
|
||||
# Debug
|
||||
device_config = getattr(devices, self.input_location)
|
||||
logger.debug(f"Received query for {self.input_data}")
|
||||
logger.debug(f"Matched device config:\n{device_config}")
|
||||
# End Debug
|
||||
# Run query parameters through validity checks
|
||||
validity, msg, status = getattr(Validate(device_config), self.input_type)(
|
||||
self.input_target
|
||||
)
|
||||
if not validity:
|
||||
logger.debug(f"Invalid query")
|
||||
## return msg, status, self.input_data
|
||||
logger.debug("Invalid query")
|
||||
return {"output": msg, "status": status}
|
||||
connection = None
|
||||
output = config["messages"]["general"]
|
||||
output = params.messages.general
|
||||
info = self.input_data
|
||||
logger.debug(f"Validity: {validity}, Message: {msg}, Status: {status}")
|
||||
if device_config["type"] in configuration.rest_list():
|
||||
if Supported.is_rest(device_config.nos):
|
||||
connection = Rest("rest", device_config, self.input_type, self.input_target)
|
||||
raw_output, status = getattr(connection, device_config["type"])()
|
||||
output = self.parse(raw_output, device_config["type"])
|
||||
## return output, status, info
|
||||
return {"output": output, "status": status}
|
||||
if device_config["type"] in configuration.scrape_list():
|
||||
raw_output, status = getattr(connection, device_config.nos)()
|
||||
output = self.parse(raw_output, device_config.nos)
|
||||
# return {"output": output, "status": status}
|
||||
elif Supported.is_scrape(device_config.nos):
|
||||
logger.debug(f"Initializing Netmiko...")
|
||||
connection = Netmiko(
|
||||
"scrape", device_config, self.input_type, self.input_target
|
||||
)
|
||||
if device_config["proxy"]:
|
||||
if device_config.proxy:
|
||||
raw_output, status = connection.proxied()
|
||||
else:
|
||||
elif not device_config.proxy:
|
||||
raw_output, status = connection.direct()
|
||||
output = self.parse(raw_output, device_config["type"])
|
||||
output = self.parse(raw_output, device_config.nos)
|
||||
logger.debug(
|
||||
f'Parsed output for device type {device_config["type"]}:\n{output}'
|
||||
f"Parsed output for device type {device_config.nos}:\n{output}"
|
||||
)
|
||||
## return output, status, info
|
||||
return {"output": output, "status": status}
|
||||
if device_config["type"] not in configuration.supported_nos():
|
||||
logger.error(
|
||||
f"Device not supported, or no commands for device configured. {status}, {info}"
|
||||
)
|
||||
## return output, status, info
|
||||
# return {"output": output, "status": status}
|
||||
return {"output": output, "status": status}
|
||||
@ -1,40 +1,27 @@
|
|||
# https://github.com/checktheroads/hyperglass
|
||||
"""
|
||||
Accepts raw input data from execute.py, passes it through specific filters based on query type, \
|
||||
returns validity boolean and specific error message.
|
||||
Accepts raw input data from execute.py, passes it through specific
|
||||
filters based on query type, returns validity boolean and specific
|
||||
error message.
|
||||
"""
|
||||
# Standard Imports
|
||||
import re
|
||||
import inspect
|
||||
import logging
|
||||
import ipaddress
|
||||
|
||||
# Module Imports
|
||||
import logzero
|
||||
from logzero import logger
|
||||
from netaddr.core import AddrFormatError
|
||||
from netaddr import IPNetwork, IPAddress, IPSet # pylint: disable=unused-import
|
||||
|
||||
# Dear PyLint, the netaddr library is a special snowflake. You might not see `IPAddress` get used, \
|
||||
# but when you use something like `IPNetwork("192.0.2.1/24").ip`, the returned value is \
|
||||
# IPAddress("192.0.2.1"), so I do actually need this import. <3, -ML
|
||||
|
||||
# Project Imports
|
||||
from hyperglass import configuration
|
||||
|
||||
# Configuration Imports
|
||||
config = configuration.params()
|
||||
|
||||
# Logzero Configuration
|
||||
if configuration.debug_state():
|
||||
logzero.loglevel(logging.DEBUG)
|
||||
else:
|
||||
logzero.loglevel(logging.INFO)
|
||||
from hyperglass.constants import code
|
||||
from hyperglass.configuration import params, logzero_config
|
||||
|
||||
|
||||
class IPType:
|
||||
"""
|
||||
Passes input through IPv4/IPv6 regex patterns to determine if input is formatted as a host \
|
||||
(e.g. 192.0.2.1), or as CIDR (e.g. 192.0.2.0/24). is_host() and is_cidr() return a boolean.
|
||||
Passes input through IPv4/IPv6 regex patterns to determine if input
|
||||
is formatted as a host (e.g. 192.0.2.1), or as CIDR
|
||||
(e.g. 192.0.2.0/24). is_host() and is_cidr() return a boolean.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
|
|
@ -72,7 +59,7 @@ class IPType:
|
|||
|
||||
def is_host(self, target):
|
||||
"""Tests input to see if formatted as host"""
|
||||
ip_version = IPNetwork(target).ip.version
|
||||
ip_version = ipaddress.ip_network(target).version
|
||||
state = False
|
||||
if ip_version == 4 and re.match(self.ipv4_host, target):
|
||||
logger.debug(f"{target} is an IPv{ip_version} host.")
|
||||
|
|
@ -84,7 +71,7 @@ class IPType:
|
|||
|
||||
def is_cidr(self, target):
|
||||
"""Tests input to see if formatted as CIDR"""
|
||||
ip_version = IPNetwork(target).ip.version
|
||||
ip_version = ipaddress.ip_network(target).version
|
||||
state = False
|
||||
if ip_version == 4 and re.match(self.ipv4_cidr, target):
|
||||
state = True
|
||||
|
|
@ -97,38 +84,47 @@ def ip_validate(target):
|
|||
"""Validates if input is a valid IP address"""
|
||||
validity = False
|
||||
try:
|
||||
valid_ip = IPNetwork(target).ip
|
||||
if (
|
||||
valid_ip.is_reserved()
|
||||
or valid_ip.is_netmask()
|
||||
or valid_ip.is_hostmask()
|
||||
or valid_ip.is_loopback()
|
||||
):
|
||||
valid_ip = ipaddress.ip_network(target)
|
||||
if valid_ip.is_reserved or valid_ip.is_unspecified or valid_ip.is_loopback:
|
||||
validity = False
|
||||
logger.debug(f"IP {valid_ip} is invalid")
|
||||
if valid_ip.is_unicast():
|
||||
if valid_ip.is_global:
|
||||
validity = True
|
||||
logger.debug(f"IP {valid_ip} is valid")
|
||||
except AddrFormatError:
|
||||
except (ipaddress.AddressValueError, ValueError):
|
||||
logger.debug(f"IP {target} is invalid")
|
||||
validity = False
|
||||
return validity
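The rewritten check relies on the standard-library ipaddress properties instead of netaddr: reserved, unspecified, and loopback targets are rejected, and only globally routable addresses pass. A quick illustration:

import ipaddress

for target in ("1.1.1.1", "10.0.0.1", "127.0.0.1", "2001:db8::1"):
    net = ipaddress.ip_network(target)
    print(target, net.is_global, net.is_loopback)
# 1.1.1.1      True  False   -> accepted
# 10.0.0.1     False False   -> rejected (not globally routable)
# 127.0.0.1    False True    -> rejected (loopback)
# 2001:db8::1  False False   -> rejected (documentation range)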
|
||||
|
||||
|
||||
def ip_blacklist(target):
|
||||
"""Check blacklist list for prefixes/IPs, return boolean based on list membership"""
|
||||
blacklist = IPSet(configuration.blacklist())
|
||||
logger.debug(f"Blacklist: {blacklist}")
|
||||
"""
|
||||
Check blacklist list for prefixes/IPs, return boolean based on list
|
||||
membership.
|
||||
"""
|
||||
logger.debug(f"Blacklist Enabled: {params.features.blacklist.enable}")
|
||||
membership = False
|
||||
if target in blacklist:
|
||||
membership = True
|
||||
if params.features.blacklist.enable:
|
||||
target_ver = ipaddress.ip_network(target).version
|
||||
user_blacklist = params.features.blacklist.networks
|
||||
networks = [net for net in user_blacklist if net.version == target_ver]
|
||||
logger.debug(f"IPv{target_ver} Blacklist Networks: {networks}")
|
||||
while not membership:
|
||||
for net in networks:
|
||||
if ipaddress.ip_network(target).subnet_of(net):
|
||||
membership = True
|
||||
logger.debug(f"Blacklist Match Found for {target} in {net}")
|
||||
break
|
||||
break
|
||||
return membership
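Membership is now a subnet_of() test against only the blacklist networks that share the target's IP version (subnet_of() raises TypeError for mixed versions, which is why the list is filtered first). Using two of the networks from the previous blacklist as an example:

import ipaddress

blacklist_v4 = [ipaddress.ip_network("100.64.0.0/10"), ipaddress.ip_network("198.18.0.0/15")]
target = ipaddress.ip_network("100.64.12.1")   # a host target becomes a /32 network
print(any(target.subnet_of(net) for net in blacklist_v4))   # True -> query rejected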
|
||||
|
||||
|
||||
def ip_attributes(target):
|
||||
"""Construct dictionary of validated IP attributes for repeated use"""
|
||||
network = IPNetwork(target)
|
||||
addr = network.ip
|
||||
"""
|
||||
Construct dictionary of validated IP attributes for repeated use.
|
||||
"""
|
||||
network = ipaddress.ip_network(target)
|
||||
addr = network.network_address
|
||||
ip_version = addr.version
|
||||
afi = f"ipv{ip_version}"
|
||||
afi_pretty = f"IPv{ip_version}"
|
||||
|
|
@ -148,9 +144,8 @@ def ip_type_check(query_type, target, device):
|
|||
"""Checks multiple IP address related validation parameters"""
|
||||
prefix_attr = ip_attributes(target)
|
||||
logger.debug(f"IP Attributes:\n{prefix_attr}")
|
||||
requires_ipv6_cidr = configuration.requires_ipv6_cidr(device["type"])
|
||||
validity = False
|
||||
msg = config["messages"]["not_allowed"].format(i=target)
|
||||
msg = params.messages.not_allowed.format(i=target)
|
||||
# If target is a member of the blacklist, return an error.
|
||||
if ip_blacklist(target):
|
||||
validity = False
|
||||
|
|
@ -158,11 +153,11 @@ def ip_type_check(query_type, target, device):
|
|||
return (validity, msg)
|
||||
# If enable_max_prefix feature enabled, require that BGP Route queries be smaller than\
|
||||
# configured size limit.
|
||||
if query_type == "bgp_route" and config["features"]["max_prefix"]["enable"]:
|
||||
max_length = config["features"]["max_prefix"][prefix_attr["afi"]]
|
||||
if query_type == "bgp_route" and params.features.max_prefix.enable:
|
||||
max_length = getattr(params.features.max_prefix, prefix_attr["afi"])
|
||||
if prefix_attr["length"] > max_length:
|
||||
validity = False
|
||||
msg = config["features"]["max_prefix"]["message"].format(
|
||||
msg = params.features.max_prefix.message.format(
|
||||
m=max_length, i=prefix_attr["network"]
|
||||
)
|
||||
logger.debug(f"Failed max prefix length check")
|
||||
|
|
@ -172,16 +167,16 @@ def ip_type_check(query_type, target, device):
|
|||
if (
|
||||
query_type == "bgp_route"
|
||||
and prefix_attr["version"] == 6
|
||||
and requires_ipv6_cidr
|
||||
and device.nos in params.general.requires_ipv6_cidr
|
||||
and IPType().is_host(target)
|
||||
):
|
||||
msg = config["messages"]["requires_ipv6_cidr"].format(d=device["display_name"])
|
||||
msg = params.messages.requires_ipv6_cidr.format(d=device.display_name)
|
||||
validity = False
|
||||
logger.debug(f"Failed requires IPv6 CIDR check")
|
||||
return (validity, msg)
|
||||
# If query type is ping or traceroute, and query target is in CIDR format, return an error.
|
||||
if query_type in ["ping", "traceroute"] and IPType().is_cidr(target):
|
||||
msg = config["messages"]["directed_cidr"].format(q=query_type.capitalize())
|
||||
if query_type in ("ping", "traceroute") and IPType().is_cidr(target):
|
||||
msg = params.messages.directed_cidr.format(q=query_type.capitalize())
|
||||
validity = False
|
||||
logger.debug(f"Failed CIDR format for ping/traceroute check")
|
||||
return (validity, msg)
|
||||
|
|
@ -191,31 +186,34 @@ def ip_type_check(query_type, target, device):
|
|||
|
||||
|
||||
def current_function():
|
||||
"""Returns name of current function for easy initialization & calling."""
|
||||
"""
|
||||
Returns name of current function for easy initialization & calling.
|
||||
"""
|
||||
this_function = inspect.stack()[1][3]
|
||||
return this_function
|
||||
|
||||
|
||||
class Validate:
|
||||
"""Accepts raw input and associated device parameters from execute.py and validates the input \
|
||||
based on specific query type. Returns boolean for validity, specific error message, and status \
|
||||
code."""
|
||||
"""
|
||||
Accepts raw input and associated device parameters from execute.py
|
||||
and validates the input based on specific query type. Returns
|
||||
boolean for validity, specific error message, and status code.
|
||||
"""
|
||||
|
||||
def __init__(self, device):
|
||||
"""Initialize device parameters and error codes."""
|
||||
self.device = device
|
||||
self.codes = configuration.codes()
|
||||
|
||||
def ping(self, target):
|
||||
"""Ping Query: Input Validation & Error Handling"""
|
||||
query_type = current_function()
|
||||
logger.debug(f"Validating {query_type} query for target {target}...")
|
||||
validity = False
|
||||
msg = config["messages"]["invalid_ip"].format(i=target)
|
||||
status = self.codes["warning"]
|
||||
msg = params.messages.invalid_ip.format(i=target)
|
||||
status = code.not_allowed
|
||||
# Perform basic validation of an IP address, return error if not a valid IP.
|
||||
if not ip_validate(target):
|
||||
status = self.codes["danger"]
|
||||
status = code.invalid
|
||||
logger.error(f"{msg}, {status}")
|
||||
return (validity, msg, status)
|
||||
# Perform further validation of a valid IP address, return an error upon failure.
|
||||
|
|
@ -223,7 +221,7 @@ class Validate:
|
|||
if valid_query:
|
||||
validity = True
|
||||
msg = f"{target} is a valid {query_type} query."
|
||||
status = self.codes["success"]
|
||||
status = code.valid
|
||||
logger.debug(f"{msg}, {status}")
|
||||
return (validity, msg, status)
|
||||
return (validity, msg, status)
|
||||
|
|
@ -233,11 +231,11 @@ class Validate:
|
|||
query_type = current_function()
|
||||
logger.debug(f"Validating {query_type} query for target {target}...")
|
||||
validity = False
|
||||
msg = config["messages"]["invalid_ip"].format(i=target)
|
||||
status = self.codes["warning"]
|
||||
msg = params.messages.invalid_ip.format(i=target)
|
||||
status = code.not_allowed
|
||||
# Perform basic validation of an IP address, return error if not a valid IP.
|
||||
if not ip_validate(target):
|
||||
status = self.codes["danger"]
|
||||
status = code.invalid
|
||||
logger.error(f"{msg}, {status}")
|
||||
return (validity, msg, status)
|
||||
# Perform further validation of a valid IP address, return an error upon failure.
|
||||
|
|
@ -245,7 +243,7 @@ class Validate:
|
|||
if valid_query:
|
||||
validity = True
|
||||
msg = f"{target} is a valid {query_type} query."
|
||||
status = self.codes["success"]
|
||||
status = code.valid
|
||||
logger.debug(f"{msg}, {status}")
|
||||
return (validity, msg, status)
|
||||
return (validity, msg, status)
|
||||
|
|
@ -255,11 +253,11 @@ class Validate:
|
|||
query_type = current_function()
|
||||
logger.debug(f"Validating {query_type} query for target {target}...")
|
||||
validity = False
|
||||
msg = config["messages"]["invalid_ip"].format(i=target)
|
||||
status = self.codes["warning"]
|
||||
msg = params.messages.invalid_ip.format(i=target)
|
||||
status = code.not_allowed
|
||||
# Perform basic validation of an IP address, return error if not a valid IP.
|
||||
if not ip_validate(target):
|
||||
status = self.codes["danger"]
|
||||
status = code.invalid
|
||||
logger.error(f"{msg}, {status}")
|
||||
return (validity, msg, status)
|
||||
# Perform further validation of a valid IP address, return an error upon failure.
|
||||
|
|
@ -267,7 +265,7 @@ class Validate:
|
|||
if valid_query:
|
||||
validity = True
|
||||
msg = f"{target} is a valid {query_type} query."
|
||||
status = self.codes["success"]
|
||||
status = code.valid
|
||||
logger.debug(f"{msg}, {status}")
|
||||
return (validity, msg, status)
|
||||
return (validity, msg, status)
|
||||
|
|
@ -277,24 +275,24 @@ class Validate:
|
|||
query_type = current_function()
|
||||
logger.debug(f"Validating {query_type} query for target {target}...")
|
||||
validity = False
|
||||
msg = config["messages"]["invalid_dual"].format(i=target, qt="BGP Community")
|
||||
status = self.codes["danger"]
|
||||
msg = params.messages.invalid_dual.format(i=target, qt="BGP Community")
|
||||
status = code.invalid
|
||||
# Validate input communities against configured or default regex pattern
|
||||
# Extended Communities, new-format
|
||||
if re.match(config["features"][query_type]["regex"]["extended_as"], target):
|
||||
if re.match(params.features.bgp_community.regex.extended_as, target):
|
||||
validity = True
|
||||
msg = f"{target} matched extended AS format community."
|
||||
status = self.codes["success"]
|
||||
status = code.valid
|
||||
# Extended Communities, 32 bit format
|
||||
if re.match(config["features"][query_type]["regex"]["decimal"], target):
|
||||
elif re.match(params.features.bgp_community.regex.decimal, target):
|
||||
validity = True
|
||||
msg = f"{target} matched decimal format community."
|
||||
status = self.codes["success"]
|
||||
status = code.valid
|
||||
# RFC 8092 Large Community Support
|
||||
if re.match(config["features"][query_type]["regex"]["large"], target):
|
||||
elif re.match(params.features.bgp_community.regex.large, target):
|
||||
validity = True
|
||||
msg = f"{target} matched large community."
|
||||
status = self.codes["success"]
|
||||
status = code.valid
|
||||
if not validity:
|
||||
logger.error(f"{msg}, {status}")
|
||||
logger.debug(f"{msg}, {status}")
|
||||
|
|
@ -305,13 +303,15 @@ class Validate:
|
|||
query_type = current_function()
|
||||
logger.debug(f"Validating {query_type} query for target {target}...")
|
||||
validity = False
|
||||
msg = config["messages"]["invalid_dual"].format(i=target, qt="AS Path")
|
||||
status = self.codes["danger"]
|
||||
msg = params.messages.invalid_dual.format(i=target, qt="AS Path")
|
||||
status = code.invalid
|
||||
# Validate input AS_PATH regex pattern against configured or default regex pattern
|
||||
if re.match(config["features"][query_type]["regex"]["pattern"], target):
|
||||
mode = getattr(params.features.bgp_aspath.regex, "mode")
|
||||
pattern = getattr(params.features.bgp_aspath.regex, mode)
|
||||
if re.match(pattern, target):
|
||||
validity = True
|
||||
msg = f"{target} matched AS_PATH regex."
|
||||
status = self.codes["success"]
|
||||
status = code.valid
|
||||
if not validity:
|
||||
logger.error(f"{msg}, {status}")
|
||||
logger.debug(f"{msg}, {status}")
|
||||
hyperglass/configuration/.gitignore
@ -1,2 +1,3 @@
|
|||
.DS_Store
|
||||
*.toml
|
||||
*.yaml
|
||||
@ -1,461 +1,86 @@
|
|||
# https://github.com/checktheroads/hyperglass
|
||||
"""
|
||||
Imports configuration variables from configuration files and returns default values if undefined.
|
||||
Imports configuration variables from configuration files and returns
|
||||
default values if undefined.
|
||||
"""
|
||||
# Standard Imports
|
||||
import os
|
||||
import math
|
||||
import logging
|
||||
|
||||
# Module Imports
|
||||
import toml
|
||||
__all__ = ("params", "commands", "devices", "credentials", "proxies", "logzero_config")
|
||||
|
||||
# Standard Library Imports
|
||||
import math
|
||||
from pathlib import Path
|
||||
|
||||
# Third Party Module Imports
|
||||
import yaml
|
||||
import logzero
|
||||
from logzero import logger
|
||||
from pydantic import ValidationError
|
||||
|
||||
# Project Imports
|
||||
import hyperglass
|
||||
from hyperglass.configuration import models
|
||||
from hyperglass.exceptions import ConfigError, UnsupportedDevice
|
||||
from hyperglass.constants import Supported
|
||||
|
||||
# Project Directories
|
||||
working_dir = os.path.dirname(os.path.abspath(__file__))
|
||||
hyperglass_root = os.path.dirname(hyperglass.__file__)
|
||||
working_dir = Path(__file__).resolve().parent
|
||||
|
||||
# TOML Imports
|
||||
config = toml.load(os.path.join(working_dir, "configuration.toml"))
|
||||
devices = toml.load(os.path.join(working_dir, "devices.toml"))
|
||||
|
||||
|
||||
def debug_state():
|
||||
"""Returns string for logzero log level"""
|
||||
state = config.get("debug", False)
|
||||
return state
|
||||
# Import main hyperglass configuration file
|
||||
try:
|
||||
with open(working_dir.joinpath("hyperglass.yaml")) as config_yaml:
|
||||
user_config = yaml.safe_load(config_yaml)
|
||||
except FileNotFoundError as no_config_error:
|
||||
logger.error(no_config_error)
|
||||
logger.error("Default configuration will be used")
|
||||
pass
|
||||
# Import device commands file
|
||||
try:
|
||||
with open(working_dir.joinpath("commands.yaml")) as commands_yaml:
|
||||
user_commands = yaml.safe_load(commands_yaml)
|
||||
except FileNotFoundError:
|
||||
logger.info(
|
||||
(
|
||||
f'No commands found in {working_dir.joinpath("commands.yaml")}. '
|
||||
"Defaults will be used."
|
||||
)
|
||||
)
|
||||
pass
|
||||
# Import device configuration file
|
||||
try:
|
||||
with open(working_dir.joinpath("devices.yaml")) as devices_yaml:
|
||||
user_devices = yaml.safe_load(devices_yaml)
|
||||
except FileNotFoundError as no_devices_error:
|
||||
logger.error(no_devices_error)
|
||||
raise ConfigError(
|
||||
(
|
||||
f'"{working_dir.joinpath("devices.yaml")}" not found. '
|
||||
"Devices are required to start hyperglass, please consult "
|
||||
"the installation documentation."
|
||||
)
|
||||
)
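The loaders above and the model mapping below expect devices.yaml to carry three top-level maps (router, credential, and proxy), feeding models.Routers, models.Credentials, and models.Proxies respectively. A minimal illustrative document expressed through yaml.safe_load; field names beyond the three top-level keys are inferred from attribute access elsewhere in this change, and all values are placeholders:

import yaml

user_devices = yaml.safe_load(
"""
router:
  sfo1:
    address: 192.0.2.10
    nos: cisco_ios
    credential: default
    proxy: jumpbox
credential:
  default:
    username: hyperglass
    password: s3cr3t
proxy:
  jumpbox:
    address: 192.0.2.250
    nos: linux
    username: lg
    password: s3cr3t
    ssh_command: "ssh -l {username} {host}"
"""
)
print(list(user_devices))   # ['router', 'credential', 'proxy']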
|
||||
|
||||
# Map imported user config files to expected schema:
|
||||
try:
|
||||
params = models.Params(**user_config)
|
||||
commands = models.Commands.import_params(user_commands)
|
||||
devices = models.Routers.import_params(user_devices["router"])
|
||||
credentials = models.Credentials.import_params(user_devices["credential"])
|
||||
proxies = models.Proxies.import_params(user_devices["proxy"])
|
||||
except ValidationError as validation_errors:
|
||||
errors = validation_errors.errors()
|
||||
for error in errors:
|
||||
raise ConfigError(
|
||||
f'The value of {error["loc"][0]} field is invalid: {error["msg"]} '
|
||||
)
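configuration.models itself is not part of this diff, but the mapping above implies nested Pydantic models whose attributes are accessed directly throughout the new code (params.general.debug, params.messages.general, and so on). A rough sketch of that shape (not the actual models), using defaults lifted from the old params() function below:

from pydantic import BaseModel

class General(BaseModel):
    debug: bool = False
    primary_asn: str = "65000"

class Messages(BaseModel):
    general: str = "An error occurred."
    invalid_ip: str = "<b>{i}</b> is not a valid IP address."

class Params(BaseModel):
    general: General = General()
    messages: Messages = Messages()

params = Params(**{"general": {"debug": True}})
print(params.general.debug, params.messages.general)   # True An error occurred.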
|
||||
|
||||
# Logzero Configuration
|
||||
if debug_state():
|
||||
logzero.loglevel(logging.DEBUG)
|
||||
else:
|
||||
logzero.loglevel(logging.INFO)
|
||||
|
||||
|
||||
def blacklist():
|
||||
"""Returns list of subnets/IPs defined in blacklist.toml"""
|
||||
blacklist_config = config["blacklist"]
|
||||
return blacklist_config
|
||||
|
||||
|
||||
def requires_ipv6_cidr(nos):
|
||||
"""Returns boolean for input NOS association with the NOS list defined in \
|
||||
requires_ipv6_cidr.toml"""
|
||||
nos_list = config["requires_ipv6_cidr"]
|
||||
return bool(nos in nos_list)
|
||||
|
||||
|
||||
def networks():
|
||||
"""Returns dictionary of ASNs as keys, list of associated locations as values. Imported as a \
|
||||
Jinja2 variable on the main page that populates the network/ASN select class."""
|
||||
asn_dict = {}
|
||||
routers_list = devices["router"]
|
||||
for router_config in routers_list.values():
|
||||
asn = router_config["asn"]
|
||||
if asn in asn_dict:
|
||||
asn_dict[asn].append(router_config["location"])
|
||||
else:
|
||||
asn_dict[asn] = [router_config["location"]]
|
||||
return asn_dict
|
||||
|
||||
|
||||
def hostnames():
|
||||
"""Returns list of all router hostnames for input validation"""
|
||||
hostname_list = []
|
||||
routers_list = devices["router"]
|
||||
for router in routers_list:
|
||||
hostname_list.append(router)
|
||||
return hostname_list
|
||||
|
||||
|
||||
def locations_list():
|
||||
"""Returns a dictionary of ASNs as keys, list of associated locations, router hostnames, and \
|
||||
router display names as keys. Used by Flask to populate the /routers/<asn> route, which is \
|
||||
ingested by a JS Ajax call to populate the list of locations associated with the selected \
|
||||
network/ASN on the main page."""
|
||||
networks_dict = {}
|
||||
routers_list = devices["router"]
|
||||
for router in routers_list:
|
||||
asn = routers_list[router]["asn"]
|
||||
if asn in networks_dict:
|
||||
networks_dict[asn].append(
|
||||
dict(
|
||||
location=routers_list[router]["location"],
|
||||
hostname=router,
|
||||
display_name=routers_list[router]["display_name"],
|
||||
)
|
||||
)
|
||||
else:
|
||||
networks_dict[asn] = [
|
||||
dict(
|
||||
location=routers_list[router]["location"],
|
||||
hostname=router,
|
||||
display_name=routers_list[router]["display_name"],
|
||||
)
|
||||
]
|
||||
return networks_dict
|
||||
|
||||
|
||||
def codes():
|
||||
"""Reusable status code numbers"""
|
||||
code_dict = {
|
||||
# 200: renders standard display text
|
||||
"success": 200,
|
||||
# 405: Renders Bulma "warning" class notification with message text
|
||||
"warning": 405,
|
||||
# 415: Renders Bulma "danger" class notification with message text
|
||||
"danger": 415,
|
||||
# 504: Renders Bulma "danger" class notifiction, used for Ping/Traceroute errors
|
||||
"error": 504,
|
||||
}
|
||||
return code_dict
|
||||
|
||||
|
||||
def codes_reason():
|
||||
"""Reusable status code descriptions"""
|
||||
code_desc_dict = {
|
||||
"200": "Valid Query",
|
||||
"405": "Query Not Allowed",
|
||||
"415": "Query Invalid",
|
||||
"504": "Unable to reach Ping target",
|
||||
}
|
||||
return code_desc_dict
|
||||
|
||||
|
||||
def rest_list():
|
||||
"""Returns list of supported hyperglass API types"""
|
||||
rest = ["frr", "bird"]
|
||||
return rest
|
||||
|
||||
|
||||
def scrape_list():
|
||||
"""Returns list of configured network operating systems"""
|
||||
config_commands = toml.load(os.path.join(working_dir, "commands.toml"))
|
||||
scrape = []
|
||||
for nos in config_commands:
|
||||
scrape.append(nos)
|
||||
return scrape
|
||||
|
||||
|
||||
def supported_nos():
|
||||
"""Combines scrape_list & rest_list for full list of supported network operating systems"""
|
||||
scrape = scrape_list()
|
||||
rest = rest_list()
|
||||
supported = scrape + rest
|
||||
return supported
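codes(), codes_reason(), rest_list(), scrape_list(), and supported_nos() are all superseded by the new hyperglass.constants module, which is not included in this diff. Judging from how it is used above (code.valid / code.not_allowed / code.invalid and Supported.is_rest() / Supported.is_scrape()), a plausible minimal shape would be the following; the names come from the call sites and the values are assumptions based on the old codes() mapping:

class code:
    valid = 200         # was "success"
    not_allowed = 405   # was "warning"
    invalid = 415       # was "danger"
    error = 504

class Supported:
    rest = ("frr", "bird")
    scrape = ("cisco_ios", "cisco_xr", "juniper")

    @staticmethod
    def is_rest(nos):
        return nos in Supported.rest

    @staticmethod
    def is_scrape(nos):
        return nos in Supported.scrape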
|
||||
|
||||
|
||||
def command(nos):
|
||||
"""Associates input NOS with matched commands defined in commands.toml"""
|
||||
config_commands = toml.load(os.path.join(working_dir, "commands.toml"))
|
||||
commands = None
|
||||
if nos in scrape_list():
|
||||
commands = {
|
||||
"dual": config_commands[nos][0]["dual"],
|
||||
"ipv4": config_commands[nos][0]["ipv4"],
|
||||
"ipv6": config_commands[nos][0]["ipv6"],
|
||||
}
|
||||
return commands
|
||||
|
||||
|
||||
def credential(cred):
|
||||
"""Associates input credential key name with configured credential username & password in \
|
||||
devices.toml."""
|
||||
c_list = devices["credential"]
|
||||
return dict(username=c_list[cred]["username"], password=c_list[cred]["password"])
|
||||
|
||||
|
||||
def device(dev):
|
||||
"""Associates input device key name with configured device attributes in devices.toml"""
|
||||
device_config = devices["router"][dev]
|
||||
return dict(
|
||||
address=device_config.get("address"),
|
||||
asn=device_config.get("asn"),
|
||||
src_addr_ipv4=device_config.get("src_addr_ipv4"),
|
||||
src_addr_ipv6=device_config.get("src_addr_ipv6"),
|
||||
credential=device_config.get("credential"),
|
||||
location=device_config.get("location"),
|
||||
display_name=device_config.get("display_name"),
|
||||
port=device_config.get("port"),
|
||||
type=device_config.get("type"),
|
||||
proxy=device_config.get("proxy"),
|
||||
)
|
||||
|
||||
|
||||
def proxy(prx):
|
||||
"""Associates input proxy key name with configured proxy attributes in devices.toml"""
|
||||
proxy_config = devices["proxy"][prx]
|
||||
return dict(
|
||||
address=proxy_config["address"],
|
||||
username=proxy_config["username"],
|
||||
password=proxy_config["password"],
|
||||
type=proxy_config["type"],
|
||||
ssh_command=proxy_config["ssh_command"],
|
||||
)
|
||||
|
||||
|
||||
def params():
|
||||
"""Builds combined nested dictionary of all parameters defined in configuration.toml, and if \
|
||||
undefined, uses a default value"""
|
||||
# pylint: disable=too-many-statements
|
||||
# Dear PyLint, this function is intended to be long AF, because hyperglass is intended to be \
|
||||
# customizable AF. It would also be silly AF to break this into multiple functions, and you'd \
|
||||
# probably still complain. <3 -ML
|
||||
general = {}
|
||||
branding = {}
|
||||
features = {}
|
||||
messages = {}
|
||||
general["primary_asn"] = config["general"].get("primary_asn", "65000")
|
||||
general["org_name"] = config["general"].get("org_name", "The Company")
|
||||
general["google_analytics"] = config["general"].get("google_analytics", "")
|
||||
general["redis_host"] = config["general"].get("redis_host", "localhost")
|
||||
general["redis_port"] = config["general"].get("redis_port", 6379)
|
||||
features["rate_limit"] = config["features"]["rate_limit"]
|
||||
features["rate_limit"]["redis_id"] = config["features"]["rate_limit"].get(
|
||||
"redis_id", 1
|
||||
)
|
||||
features["rate_limit"]["query"] = config["features"]["rate_limit"]["query"]
|
||||
features["rate_limit"]["query"]["rate"] = config["features"]["rate_limit"][
|
||||
"query"
|
||||
].get("rate", 5)
|
||||
features["rate_limit"]["query"]["period"] = config["features"]["rate_limit"].get(
|
||||
"period", "minute"
|
||||
)
|
||||
features["rate_limit"]["query"]["title"] = config["features"]["rate_limit"][
|
||||
"query"
|
||||
].get("title", "Query Limit Reached")
|
||||
features["rate_limit"]["query"]["message"] = config["features"]["rate_limit"][
|
||||
"query"
|
||||
].get(
|
||||
"message",
|
||||
f"""Query limit of {features["rate_limit"]["query"]["rate"]} per \
|
||||
{features["rate_limit"]["query"]["period"]} reached. Please wait one minute and try \
|
||||
again.""",
|
||||
)
|
||||
features["rate_limit"]["query"]["button"] = config["features"]["rate_limit"][
|
||||
"query"
|
||||
].get("button", "Try Again")
|
||||
|
||||
features["rate_limit"]["message"] = config["features"]["rate_limit"].get(
|
||||
"message",
|
||||
f"""Query limit of {features["rate_limit"]["query"]} per minute reached. \
|
||||
Please wait one minute and try again.""",
|
||||
)
|
||||
features["rate_limit"]["site"] = config["features"]["rate_limit"]["site"]
|
||||
features["rate_limit"]["site"]["rate"] = config["features"]["rate_limit"][
|
||||
"site"
|
||||
].get("rate", 60)
|
||||
features["rate_limit"]["site"]["period"] = config["features"]["rate_limit"][
|
||||
"site"
|
||||
].get("period", "minute")
|
||||
features["rate_limit"]["site"]["title"] = config["features"]["rate_limit"][
|
||||
"site"
|
||||
].get("title", "Limit Reached")
|
||||
features["rate_limit"]["site"]["subtitle"] = config["features"]["rate_limit"][
|
||||
"site"
|
||||
].get(
|
||||
"subtitle",
|
||||
f'You have accessed this site more than {features["rate_limit"]["site"]["rate"]} '
|
||||
f'times in the last {features["rate_limit"]["site"]["period"]}.',
|
||||
)
|
||||
features["rate_limit"]["site"]["button"] = config["features"]["rate_limit"][
|
||||
"site"
|
||||
].get("button", "Try Again")
|
||||
features["cache"] = config["features"]["cache"]
|
||||
features["cache"]["redis_id"] = config["features"]["cache"].get("redis_id", 0)
|
||||
features["cache"]["timeout"] = config["features"]["cache"].get("timeout", 120)
|
||||
features["cache"]["show_text"] = config["features"]["cache"].get("show_text", True)
|
||||
features["cache"]["text"] = config["features"]["cache"].get(
|
||||
"text",
|
||||
f'Results will be cached for {math.ceil(features["cache"]["timeout"] / 60)} minutes.',
|
||||
)
|
||||
features["bgp_route"] = config["features"]["bgp_route"]
|
||||
features["bgp_route"]["enable"] = config["features"]["bgp_route"].get(
|
||||
"enable", True
|
||||
)
|
||||
features["bgp_community"] = config["features"]["bgp_community"]
|
||||
features["bgp_community"]["enable"] = config["features"]["bgp_community"].get(
|
||||
"enable", True
|
||||
)
|
||||
features["bgp_community"]["regex"] = config["features"]["bgp_community"]["regex"]
|
||||
features["bgp_community"]["regex"]["decimal"] = config["features"]["bgp_community"][
|
||||
"regex"
|
||||
].get("decimal", r"^[0-9]{1,10}$")
|
||||
features["bgp_community"]["regex"]["extended_as"] = config["features"][
|
||||
"bgp_community"
|
||||
]["regex"].get("extended_as", r"^([0-9]{0,5})\:([0-9]{1,5})$")
|
||||
features["bgp_community"]["regex"]["large"] = config["features"]["bgp_community"][
|
||||
"regex"
|
||||
].get("large", r"^([0-9]{1,10})\:([0-9]{1,10})\:[0-9]{1,10}$")
|
||||
features["bgp_aspath"] = config["features"]["bgp_aspath"]
|
||||
features["bgp_aspath"]["enable"] = config["features"]["bgp_aspath"].get(
|
||||
"enable", True
|
||||
)
|
||||
features["bgp_aspath"]["regex"] = config["features"]["bgp_aspath"]["regex"]
|
||||
features["bgp_aspath"]["regex"]["mode"] = config["features"]["bgp_aspath"][
|
||||
"regex"
|
||||
].get("mode", "asplain")
|
||||
features["bgp_aspath"]["regex"]["asplain"] = config["features"]["bgp_aspath"][
|
||||
"regex"
|
||||
].get("asplain", r"^(\^|^\_)(\d+\_|\d+\$|\d+\(\_\.\+\_\))+$")
|
||||
features["bgp_aspath"]["regex"]["asdot"] = config["features"]["bgp_aspath"][
|
||||
"regex"
|
||||
].get("asdot", r"^(\^|^\_)((\d+\.\d+)\_|(\d+\.\d+)\$|(\d+\.\d+)\(\_\.\+\_\))+$")
|
||||
features["bgp_aspath"]["regex"]["pattern"] = config["features"]["bgp_aspath"][
|
||||
"regex"
|
||||
].get(features["bgp_aspath"]["regex"]["mode"], None)
|
||||
features["ping"] = config["features"]["ping"]
|
||||
features["ping"]["enable"] = config["features"]["ping"].get("enable", True)
|
||||
features["traceroute"] = config["features"]["traceroute"]
|
||||
features["traceroute"]["enable"] = config["features"]["traceroute"].get(
|
||||
"enable", True
|
||||
)
|
||||
features["max_prefix"] = config["features"]["max_prefix"]
|
||||
features["max_prefix"]["enable"] = config["features"]["max_prefix"].get(
|
||||
"enable", False
|
||||
)
|
||||
features["max_prefix"]["ipv4"] = config["features"]["max_prefix"].get("ipv4", 24)
|
||||
features["max_prefix"]["ipv6"] = config["features"]["max_prefix"].get("ipv6", 64)
|
||||
features["max_prefix"]["message"] = config["features"]["max_prefix"].get(
|
||||
"message",
|
||||
"Prefix length must be smaller than /{m}. <b>{i}</b> is too specific.",
|
||||
)
|
||||
messages["no_query_type"] = config["messages"].get(
|
||||
"no_query_type", "Query Type must be specified."
|
||||
)
|
||||
messages["no_location"] = config["messages"].get(
|
||||
"no_location", "A location must be selected."
|
||||
)
|
||||
messages["no_input"] = config["messages"].get(
|
||||
"no_input", "A target must be specified"
|
||||
)
|
||||
messages["not_allowed"] = config["messages"].get(
|
||||
"not_allowed", "<b>{i}</b> is not allowed."
|
||||
)
|
||||
messages["requires_ipv6_cidr"] = config["messages"].get(
|
||||
"requires_ipv6_cidr",
|
||||
"<b>{d}</b> requires IPv6 BGP lookups to be in CIDR notation.",
|
||||
)
|
||||
messages["invalid_ip"] = config["messages"].get(
|
||||
"invalid_ip", "<b>{i}</b> is not a valid IP address."
|
||||
)
|
||||
messages["invalid_dual"] = config["messages"].get(
|
||||
"invalid_dual", "<b>{i}</b> is an invalid {qt}."
|
||||
)
|
||||
messages["general"] = config["messages"].get("general", "An error occurred.")
|
||||
messages["directed_cidr"] = config["messages"].get(
|
||||
"directed_cidr", "<b>{q}</b> queries can not be in CIDR format."
|
||||
)
|
||||
branding["site_name"] = config["branding"].get("site_name", "hyperglass")
|
||||
branding["footer"] = config["branding"]["footer"]
|
||||
branding["footer"]["enable"] = config["branding"]["footer"].get("enable", True)
|
||||
branding["credit"] = config["branding"]["credit"]
|
||||
branding["credit"]["enable"] = config["branding"]["credit"].get("enable", True)
|
||||
branding["peering_db"] = config["branding"]["peering_db"]
|
||||
branding["peering_db"]["enable"] = config["branding"]["peering_db"].get(
|
||||
"enable", True
|
||||
)
|
||||
branding["text"] = config["branding"]["text"]
|
||||
branding["text"]["query_type"] = config["branding"]["text"].get(
|
||||
"query_type", "Query Type"
|
||||
)
|
||||
branding["text"]["title_mode"] = config["branding"]["text"].get(
|
||||
"title_mode", "logo_only"
|
||||
)
|
||||
branding["text"]["title"] = config["branding"]["text"].get("title", "hyperglass")
|
||||
branding["text"]["subtitle"] = config["branding"]["text"].get(
|
||||
"subtitle", f'AS{general["primary_asn"]}'
|
||||
)
|
||||
branding["text"]["results"] = config["branding"]["text"].get("results", "Results")
|
||||
branding["text"]["location"] = config["branding"]["text"].get(
|
||||
"location", "Select Location..."
|
||||
)
|
||||
branding["text"]["query_placeholder"] = config["branding"]["text"].get(
|
||||
"query_placeholder", "IP, Prefix, Community, or AS Path"
|
||||
)
|
||||
branding["text"]["bgp_route"] = config["branding"]["text"].get(
|
||||
"bgp_route", "BGP Route"
|
||||
)
|
||||
branding["text"]["bgp_community"] = config["branding"]["text"].get(
|
||||
"bgp_community", "BGP Community"
|
||||
)
|
||||
branding["text"]["bgp_aspath"] = config["branding"]["text"].get(
|
||||
"bgp_aspath", "BGP AS Path"
|
||||
)
|
||||
branding["text"]["ping"] = config["branding"]["text"].get("ping", "Ping")
|
||||
branding["text"]["traceroute"] = config["branding"]["text"].get(
|
||||
"traceroute", "Traceroute"
|
||||
)
|
||||
branding["text"]["404"]["title"] = config["branding"]["text"]["404"].get(
|
||||
"title", "Error"
|
||||
)
|
||||
branding["text"]["404"]["subtitle"] = config["branding"]["text"]["404"].get(
|
||||
"subtitle", "Page Not Found"
|
||||
)
|
||||
branding["text"]["500"]["title"] = config["branding"]["text"]["500"].get(
|
||||
"title", "Error"
|
||||
)
|
||||
branding["text"]["500"]["subtitle"] = config["branding"]["text"]["500"].get(
|
||||
"subtitle", "Something Went Wrong"
|
||||
)
|
||||
branding["text"]["500"]["button"] = config["branding"]["text"]["500"].get(
|
||||
"button", "Home"
|
||||
)
|
||||
branding["text"]["504"]["message"] = config["branding"]["text"]["504"].get(
|
||||
"message", "Unable to reach <b>{target}</b>."
|
||||
)
|
||||
branding["logo"] = config["branding"]["logo"]
|
||||
branding["logo"]["path"] = config["branding"]["logo"].get(
|
||||
"path", "static/images/hyperglass-dark.png"
|
||||
)
|
||||
branding["logo"]["width"] = config["branding"]["logo"].get("width", 384)
|
||||
branding["logo"]["favicons"] = config["branding"]["logo"].get(
|
||||
"favicons", "static/images/favicon/"
|
||||
)
|
||||
branding["color"] = config["branding"]["color"]
|
||||
branding["color"]["background"] = config["branding"]["color"].get(
|
||||
"background", "#fbfffe"
|
||||
)
|
||||
branding["color"]["button_submit"] = config["branding"]["color"].get(
|
||||
"button_submit", "#40798c"
|
||||
)
|
||||
branding["color"]["danger"] = config["branding"]["color"].get("danger", "#ff3860")
|
||||
branding["color"]["progress_bar"] = config["branding"]["color"].get(
|
||||
"progress_bar", "#40798c"
|
||||
)
|
||||
branding["color"]["tag"]["type"] = config["branding"]["color"]["tag"].get(
|
||||
"type", "#ff5e5b"
|
||||
)
|
||||
branding["color"]["tag"]["type_title"] = config["branding"]["color"]["tag"].get(
|
||||
"type_title", "#330036"
|
||||
)
|
||||
branding["color"]["tag"]["location"] = config["branding"]["color"]["tag"].get(
|
||||
"location", "#40798c"
|
||||
)
|
||||
branding["color"]["tag"]["location_title"] = config["branding"]["color"]["tag"].get(
|
||||
"location_title", "#330036"
|
||||
)
|
||||
branding["font"] = config["branding"]["font"]
|
||||
branding["font"]["primary"] = config["branding"]["font"]["primary"]
|
||||
branding["font"]["primary"]["name"] = config["branding"]["font"]["primary"].get(
|
||||
"name", "Nunito"
|
||||
)
|
||||
branding["font"]["primary"]["url"] = config["branding"]["font"]["primary"].get(
|
||||
"url", "https://fonts.googleapis.com/css?family=Nunito:400,600,700"
|
||||
)
|
||||
branding["font"]["mono"] = config["branding"]["font"]["mono"]
|
||||
branding["font"]["mono"]["name"] = config["branding"]["font"]["mono"].get(
|
||||
"name", "Fira Mono"
|
||||
)
|
||||
branding["font"]["mono"]["url"] = config["branding"]["font"]["mono"].get(
|
||||
"url", "https://fonts.googleapis.com/css?family=Fira+Mono"
|
||||
)
|
||||
params_dict = dict(
|
||||
general=general, branding=branding, features=features, messages=messages
|
||||
)
|
||||
return params_dict
|
||||
log_level = 20
|
||||
if params.general.debug:
|
||||
log_level = 10
|
||||
log_format = (
|
||||
"%(color)s[%(asctime)s.%(msecs)03d %(module)s:%(funcName)s:%(lineno)d "
|
||||
"%(levelname)s]%(end_color)s %(message)s"
|
||||
)
|
||||
date_format = "%Y-%m-%d %H:%M:%S"
|
||||
logzero_formatter = logzero.LogFormatter(fmt=log_format, datefmt=date_format)
|
||||
logzero_config = logzero.setup_default_logger(
|
||||
formatter=logzero_formatter, level=log_level
|
||||
)
|
||||
|
|
|
|||
|
|
@ -1,38 +0,0 @@
|
|||
[[cisco_ios]]
|
||||
[cisco_ios.dual]
|
||||
bgp_community = "show bgp all community {target}"
|
||||
bgp_aspath = 'show bgp all quote-regexp "{target}"'
|
||||
[cisco_ios.ipv4]
|
||||
bgp_route = "show bgp ipv4 unicast {target} | exclude pathid:|Epoch"
|
||||
ping = "ping {target} repeat 5 source {source}"
|
||||
traceroute = "traceroute {target} timeout 1 probe 2 source {source}"
|
||||
[cisco_ios.ipv6]
|
||||
bgp_route = "show bgp ipv6 unicast {target} | exclude pathid:|Epoch"
|
||||
ping = "ping ipv6 {target} repeat 5 source {source}"
|
||||
traceroute = "traceroute ipv6 {target} timeout 1 probe 2 source {source}"
|
||||
|
||||
[[cisco_xr]]
|
||||
[cisco_xr.dual]
|
||||
bgp_community = 'show bgp all unicast community {target} | utility egrep -v "\(BGP |Table |Non-stop\)"'
|
||||
bgp_aspath = 'show bgp all unicast regexp {target} | utility egrep -v "\(BGP |Table |Non-stop\)"'
|
||||
[cisco_xr.ipv4]
|
||||
bgp_route = 'show bgp ipv4 unicast {target} | util egrep "\(BGP routing table entry|Path \#|aggregated by|Origin |Community:|validity| from \)"'
|
||||
ping = "ping ipv4 {target} count 5 source {src_addr_ipv4}"
|
||||
traceroute = "traceroute ipv4 {target} timeout 1 probe 2 source {source}"
|
||||
[cisco_xr.ipv6]
|
||||
bgp_route = 'show bgp ipv6 unicast {target} | util egrep "\(BGP routing table entry|Path \#|aggregated by|Origin |Community:|validity| from \)"'
|
||||
ping = "ping ipv6 {target} count 5 source {src_addr_ipv6}"
|
||||
traceroute = "traceroute ipv6 {target} timeout 1 probe 2 source {source}"
|
||||
|
||||
[[juniper]]
|
||||
[juniper.dual]
|
||||
bgp_community = "show route protocol bgp community {target}"
|
||||
bgp_aspath = "show route protocol bgp aspath-regex {target}"
|
||||
[juniper.ipv4]
|
||||
bgp_route = "show route protocol bgp table inet.0 {target} detail"
|
||||
ping = "ping inet {target} count 5 source {src_addr_ipv4}"
|
||||
traceroute = "traceroute inet {target} wait 1 source {source}"
|
||||
[juniper.ipv6]
|
||||
bgp_route = "show route protocol bgp table inet6.0 {target} detail"
|
||||
ping = "ping inet6 {target} count 5 source {src_addr_ipv6}"
|
||||
traceroute = "traceroute inet6 {target} wait 1 source {source}"
|
||||
|
|
@ -1,124 +0,0 @@
|
|||
# Non-dictionary parameters
|
||||
debug = false
|
||||
requires_ipv6_cidr = [
|
||||
"cisco_ios",
|
||||
"cisco_nxos"
|
||||
]
|
||||
# IP Blacklist
|
||||
blacklist = [
|
||||
"198.18.0.0/15",
|
||||
"100.64.0.0/10",
|
||||
"2001:db8::/32",
|
||||
"10.0.0.0/8",
|
||||
"192.168.0.0/16",
|
||||
"172.16.0.0/12"
|
||||
]
|
||||
# General site-wide parameters
|
||||
[general]
|
||||
# primary_asn = ""
|
||||
# org_name = ""
|
||||
# google_analytics = ""
|
||||
|
||||
# Feature customization
|
||||
[features]
|
||||
[features.rate_limit.query]
|
||||
# rate = 5
|
||||
# title = ""
|
||||
# message = ""
|
||||
# button = ""
|
||||
[features.rate_limit.site]
|
||||
# rate = 120
|
||||
# title = ""
|
||||
# subtitle = ""
|
||||
[features.cache]
|
||||
# timeout = 120
|
||||
# directory = ""
|
||||
# show_text = true
|
||||
# text = ""
|
||||
[features.bgp_route]
|
||||
# enable = true
|
||||
[features.bgp_community]
|
||||
# enable = true
|
||||
[features.bgp_community.regex]
|
||||
# decimal = ""
|
||||
# extended_as = ""
|
||||
# large = ""
|
||||
[features.bgp_aspath]
|
||||
# enable = true
|
||||
[features.bgp_aspath.regex]
|
||||
# mode = ""
|
||||
# asplain = ""
|
||||
# asdot = ""
|
||||
[features.ping]
|
||||
# enable = true
|
||||
[features.traceroute]
|
||||
# enable = true
|
||||
[features.max_prefix]
|
||||
# enable = false
|
||||
# ipv4 = 24
|
||||
# ipv6 = 64
|
||||
# message = ""
|
||||
|
||||
# User messages
|
||||
[messages]
|
||||
# no_query_type = ""
|
||||
# no_location = ""
|
||||
# no_input = ""
|
||||
# not_allowed = ""
|
||||
# requires_ipv6_cidr = ""
|
||||
# invalid_ip = ""
|
||||
# invalid_dual = ""
|
||||
# general = ""
|
||||
# directed_cidr = ""
|
||||
|
||||
# Branding/Visual Customization Parameters
|
||||
[branding]
|
||||
# site_name = ""
|
||||
[branding.footer]
|
||||
# enable = true
|
||||
[branding.credit]
|
||||
# enable = true
|
||||
[branding.peering_db]
|
||||
# enable = true
|
||||
[branding.text]
|
||||
# title_mode = "text_only"
|
||||
# title = ""
|
||||
# subtitle = ""
|
||||
# query_type = ""
|
||||
# results = ""
|
||||
# location = ""
|
||||
# query_placeholder = ""
|
||||
# bgp_route = ""
|
||||
# bgp_community = ""
|
||||
# bgp_aspath = ""
|
||||
# ping = ""
|
||||
# traceroute = ""
|
||||
[branding.text.404]
|
||||
# title = ""
|
||||
# subtitle = ""
|
||||
[branding.text.500]
|
||||
# title = ""
|
||||
# subtitle = ""
|
||||
# button = ""
|
||||
[branding.text.504]
|
||||
# message = ""
|
||||
[branding.logo]
|
||||
# path = ""
|
||||
# width = ""
|
||||
# favicons = ""
|
||||
[branding.color]
|
||||
# background = ""
|
||||
# button_submit = ""
|
||||
# danger = ""
|
||||
# progress_bar = ""
|
||||
[branding.color.tag]
|
||||
# type = ""
|
||||
# location = ""
|
||||
# location_title = ""
|
||||
# type_title = ""
|
||||
[branding.font.primary]
|
||||
# name = ""
|
||||
# url = ""
|
||||
[branding.font.mono]
|
||||
# name = ""
|
||||
# url = ""
|
||||
|
|
@ -1,30 +0,0 @@
|
|||
# Routers
|
||||
[router.'router1']
|
||||
address = "192.0.2.1"
|
||||
asn = "65000"
|
||||
src_addr_ipv4 = "192.0.2.1"
|
||||
src_addr_ipv6 = "2001:db8::1"
|
||||
credential = "default"
|
||||
location = "pop1"
|
||||
name = "router1.pop1"
|
||||
display_name = "Router"
|
||||
port = "22"
|
||||
type = "cisco_ios"
|
||||
proxy = "proxy1"
|
||||
|
||||
# Router Credentials
|
||||
[credential.'default']
|
||||
username = "username"
|
||||
password = "password"
|
||||
|
||||
[credential.'other']
|
||||
username = "otheradmin"
|
||||
password = "otherpass"
|
||||
|
||||
# SSH Proxy Servers
|
||||
[proxy.'proxy1']
|
||||
address = "10.0.1.1"
|
||||
username = "username"
|
||||
password = "password"
|
||||
type = "linux_ssh"
|
||||
ssh_command = "ssh -l {username} {host}"
|
||||
hyperglass/configuration/models.py (new file, 626 lines)
|
|
@ -0,0 +1,626 @@
|
|||
"""
|
||||
Defines models for all config variables.
|
||||
|
||||
Imports config variables and overrides default class attributes.
|
||||
|
||||
Validates input for overridden parameters.
|
||||
"""
|
||||
import re
|
||||
from math import ceil
|
||||
from typing import Dict, List, Optional, Tuple, Union
|
||||
from ipaddress import IPv4Address, IPv6Address
|
||||
from pathlib import Path
|
||||
|
||||
from pydantic import (
|
||||
BaseSettings,
|
||||
ConstrainedStr,
|
||||
DirectoryPath,
|
||||
FilePath,
|
||||
IPvAnyNetwork,
|
||||
IPvAnyAddress,
|
||||
UrlStr,
|
||||
constr,
|
||||
validator,
|
||||
create_model,
|
||||
SecretStr,
|
||||
)
|
||||
from pydantic.color import Color
|
||||
from hyperglass.exceptions import UnsupportedDevice, ConfigError
|
||||
from hyperglass.constants import Supported
|
||||
|
||||
|
||||
def clean_name(_name):
|
||||
"""
|
||||
Converts any "desirable" seperators to underscore, then
|
||||
removes all characters that are unsupported in Python class
|
||||
variable names. Also removes leading numbers and underscores.
|
||||
"""
|
||||
_replaced = re.sub(r"[\-|\.|\@|\~|\:\/|\s]", "_", _name)
|
||||
_scrubbed = "".join(re.findall(r"([a-zA-Z]\w+|\_+)", _replaced))
|
||||
_lower = _scrubbed.lower()
|
||||
return _lower
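As a quick illustration (inputs are made up), the normalization turns YAML-style device names into safe Python attribute names:

```python
# Illustrative only; results follow the regex substitutions above.
clean_name("core-router1.pop1")  # -> "core_router1_pop1"
clean_name("Edge Router 2")      # -> "edge_router_2"
```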
|
||||
|
||||
|
||||
class Router(BaseSettings):
|
||||
"""Model for per-router config in devices.yaml."""
|
||||
|
||||
address: Union[IPvAnyAddress, str]
|
||||
asn: int
|
||||
src_addr_ipv4: IPv4Address
|
||||
src_addr_ipv6: IPv6Address
|
||||
credential: str
|
||||
location: str
|
||||
display_name: str
|
||||
port: int
|
||||
nos: str
|
||||
proxy: Union[str, None] = None
|
||||
|
||||
@validator("nos")
|
||||
def supported_nos(cls, v):
|
||||
"""Validates that passed nos string is supported by hyperglass"""
|
||||
if not Supported.is_supported(v):
|
||||
raise UnsupportedDevice(f'"{v}" device type is not supported.')
|
||||
return v
|
||||
|
||||
@validator("credential", "proxy", "location")
|
||||
def clean_credential(cls, v):
|
||||
"""Remove or replace unsupported characters from field values"""
|
||||
return clean_name(v)
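A hedged sketch of the two validators in action; every field value below is illustrative:

```python
Router(
    address="192.0.2.1",
    asn=65000,
    src_addr_ipv4="192.0.2.1",
    src_addr_ipv6="2001:db8::1",
    credential="Default-Creds",  # normalized to "default_creds" by clean_credential
    location="pop1",
    display_name="Router",
    port=22,
    nos="not_a_real_nos",        # fails validation: supported_nos raises UnsupportedDevice
)
```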
|
||||
|
||||
|
||||
class Routers(BaseSettings):
|
||||
"""Base model for devices class."""
|
||||
|
||||
def build_network_lists(valid_devices):
|
||||
"""
|
||||
Builds locations dict, which is converted to JSON and passed to
|
||||
JavaScript to associate locations with the selected network/ASN.
|
||||
|
||||
Builds networks dict, which is used to render the network/ASN
|
||||
select element contents.
|
||||
"""
|
||||
locations_dict = {}
|
||||
networks_dict = {}
|
||||
for (dev, params) in valid_devices.items():
|
||||
asn = str(params["asn"])
|
||||
if asn in locations_dict:
|
||||
locations_dict[asn].append(
|
||||
{
|
||||
"location": params["location"],
|
||||
"hostname": dev,
|
||||
"display_name": params["display_name"],
|
||||
}
|
||||
)
|
||||
networks_dict[asn].append(params["location"])
|
||||
elif asn not in locations_dict:
|
||||
locations_dict[asn] = [
|
||||
{
|
||||
"location": params["location"],
|
||||
"hostname": dev,
|
||||
"display_name": params["display_name"],
|
||||
}
|
||||
]
|
||||
networks_dict[asn] = [params["location"]]
|
||||
if not locations_dict:
|
||||
raise ConfigError('Unable to build locations list from "devices.yaml"')
|
||||
if not networks_dict:
|
||||
raise ConfigError('Unable to build networks list from "devices.yaml"')
|
||||
return (locations_dict, networks_dict)
|
||||
|
||||
@classmethod
|
||||
def import_params(Routers, input_params):
|
||||
"""
|
||||
Imports passed dict from YAML config, removes unsupported
|
||||
characters from device names, dynamically sets attributes for
|
||||
the Routers class.
|
||||
"""
|
||||
routers = {}
|
||||
hostnames = []
|
||||
for (devname, params) in input_params.items():
|
||||
dev = clean_name(devname)
|
||||
router_params = Router(**params)
|
||||
setattr(Routers, dev, router_params)
|
||||
routers.update({dev: router_params.dict()})
|
||||
hostnames.append(dev)
|
||||
locations_dict, networks_dict = Routers.build_network_lists(routers)
|
||||
setattr(Routers, "routers", routers)
|
||||
setattr(Routers, "hostnames", hostnames)
|
||||
setattr(Routers, "locations", locations_dict)
|
||||
setattr(Routers, "networks", networks_dict)
|
||||
return Routers()
|
||||
|
||||
class Config:
|
||||
"""Pydantic Config"""
|
||||
|
||||
validate_all = True
|
||||
validate_assignment = True
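A hedged end-to-end sketch of the import path; the single device below is illustrative and mirrors the Router fields defined above:

```python
raw_devices = {
    "router1.pop1": {
        "address": "192.0.2.1",
        "asn": 65000,
        "src_addr_ipv4": "192.0.2.1",
        "src_addr_ipv6": "2001:db8::1",
        "credential": "default",
        "location": "pop1",
        "display_name": "Router",
        "port": 22,
        "nos": "cisco_ios",
    }
}
devices = Routers.import_params(raw_devices)
devices.hostnames           # ["router1_pop1"]  (key cleaned by clean_name)
devices.networks["65000"]   # ["pop1"]
devices.locations["65000"]  # [{"location": "pop1", "hostname": "router1_pop1",
                            #   "display_name": "Router"}]
```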
|
||||
|
||||
|
||||
class Credential(BaseSettings):
|
||||
"""Model for per-credential config in devices.yaml"""
|
||||
|
||||
username: str
|
||||
password: SecretStr
|
||||
|
||||
|
||||
class Credentials(BaseSettings):
|
||||
"""Base model for credentials class"""
|
||||
|
||||
@classmethod
|
||||
def import_params(Credentials, input_params):
|
||||
"""
|
||||
Imports passed dict from YAML config, removes unsupported
|
||||
characters from device names, dynamically sets attributes for
|
||||
the credentials class.
|
||||
"""
|
||||
obj = Credentials()
|
||||
for (credname, params) in input_params.items():
|
||||
cred = clean_name(credname)
|
||||
setattr(Credentials, cred, Credential(**params))
|
||||
return obj
|
||||
|
||||
class Config:
|
||||
"""Pydantic Config"""
|
||||
|
||||
validate_all = True
|
||||
validate_assignment = True
|
||||
|
||||
|
||||
class Proxy(BaseSettings):
|
||||
"""Model for per-proxy config in devices.yaml"""
|
||||
|
||||
address: Union[IPvAnyAddress, str]
|
||||
username: str
|
||||
password: SecretStr
|
||||
nos: str
|
||||
ssh_command: str
|
||||
|
||||
@validator("nos")
|
||||
def supported_nos(cls, v):
|
||||
"""Validates that passed nos string is supported by hyperglass"""
|
||||
if not v == "linux_ssh":
|
||||
raise UnsupportedDevice(f'"{v}" device type is not supported.')
|
||||
return v
|
||||
|
||||
|
||||
class Proxies(BaseSettings):
|
||||
"""Base model for proxies class"""
|
||||
|
||||
@classmethod
|
||||
def import_params(Proxies, input_params):
|
||||
"""
|
||||
Imports passed dict from YAML config, removes unsupported
|
||||
characters from device names, dynamically sets attributes for
|
||||
the proxies class.
|
||||
"""
|
||||
obj = Proxies()
|
||||
for (devname, params) in input_params.items():
|
||||
dev = clean_name(devname)
|
||||
setattr(Proxies, dev, Proxy(**params))
|
||||
return obj
|
||||
|
||||
class Config:
|
||||
"""Pydantic Config"""
|
||||
|
||||
validate_all = True
|
||||
validate_assignment = True
|
||||
|
||||
|
||||
class General(BaseSettings):
|
||||
"""Class model for params.general"""
|
||||
|
||||
debug: bool = False
|
||||
primary_asn: str = "65001"
|
||||
org_name: str = "The Company"
|
||||
google_analytics: Union[str, None] = None
|
||||
redis_host: Union[str, IPvAnyNetwork] = "localhost"
|
||||
redis_port: int = 6379
|
||||
requires_ipv6_cidr: List[str] = ["cisco_ios", "cisco_nxos"]
|
||||
|
||||
|
||||
class Branding(BaseSettings):
|
||||
"""Class model for params.branding"""
|
||||
|
||||
site_name: str = "hyperglass"
|
||||
|
||||
class Colors(BaseSettings):
|
||||
"""Class model for params.colors"""
|
||||
|
||||
background: Color = "#fbfffe"
|
||||
button_submit: Color = "#40798c"
|
||||
danger: Color = "#ff3860"
|
||||
progress_bar: Color = "#40798c"
|
||||
|
||||
class Tag(BaseSettings):
|
||||
"""Class model for params.colors.tag"""
|
||||
|
||||
query_type: Color = "#ff5e5b"
|
||||
query_type_title: Color = "#330036"
|
||||
location: Color = "#40798c"
|
||||
location_title: Color = "#330036"
|
||||
|
||||
tag: Tag = Tag()
|
||||
|
||||
class Credit(BaseSettings):
|
||||
"""Class model for params.branding.credit"""
|
||||
|
||||
enable: bool = True
|
||||
|
||||
class Font(BaseSettings):
|
||||
"""Class model for params.branding.font"""
|
||||
|
||||
class Primary(BaseSettings):
|
||||
"""Class model for params.branding.font.primary"""
|
||||
|
||||
name: str = "Nunito"
|
||||
url: UrlStr = "https://fonts.googleapis.com/css?family=Nunito:400,600,700"
|
||||
|
||||
class Mono(BaseSettings):
|
||||
"""Class model for params.branding.font.mono"""
|
||||
|
||||
name: str = "Fira Mono"
|
||||
url: UrlStr = "https://fonts.googleapis.com/css?family=Fira+Mono"
|
||||
|
||||
primary: Primary = Primary()
|
||||
mono: Mono = Mono()
|
||||
|
||||
class Footer(BaseSettings):
|
||||
"""Class model for params.branding.font"""
|
||||
|
||||
enable: bool = True
|
||||
|
||||
class Logo(BaseSettings):
|
||||
"""Class model for params.branding.logo"""
|
||||
|
||||
path: str = "static/images/hyperglass-dark.png"
|
||||
width: int = 384
|
||||
favicons: str = "static/images/favicons/"
|
||||
|
||||
class PeeringDb(BaseSettings):
|
||||
"""Class model for params.branding.peering_db"""
|
||||
|
||||
enable: bool = True
|
||||
|
||||
credit: Credit = Credit()
|
||||
font: Font = Font()
|
||||
footer: Footer = Footer()
|
||||
logo: Logo = Logo()
|
||||
colors: Colors = Colors()
|
||||
peering_db: PeeringDb = PeeringDb()
|
||||
|
||||
class Text(BaseSettings):
|
||||
"""Class model for params.branding.text"""
|
||||
|
||||
query_type: str = "Query Type"
|
||||
title_mode: str = "logo_only"
|
||||
title: str = "hyperglass"
|
||||
subtitle: str = "AS{primary_asn}".format(primary_asn=General().primary_asn)
|
||||
results: str = "Results"
|
||||
location: str = "Select Location..."
|
||||
query_placeholder: str = "IP, Prefix, Community, or AS Path"
|
||||
bgp_route: str = "BGP Route"
|
||||
bgp_community: str = "BGP Community"
|
||||
bgp_aspath: str = "BGP AS Path"
|
||||
ping: str = "Ping"
|
||||
traceroute: str = "Traceroute"
|
||||
|
||||
class Error404(BaseSettings):
|
||||
"""Class model for 404 Error Page"""
|
||||
|
||||
title: str = "Error"
|
||||
subtitle: str = "Page Not Found"
|
||||
|
||||
class Error500(BaseSettings):
|
||||
"""Class model for 500 Error Page"""
|
||||
|
||||
title: str = "Error"
|
||||
subtitle: str = "Something Went Wrong"
|
||||
button: str = "Home"
|
||||
|
||||
class Error504(BaseSettings):
|
||||
"""Class model for 504 Error Element"""
|
||||
|
||||
message: str = "Unable to reach <b>{target}</b>"
|
||||
|
||||
error404: Error404 = Error404()
|
||||
error500: Error500 = Error500()
|
||||
error504: Error504 = Error504()
|
||||
|
||||
credit: Credit = Credit()
|
||||
footer: Footer = Footer()
|
||||
text: Text = Text()
|
||||
|
||||
|
||||
class Messages(BaseSettings):
|
||||
"""Class model for params.messages"""
|
||||
|
||||
no_query_type: str = "Query Type must be specified."
|
||||
no_location: str = "A location must be selected."
|
||||
no_input: str = "A target must be specified"
|
||||
not_allowed: str = "<b>{i}</b> is not allowed."
|
||||
requires_ipv6_cidr: str = "<b>{d}</b> requires IPv6 BGP lookups to be in CIDR notation."
|
||||
invalid_ip: str = "<b>{i}</b> is not a valid IP address."
|
||||
invalid_dual: str = "invalid_dual <b>{i}</b> is an invalid {qt}."
|
||||
general: str = "An error occurred."
|
||||
directed_cidr: str = "<b>{q}</b> queries can not be in CIDR format."
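The `{i}`, `{d}`, `{q}`, and `{qt}` placeholders are presumably filled in by the calling code; a small illustration:

```python
messages = Messages()
messages.not_allowed.format(i="10.0.0.1")
# -> "<b>10.0.0.1</b> is not allowed."
messages.requires_ipv6_cidr.format(d="router1.pop1")
# -> "<b>router1.pop1</b> requires IPv6 BGP lookups to be in CIDR notation."
```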
|
||||
|
||||
|
||||
class Features(BaseSettings):
|
||||
"""Class model for params.features"""
|
||||
|
||||
class BgpRoute(BaseSettings):
|
||||
"""Class model for params.features.bgp_route"""
|
||||
|
||||
enable: bool = True
|
||||
|
||||
class BgpCommunity(BaseSettings):
|
||||
"""Class model for params.features.bgp_community"""
|
||||
|
||||
enable: bool = True
|
||||
|
||||
class Regex(BaseSettings):
|
||||
"""Class model for params.features.bgp_community.regex"""
|
||||
|
||||
decimal: str = r"^[0-9]{1,10}$"
|
||||
extended_as: str = r"^([0-9]{0,5})\:([0-9]{1,5})$"
|
||||
large: str = r"^([0-9]{1,10})\:([0-9]{1,10})\:[0-9]{1,10}$"
|
||||
|
||||
regex: Regex = Regex()
|
||||
|
||||
class BgpAsPath(BaseSettings):
|
||||
"""Class model for params.features.bgp_aspath"""
|
||||
|
||||
enable: bool = True
|
||||
|
||||
class Regex(BaseSettings):
|
||||
"""Class model for params.bgp_aspath.regex"""
|
||||
|
||||
mode: constr(regex="asplain|asdot") = "asplain"
|
||||
asplain: str = r"^(\^|^\_)(\d+\_|\d+\$|\d+\(\_\.\+\_\))+$"
|
||||
asdot: str = r"^(\^|^\_)((\d+\.\d+)\_|(\d+\.\d+)\$|(\d+\.\d+)\(\_\.\+\_\))+$"
|
||||
|
||||
regex: Regex = Regex()
|
||||
|
||||
class Ping(BaseSettings):
|
||||
"""Class model for params.features.ping"""
|
||||
|
||||
enable: bool = True
|
||||
|
||||
class Traceroute(BaseSettings):
|
||||
"""Class model for params.features.traceroute"""
|
||||
|
||||
enable: bool = True
|
||||
|
||||
class Blacklist(BaseSettings):
|
||||
"""Class model for params.features.blacklist"""
|
||||
|
||||
enable: bool = True
|
||||
networks: List[IPvAnyNetwork] = [
|
||||
"198.18.0.0/15",
|
||||
"100.64.0.0/10",
|
||||
"2001:db8::/32",
|
||||
"10.0.0.0/8",
|
||||
"192.168.0.0/16",
|
||||
"172.16.0.0/12",
|
||||
]
|
||||
|
||||
class Cache(BaseSettings):
|
||||
"""Class model for params.features.cache"""
|
||||
|
||||
redis_id: int = 0
|
||||
timeout: int = 120
|
||||
show_text: bool = True
|
||||
text: str = "Results will be cached for {timeout} minutes.".format(
|
||||
timeout=ceil(timeout / 60)
|
||||
)
|
||||
|
||||
class MaxPrefix(BaseSettings):
|
||||
"""Class model for params.features.max_prefix"""
|
||||
|
||||
enable: bool = False
|
||||
ipv4: int = 24
|
||||
ipv6: int = 64
|
||||
message: str = "Prefix length must be smaller than /{m}. <b>{i}</b> is too specific."
|
||||
|
||||
class RateLimit(BaseSettings):
|
||||
"""Class model for params.features.rate_limit"""
|
||||
|
||||
redis_id: int = 1
|
||||
|
||||
class Query(BaseSettings):
|
||||
"""Class model for params.features.rate_limit.query"""
|
||||
|
||||
rate: int = 5
|
||||
period: str = "minute"
|
||||
title: str = "Query Limit Reached"
|
||||
message: str = (
|
||||
"Query limit of {rate} per {period} reached. "
|
||||
"Please wait one minute and try again."
|
||||
).format(rate=rate, period=period)
|
||||
button: str = "Try Again"
|
||||
|
||||
class Site(BaseSettings):
|
||||
"""Class model for params.features.rate_limit.site"""
|
||||
|
||||
rate: int = 60
|
||||
period: str = "minute"
|
||||
title: str = "Limit Reached"
|
||||
subtitle: str = (
|
||||
"You have accessed this site more than {rate} "
|
||||
"times in the last {period}."
|
||||
).format(rate=rate, period=period)
|
||||
button: str = "Try Again"
|
||||
|
||||
query: Query = Query()
|
||||
site: Site = Site()
|
||||
|
||||
bgp_route: BgpRoute = BgpRoute()
|
||||
bgp_community: BgpCommunity = BgpCommunity()
|
||||
bgp_aspath: BgpAsPath = BgpAsPath()
|
||||
ping: Ping = Ping()
|
||||
traceroute: Traceroute = Traceroute()
|
||||
blacklist: Blacklist = Blacklist()
|
||||
cache: Cache = Cache()
|
||||
max_prefix: MaxPrefix = MaxPrefix()
|
||||
rate_limit: RateLimit = RateLimit()
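To make the default community and AS-path patterns concrete, a few sample matches (inputs are illustrative):

```python
import re

features = Features()
bool(re.match(features.bgp_community.regex.extended_as, "65000:100"))  # True
bool(re.match(features.bgp_community.regex.large, "65000:100:200"))    # True
bool(re.match(features.bgp_aspath.regex.asplain, "_65000$"))           # True
bool(re.match(features.bgp_aspath.regex.asplain, "65000"))             # False (not anchored with ^ or _)
```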
|
||||
|
||||
|
||||
class Params(BaseSettings):
|
||||
"""Base model for params"""
|
||||
|
||||
general: General = General()
|
||||
features: Features = Features()
|
||||
branding: Branding = Branding()
|
||||
messages: Messages = Messages()
|
||||
|
||||
class Config:
|
||||
"""Pydantic Config"""
|
||||
|
||||
validate_all = True
|
||||
validate_assignment = True
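A minimal sketch of how the assembled settings tree is consumed, dotted attribute access all the way down (values shown are the defaults defined above):

```python
params = Params()
params.general.primary_asn             # "65001"
params.branding.text.error500.button   # "Home"
params.features.rate_limit.query.rate  # 5
params.dict()                          # nested plain dict, e.g. for debug logging
```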
|
||||
|
||||
|
||||
class NosModel(BaseSettings):
|
||||
"""Class model for non-default commands"""
|
||||
|
||||
class Dual(BaseSettings):
|
||||
"""Class model for non-default dual afi commands"""
|
||||
|
||||
bgp_aspath: str = None
|
||||
bgp_community: str = None
|
||||
|
||||
class IPv4(BaseSettings):
|
||||
"""Class model for non-default ipv4 commands"""
|
||||
|
||||
bgp_route: str = None
|
||||
ping: str = None
|
||||
traceroute: str = None
|
||||
|
||||
class IPv6(BaseSettings):
|
||||
"""Class model for non-default ipv6 commands"""
|
||||
|
||||
bgp_route: str = None
|
||||
ping: str = None
|
||||
traceroute: str = None
|
||||
|
||||
dual: Dual = Dual()
|
||||
ipv4: IPv4 = IPv4()
|
||||
ipv6: IPv6 = IPv6()
|
||||
|
||||
|
||||
class Commands(BaseSettings):
|
||||
"""Base class for commands class"""
|
||||
|
||||
@classmethod
|
||||
def import_params(Commands, input_params):
|
||||
"""
|
||||
Imports passed dict from YAML config, dynamically sets
|
||||
attributes for the commands class.
|
||||
"""
|
||||
obj = Commands()
|
||||
for (nos, cmds) in input_params.items():
|
||||
setattr(Commands, nos, NosModel(**cmds))
|
||||
return obj
|
||||
|
||||
class CiscoIOS(BaseSettings):
|
||||
"""Class model for default cisco_ios commands"""
|
||||
|
||||
class Dual(BaseSettings):
|
||||
"""Default commands for dual afi commands"""
|
||||
|
||||
bgp_community = "show bgp all community {target}"
|
||||
bgp_aspath = 'show bgp all quote-regexp "{target}"'
|
||||
|
||||
class IPv4(BaseSettings):
|
||||
"""Default commands for ipv4 commands"""
|
||||
|
||||
bgp_route = "show bgp ipv4 unicast {target} | exclude pathid:|Epoch"
|
||||
ping = "ping {target} repeat 5 source {source}"
|
||||
traceroute = "traceroute {target} timeout 1 probe 2 source {source}"
|
||||
|
||||
class IPv6(BaseSettings):
|
||||
"""Default commands for ipv6 commands"""
|
||||
|
||||
bgp_route = "show bgp ipv6 unicast {target} | exclude pathid:|Epoch"
|
||||
ping = "ping ipv6 {target} repeat 5 source {source}"
|
||||
traceroute = "traceroute ipv6 {target} timeout 1 probe 2 source {source}"
|
||||
|
||||
dual: Dual = Dual()
|
||||
ipv4: IPv4 = IPv4()
|
||||
ipv6: IPv6 = IPv6()
|
||||
|
||||
class CiscoXR(BaseSettings):
|
||||
"""Class model for default cisco_xr commands"""
|
||||
|
||||
class Dual(BaseSettings):
|
||||
"""Default commands for dual afi commands"""
|
||||
|
||||
bgp_community = (
|
||||
"show bgp all unicast community {target} | utility egrep -v "
|
||||
'"\\(BGP |Table |Non-stop\\)"'
|
||||
)
|
||||
bgp_aspath = (
|
||||
"show bgp all unicast regexp {target} | utility egrep -v "
|
||||
'"\\(BGP |Table |Non-stop\\)"'
|
||||
)
|
||||
|
||||
class IPv4(BaseSettings):
|
||||
"""Default commands for ipv4 commands"""
|
||||
|
||||
bgp_route = (
|
||||
"show bgp ipv4 unicast {target} | util egrep \\(BGP routing table "
|
||||
"entry|Path \\#|aggregated by|Origin |Community:|validity| from \\)"
|
||||
)
|
||||
ping = "ping ipv4 {target} count 5 source {src_addr_ipv4}"
|
||||
traceroute = "traceroute ipv4 {target} timeout 1 probe 2 source {source}"
|
||||
|
||||
class IPv6(BaseSettings):
|
||||
"""Default commands for ipv6 commands"""
|
||||
|
||||
bgp_route = (
|
||||
"show bgp ipv6 unicast {target} | util egrep \\(BGP routing table "
|
||||
"entry|Path \\#|aggregated by|Origin |Community:|validity| from \\)"
|
||||
)
|
||||
ping = "ping ipv6 {target} count 5 source {src_addr_ipv6}"
|
||||
traceroute = "traceroute ipv6 {target} timeout 1 probe 2 source {source}"
|
||||
|
||||
dual: Dual = Dual()
|
||||
ipv4: IPv4 = IPv4()
|
||||
ipv6: IPv6 = IPv6()
|
||||
|
||||
class Juniper(BaseSettings):
|
||||
"""Class model for default juniper commands"""
|
||||
|
||||
class Dual(BaseSettings):
|
||||
"""Default commands for dual afi commands"""
|
||||
|
||||
bgp_community = "show route protocol bgp community {target}"
|
||||
bgp_aspath = "show route protocol bgp aspath-regex {target}"
|
||||
|
||||
class IPv4(BaseSettings):
|
||||
"""Default commands for ipv4 commands"""
|
||||
|
||||
bgp_route = "show route protocol bgp table inet.0 {target} detail"
|
||||
ping = "ping inet {target} count 5 source {src_addr_ipv4}"
|
||||
traceroute = "traceroute inet {target} wait 1 source {source}"
|
||||
|
||||
class IPv6(BaseSettings):
|
||||
"""Default commands for ipv6 commands"""
|
||||
|
||||
bgp_route = "show route protocol bgp table inet6.0 {target} detail"
|
||||
ping = "ping inet6 {target} count 5 source {src_addr_ipv6}"
|
||||
traceroute = "traceroute inet6 {target} wait 1 source {source}"
|
||||
|
||||
dual: Dual = Dual()
|
||||
ipv4: IPv4 = IPv4()
|
||||
ipv6: IPv6 = IPv6()
|
||||
|
||||
cisco_ios: NosModel = CiscoIOS()
|
||||
cisco_xr: NosModel = CiscoXR()
|
||||
juniper: NosModel = Juniper()
|
||||
|
||||
class Config:
|
||||
"""Pydantic Config"""
|
||||
|
||||
validate_all = False
|
||||
validate_assignment = True
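A hedged sketch of overriding commands for one platform; the input dict stands in for whatever the commands config file provides, and platforms not mentioned keep their built-in defaults:

```python
user_commands = {
    "cisco_ios": {"ipv4": {"ping": "ping {target} repeat 10 source {source}"}}
}
commands = Commands.import_params(user_commands)
commands.cisco_ios.ipv4.ping       # the overridden string above
commands.cisco_ios.ipv4.bgp_route  # None -- the override replaces the whole cisco_ios model
commands.juniper.ipv4.ping         # "ping inet {target} count 5 source {src_addr_ipv4}"
```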
|
||||
hyperglass/constants.py (new file, 163 lines)
|
|
@ -0,0 +1,163 @@
|
|||
"""
|
||||
Global Constants for hyperglass
|
||||
"""
|
||||
|
||||
__all__: ("code", "Supported")
|
||||
|
||||
|
||||
class Status:
|
||||
"""
|
||||
Defines codes, messages, and method names for status codes used by
|
||||
hyperglass.
|
||||
"""
|
||||
|
||||
codes_dict = {
|
||||
200: ("valid", "Valid Query"),
|
||||
405: ("not_allowed", "Query Not Allowed"),
|
||||
415: ("invalid", "Invalid Query"),
|
||||
504: ("target_error", "Unable to Reach Target"),
|
||||
}
|
||||
|
||||
def __init__(self):
|
||||
"""
|
||||
Dynamically generates class attributes for codes in codes_dict.
|
||||
"""
|
||||
for (code, text) in Status.codes_dict.items():
|
||||
setattr(self, text[0], code)
|
||||
|
||||
@staticmethod
|
||||
def get_reason(search_code):
|
||||
"""
|
||||
Maps and returns input code integer to associated reason text.
|
||||
Mainly used for populating Prometheus fields.
|
||||
"""
|
||||
for (code, text) in Status.codes_dict.items():
|
||||
if code == search_code:
|
||||
return text[1]
|
||||
|
||||
|
||||
code = Status()
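The generated attributes and the reason lookup are used like this (a small illustration):

```python
code.valid               # 200
code.invalid             # 415
code.target_error        # 504
Status.get_reason(405)   # "Query Not Allowed"
```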
|
||||
|
||||
|
||||
class Supported:
|
||||
"""
|
||||
Defines items supported by hyperglass.
|
||||
|
||||
query_types: Supported query types used to validate Flask input.
|
||||
|
||||
rest: Supported REST API platforms
|
||||
|
||||
scrape: Supported "scrape" platforms which will be accessed via
|
||||
Netmiko. List updated 07/2019.
|
||||
"""
|
||||
|
||||
query_types = ("bgp_route", "bgp_community", "bgp_aspath", "ping", "traceroute")
|
||||
|
||||
rest = ("frr", "bird")
|
||||
|
||||
scrape = (
|
||||
"a10",
|
||||
"accedian",
|
||||
"alcatel_aos",
|
||||
"alcatel_sros",
|
||||
"apresia_aeos",
|
||||
"arista_eos",
|
||||
"aruba_os",
|
||||
"avaya_ers",
|
||||
"avaya_vsp",
|
||||
"brocade_fastiron",
|
||||
"brocade_netiron",
|
||||
"brocade_nos",
|
||||
"brocade_vdx",
|
||||
"brocade_vyos",
|
||||
"checkpoint_gaia",
|
||||
"calix_b6",
|
||||
"ciena_saos",
|
||||
"cisco_asa",
|
||||
"cisco_ios",
|
||||
"cisco_nxos",
|
||||
"cisco_s300",
|
||||
"cisco_tp",
|
||||
"cisco_wlc",
|
||||
"cisco_xe",
|
||||
"cisco_xr",
|
||||
"coriant",
|
||||
"dell_dnos9",
|
||||
"dell_force10",
|
||||
"dell_os6",
|
||||
"dell_os9",
|
||||
"dell_os10",
|
||||
"dell_powerconnect",
|
||||
"dell_isilon",
|
||||
"eltex",
|
||||
"enterasys",
|
||||
"extreme",
|
||||
"extreme_ers",
|
||||
"extreme_exos",
|
||||
"extreme_netiron",
|
||||
"extreme_nos",
|
||||
"extreme_slx",
|
||||
"extreme_vdx",
|
||||
"extreme_vsp",
|
||||
"extreme_wing",
|
||||
"f5_ltm",
|
||||
"f5_tmsh",
|
||||
"f5_linux",
|
||||
"fortinet",
|
||||
"generic_termserver",
|
||||
"hp_comware",
|
||||
"hp_procurve",
|
||||
"huawei",
|
||||
"huawei_vrpv8",
|
||||
"ipinfusion_ocnos",
|
||||
"juniper",
|
||||
"juniper_junos",
|
||||
"linux",
|
||||
"mellanox",
|
||||
"mrv_optiswitch",
|
||||
"netapp_cdot",
|
||||
"netscaler",
|
||||
"ovs_linux",
|
||||
"paloalto_panos",
|
||||
"pluribus",
|
||||
"quanta_mesh",
|
||||
"rad_etx",
|
||||
"ruckus_fastiron",
|
||||
"ubiquiti_edge",
|
||||
"ubiquiti_edgeswitch",
|
||||
"vyatta_vyos",
|
||||
"vyos",
|
||||
"oneaccess_oneos",
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def is_supported(nos):
|
||||
"""
|
||||
Returns boolean state of input Network Operating System against
|
||||
rest OR scrape tuples.
|
||||
"""
|
||||
return bool(nos in (Supported.rest + Supported.scrape))
|
||||
|
||||
@staticmethod
|
||||
def is_scrape(nos):
|
||||
"""
|
||||
Returns boolean state of input Network Operating System against
|
||||
scrape tuple.
|
||||
"""
|
||||
return bool(nos in Supported.scrape)
|
||||
|
||||
@staticmethod
|
||||
def is_rest(nos):
|
||||
"""
|
||||
Returns boolean state of input Network Operating System against
|
||||
rest tuple.
|
||||
"""
|
||||
return bool(nos in Supported.rest)
|
||||
|
||||
@staticmethod
|
||||
def is_supported_query(query_type):
|
||||
"""
|
||||
Returns boolean state of input Network Operating System against
|
||||
query_type tuple.
|
||||
"""
|
||||
return bool(query_type in Supported.query_types)
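A few concrete membership checks against the tuples above:

```python
Supported.is_supported("cisco_ios")        # True (scrape platform)
Supported.is_rest("frr")                   # True
Supported.is_scrape("frr")                 # False
Supported.is_supported_query("bgp_route")  # True
Supported.is_supported_query("dns")        # False
```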
|
||||
hyperglass/exceptions.py (new file, 37 lines)
|
|
@ -0,0 +1,37 @@
|
|||
"""
|
||||
Custom exceptions for hyperglass
|
||||
"""
|
||||
|
||||
|
||||
class HyperglassError(Exception):
|
||||
"""
|
||||
hyperglass base exception.
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
class ConfigError(HyperglassError):
|
||||
"""
|
||||
Raised for user-inflicted configuration issues. Examples:
|
||||
- Fat-fingered NOS in device definition
|
||||
- Used invalid type (str, int, etc.) in hyperglass.yaml
|
||||
"""
|
||||
|
||||
def __init__(self, message):
|
||||
self.message = message
|
||||
|
||||
def __str__(self):
|
||||
return self.message
|
||||
|
||||
|
||||
class UnsupportedDevice(HyperglassError):
|
||||
"""
|
||||
Raised when an input NOS is not in the supported NOS list.
|
||||
"""
|
||||
|
||||
def __init__(self, message):
|
||||
self.message = message
|
||||
|
||||
def __str__(self):
|
||||
return self.message
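Since both subclasses just carry a message, a minimal usage sketch:

```python
try:
    raise ConfigError('Unknown NOS "ciscoios" for device router1')
except HyperglassError as err:
    print(err)  # __str__ returns the stored message
```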
|
||||
|
|
@ -1,41 +1,31 @@
|
|||
# https://github.com/checktheroads/hyperglass
|
||||
"""
|
||||
Main Hyperglass Front End
|
||||
"""
|
||||
# Standard Imports
|
||||
import json
|
||||
import logging
|
||||
|
||||
# Module Imports
|
||||
import redis
|
||||
import logzero
|
||||
from logzero import logger
|
||||
from flask import Flask, request, Response
|
||||
from flask_limiter import Limiter
|
||||
from flask_limiter.util import get_ipaddr
|
||||
from prometheus_client import generate_latest, Counter, CollectorRegistry, multiprocess
|
||||
from flask import Flask, Response, request
|
||||
from prometheus_client import CollectorRegistry, Counter, generate_latest, multiprocess
|
||||
|
||||
# Project Imports
|
||||
from hyperglass.command import execute
|
||||
from hyperglass import configuration
|
||||
from hyperglass import render
|
||||
from hyperglass.exceptions import ConfigError
|
||||
from hyperglass.command.execute import Execute
|
||||
from hyperglass.constants import Supported, code
|
||||
from hyperglass.configuration import params, devices, logzero_config
|
||||
|
||||
# Logzero Configuration
|
||||
if configuration.debug_state():
|
||||
logzero.loglevel(logging.DEBUG)
|
||||
else:
|
||||
logzero.loglevel(logging.INFO)
|
||||
|
||||
# Initialize general configuration parameters for reuse
|
||||
config = configuration.params()
|
||||
codes = configuration.codes()
|
||||
codes_reason = configuration.codes_reason()
|
||||
logger.debug(f"Configuration Parameters:\n {config}")
|
||||
logger.debug(f"Configuration Parameters:\n {params.dict()}")
|
||||
|
||||
# Redis Config
|
||||
redis_config = {
|
||||
"host": config["general"]["redis_host"],
|
||||
"port": config["general"]["redis_port"],
|
||||
"host": params.general.redis_host,
|
||||
"port": params.general.redis_port,
|
||||
"charset": "utf-8",
|
||||
"decode_responses": True,
|
||||
}
|
||||
|
|
@ -44,22 +34,23 @@ redis_config = {
|
|||
app = Flask(__name__, static_url_path="/static")
|
||||
|
||||
# Redis Cache Config
|
||||
r_cache = redis.Redis(**redis_config, db=config["features"]["rate_limit"]["redis_id"])
|
||||
r_cache = redis.Redis(**redis_config, db=params.features.rate_limit.redis_id)
|
||||
|
||||
# Flask-Limiter Config
|
||||
query_rate = config["features"]["rate_limit"]["query"]["rate"]
|
||||
query_period = config["features"]["rate_limit"]["query"]["period"]
|
||||
site_rate = config["features"]["rate_limit"]["site"]["rate"]
|
||||
site_period = config["features"]["rate_limit"]["site"]["period"]
|
||||
query_rate = params.features.rate_limit.query.rate
|
||||
query_period = params.features.rate_limit.query.period
|
||||
site_rate = params.features.rate_limit.site.rate
|
||||
site_period = params.features.rate_limit.site.period
|
||||
#
|
||||
rate_limit_query = f"{query_rate} per {query_period}"
|
||||
rate_limit_site = f"{site_rate} per {site_period}"
|
||||
logger.debug(f"Query rate limit: {rate_limit_query}")
|
||||
logger.debug(f"Site rate limit: {rate_limit_site}")
|
||||
|
||||
# Redis Config for Flask-Limiter storage
|
||||
r_limiter_db = config["features"]["rate_limit"]["redis_id"]
|
||||
r_limiter_db = params.features.rate_limit.redis_id
|
||||
r_limiter_url = f'redis://{redis_config["host"]}:{redis_config["port"]}/{r_limiter_db}'
|
||||
r_limiter = redis.Redis(**redis_config, db=config["features"]["rate_limit"]["redis_id"])
|
||||
r_limiter = redis.Redis(**redis_config, db=params.features.rate_limit.redis_id)
|
||||
# Adds Flask config variable for Flask-Limiter
|
||||
app.config.update(RATELIMIT_STORAGE_URL=r_limiter_url)
|
||||
# Initializes Flask-Limiter
|
||||
|
|
@ -151,10 +142,12 @@ def test_route():
|
|||
|
||||
@app.route("/locations/<asn>", methods=["GET"])
|
||||
def get_locations(asn):
|
||||
"""Flask GET route provides a JSON list of all locations for the selected network/ASN"""
|
||||
locations_list = configuration.locations_list()
|
||||
locations_list_json = json.dumps(locations_list[asn])
|
||||
logger.debug(f"Locations list:\n{locations_list}")
|
||||
"""
|
||||
Flask GET route provides a JSON list of all locations for the
|
||||
selected network/ASN.
|
||||
"""
|
||||
locations_list_json = json.dumps(devices.locations[asn])
|
||||
logger.debug(f"Locations list:{devices.locations[asn]}")
|
||||
return locations_list_json
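Assuming a single device defined under ASN 65000, a hedged sketch of the response; the host and port are assumptions for a local development instance:

```python
import json

import requests  # illustrative client call only

response = requests.get("http://localhost:5000/locations/65000")
json.loads(response.text)
# e.g. [{"location": "pop1", "hostname": "router1_pop1", "display_name": "Router"}]
```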
|
||||
|
||||
|
||||
|
|
@ -162,30 +155,29 @@ def get_locations(asn):
|
|||
# Invoke Flask-Limiter with configured rate limit
|
||||
@limiter.limit(rate_limit_query, error_message="Query")
|
||||
def hyperglass_main():
|
||||
"""Main backend application initiator. Ingests Ajax POST data from form submit, passes it to
|
||||
the backend application to perform the filtering/lookups"""
|
||||
"""
|
||||
Main backend application initiator. Ingests Ajax POST data from
|
||||
form submit, passes it to the backend application to perform the
|
||||
filtering/lookups.
|
||||
"""
|
||||
# Get JSON data from Ajax POST
|
||||
lg_data = request.get_json()
|
||||
logger.debug(f"Unvalidated input: {lg_data}")
|
||||
# Return error if no target is specified
|
||||
if not lg_data["target"]:
|
||||
logger.debug("No input specified")
|
||||
return Response(config["messages"]["no_input"], codes["danger"])
|
||||
return Response(params.messages.no_input, code.invalid)
|
||||
# Return error if no location is selected
|
||||
if lg_data["location"] not in configuration.hostnames():
|
||||
if lg_data["location"] not in devices.hostnames:
|
||||
logger.debug("No selection specified")
|
||||
return Response(config["messages"]["no_location"], codes["danger"])
|
||||
return Response(params.messages.no_location, code.invalid)
|
||||
# Return error if no query type is selected
|
||||
if lg_data["type"] not in [
|
||||
"bgp_route",
|
||||
"bgp_community",
|
||||
"bgp_aspath",
|
||||
"ping",
|
||||
"traceroute",
|
||||
]:
|
||||
if not Supported.is_supported_query(lg_data["type"]):
|
||||
logger.debug("No query specified")
|
||||
return Response(config["messages"]["no_query_type"], codes["danger"])
|
||||
return Response(params.messages.no_query_type, code.invalid)
|
||||
# Get client IP address for Prometheus logging & rate limiting
|
||||
client_addr = get_ipaddr()
|
||||
# Increment Prometheus counter
|
||||
count_data.labels(
|
||||
client_addr, lg_data["type"], lg_data["location"], lg_data["target"]
|
||||
).inc()
|
||||
|
|
@ -194,17 +186,17 @@ def hyperglass_main():
|
|||
# cache store so each command output value is unique
|
||||
cache_key = str(lg_data)
|
||||
# Define cache entry expiry time
|
||||
cache_timeout = config["features"]["cache"]["timeout"]
|
||||
cache_timeout = params.features.cache.timeout
|
||||
logger.debug(f"Cache Timeout: {cache_timeout}")
|
||||
# Check if cached entry exists
|
||||
if not r_cache.hgetall(cache_key):
|
||||
try:
|
||||
logger.debug(f"Sending query {cache_key} to execute module...")
|
||||
cache_value = execute.Execute(lg_data).response()
|
||||
cache_value = Execute(lg_data).response()
|
||||
value_output = cache_value["output"]
|
||||
value_code = cache_value["status"]
|
||||
logger.debug(
|
||||
f"Validated response...\nStatus Code: {value_code}, Output: {value_output}"
|
||||
f"Validated response...\nStatus Code: {value_code}\nOutput:\n{value_output}"
|
||||
)
|
||||
# If it doesn't, create a cache entry
|
||||
r_cache.hmset(cache_key, cache_value)
|
||||
|
|
@ -216,10 +208,10 @@ def hyperglass_main():
|
|||
logger.debug(f"Returning {value_code} response")
|
||||
return Response(response["output"], response["status"])
|
||||
# If 400 error, return error message and code
|
||||
if value_code in [405, 415]:
|
||||
elif value_code in [405, 415]:
|
||||
count_errors.labels(
|
||||
response["status"],
|
||||
codes_reason[response["status"]],
|
||||
code.get_reason(response["status"]),
|
||||
client_addr,
|
||||
lg_data["type"],
|
||||
lg_data["location"],
|
||||
|
|
|
|||
|
|
@ -1,11 +1,8 @@
|
|||
# https://github.com/checktheroads/hyperglass
|
||||
"""
|
||||
Renders Jinja2 & Sass templates for use by the front end application
|
||||
"""
|
||||
# Standard Imports
|
||||
import os
|
||||
import logging
|
||||
import subprocess
|
||||
from pathlib import Path
|
||||
|
||||
# Module Imports
|
||||
import sass
|
||||
|
|
@ -17,42 +14,30 @@ from markdown2 import Markdown
|
|||
from flask import render_template
|
||||
|
||||
# Project Imports
|
||||
import hyperglass
|
||||
from hyperglass import configuration
|
||||
from hyperglass.exceptions import HyperglassError
|
||||
from hyperglass.configuration import params, devices, logzero_config
|
||||
|
||||
# Module Directories
|
||||
working_directory = os.path.dirname(os.path.abspath(__file__))
|
||||
hyperglass_root = os.path.dirname(hyperglass.__file__)
|
||||
file_loader = jinja2.FileSystemLoader(working_directory)
|
||||
working_directory = Path(__file__).resolve().parent
|
||||
hyperglass_root = working_directory.parent
|
||||
file_loader = jinja2.FileSystemLoader(str(working_directory))
|
||||
env = jinja2.Environment(loader=file_loader)
|
||||
|
||||
# Logzero Configuration
|
||||
if configuration.debug_state():
|
||||
logzero.loglevel(logging.DEBUG)
|
||||
else:
|
||||
logzero.loglevel(logging.INFO)
|
||||
|
||||
# Configuration Imports
|
||||
config = configuration.params()
|
||||
# branding = configuration.branding()
|
||||
# general = configuration.general()
|
||||
networks = configuration.networks()
|
||||
|
||||
default_details = {
|
||||
"footer": """
|
||||
+++
|
||||
+++
|
||||
By using {{ branding["site_name"] }}, you agree to be bound by the following terms of use: All \
|
||||
By using {{ branding.site_name }}, you agree to be bound by the following terms of use: All \
|
||||
queries executed on this page are logged for analysis and troubleshooting. Users are prohibited \
|
||||
from automating queries, or attempting to process queries in bulk. This service is provided on a \
|
||||
best effort basis, and {{ general["org_name"] }} makes no availability or performance warranties or \
|
||||
best effort basis, and {{ general.org_name }} makes no availability or performance warranties or \
|
||||
guarantees whatsoever.
|
||||
""",
|
||||
"bgp_aspath": r"""
|
||||
+++
|
||||
title = "Supported AS Path Patterns"
|
||||
+++
|
||||
{{ branding["site_name"] }} accepts the following `AS_PATH` regular expression patterns:
|
||||
{{ branding.site_name }} accepts the following `AS_PATH` regular expression patterns:
|
||||
|
||||
| Expression | Match |
|
||||
| :------------------- | :-------------------------------------------- |
|
||||
|
|
@ -66,7 +51,7 @@ title = "Supported AS Path Patterns"
|
|||
+++
|
||||
title = "BGP Communities"
|
||||
+++
|
||||
{{ branding["site_name"] }} makes use of the following BGP communities:
|
||||
{{ branding.site_name }} makes use of the following BGP communities:
|
||||
|
||||
| Community | Description |
|
||||
| :-------- | :---------- |
|
||||
|
|
@ -125,10 +110,10 @@ def info(file_name):
|
|||
"html-classes": html_classes,
|
||||
}
|
||||
)
|
||||
file = os.path.join(working_directory, f"templates/info/{file_name}.md")
|
||||
file = working_directory.joinpath(f"templates/info/{file_name}.md")
|
||||
frontmatter_dict = {}
|
||||
if os.path.exists(file):
|
||||
with open(file, "r") as file_raw:
|
||||
if file.exists():
|
||||
with file.open(mode="r") as file_raw:
|
||||
file_read = file_raw.read()
|
||||
_, frontmatter, content = file_read.split("+++")
|
||||
frontmatter_dict[file_name] = toml.loads(frontmatter)
|
||||
|
|
@ -146,9 +131,9 @@ def info(file_name):
|
|||
md_template_content = jinja2.Environment(loader=jinja2.BaseLoader).from_string(
|
||||
content
|
||||
)
|
||||
frontmatter_rendered = md_template_fm.render(**config)
|
||||
frontmatter_rendered = md_template_fm.render(params)
|
||||
frontmatter_dict[file_name] = toml.loads(frontmatter_rendered)
|
||||
content_rendered = md_template_content.render(**config, info=frontmatter_dict)
|
||||
content_rendered = md_template_content.render(params, info=frontmatter_dict)
|
||||
frontmatter_dict[file_name]["content"] = markdown.convert(content_rendered)
|
||||
return frontmatter_dict
|
||||
|
||||
|
|
@ -165,10 +150,10 @@ def details(file_name):
|
|||
"html-classes": html_classes,
|
||||
}
|
||||
)
|
||||
file = os.path.join(working_directory, f"templates/info/details/{file_name}.md")
|
||||
file = working_directory.joinpath(f"templates/info/details/{file_name}.md")
|
||||
frontmatter_dict = {}
|
||||
if os.path.exists(file):
|
||||
with open(file, "r") as file_raw:
|
||||
if file.exists():
|
||||
with file.open(mode="r") as file_raw:
|
||||
file_read = file_raw.read()
|
||||
_, frontmatter, content = file_read.split("+++")
|
||||
md_template_fm = jinja2.Environment(loader=jinja2.BaseLoader).from_string(
|
||||
|
|
@ -186,9 +171,9 @@ def details(file_name):
|
|||
md_template_content = jinja2.Environment(loader=jinja2.BaseLoader).from_string(
|
||||
content
|
||||
)
|
||||
frontmatter_rendered = md_template_fm.render(**config)
|
||||
frontmatter_rendered = md_template_fm.render(params)
|
||||
frontmatter_dict[file_name] = toml.loads(frontmatter_rendered)
|
||||
content_rendered = md_template_content.render(**config, details=frontmatter_dict)
|
||||
content_rendered = md_template_content.render(params, details=frontmatter_dict)
|
||||
frontmatter_dict[file_name]["content"] = markdown.convert(content_rendered)
|
||||
return frontmatter_dict
|
||||
|
||||
|
|
@ -205,30 +190,39 @@ def html(template_name):
|
|||
for info_name in info_list:
|
||||
info_data = info(info_name)
|
||||
info_dict.update(info_data)
|
||||
template = env.get_template(f"templates/{template_name}.html")
|
||||
return template.render(
|
||||
**config, info=info_dict, details=details_dict, networks=networks
|
||||
)
|
||||
try:
|
||||
template_file = f"templates/{template_name}.html.j2"
|
||||
template = env.get_template(template_file)
|
||||
return template.render(
|
||||
params, info=info_dict, details=details_dict, networks=devices.networks
|
||||
)
|
||||
except jinja2.TemplateNotFound as template_error:
|
||||
logger.error(
|
||||
f"Error rendering Jinja2 template {Path(template_file).resolve()}."
|
||||
)
|
||||
raise HyperglassError(template_error)
|
||||
|
||||
|
||||
def css():
|
||||
"""Renders Jinja2 template to Sass file, then compiles Sass as CSS"""
|
||||
scss_file = os.path.join(hyperglass_root, "static/sass/hyperglass.scss")
|
||||
css_file = os.path.join(hyperglass_root, "static/css/hyperglass.css")
|
||||
scss_file = hyperglass_root.joinpath("static/sass/hyperglass.scss")
|
||||
css_file = hyperglass_root.joinpath("static/css/hyperglass.css")
|
||||
# Renders Jinja2 template as Sass file
|
||||
try:
|
||||
template_file = "templates/hyperglass.scss"
|
||||
template_file = "templates/hyperglass.scss.j2"
|
||||
template = env.get_template(template_file)
|
||||
rendered_output = template.render(**config)
|
||||
with open(scss_file, "w") as scss_output:
|
||||
rendered_output = template.render(params)
|
||||
with scss_file.open(mode="w") as scss_output:
|
||||
scss_output.write(rendered_output)
|
||||
except:
|
||||
logger.error(f"Error rendering Jinja2 template {template_file}.")
|
||||
raise
|
||||
except jinja2.TemplateNotFound as template_error:
|
||||
logger.error(
|
||||
f"Error rendering Jinja2 template {Path(template_file).resolve()}."
|
||||
)
|
||||
raise HyperglassError(template_error)
|
||||
# Compiles Sass to CSS
|
||||
try:
|
||||
generated_sass = sass.compile(filename=scss_file)
|
||||
with open(css_file, "w") as css_output:
|
||||
generated_sass = sass.compile(filename=str(scss_file))
|
||||
with css_file.open(mode="w") as css_output:
|
||||
css_output.write(generated_sass)
|
||||
logger.debug(f"Compiled Sass file {scss_file} to CSS file {css_file}.")
|
||||
except:
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
{% extends "templates/base.html" %}
|
||||
{% extends "templates/base.html.j2" %}
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
|
||||
|
|
@ -23,19 +23,19 @@
|
|||
<section>
|
||||
<div class="container has-text-centered">
|
||||
<h1 class="title is-size-1">
|
||||
{{ branding["text"]["404"]["title"] }}
|
||||
{{ branding.text.error404.title }}
|
||||
</h1>
|
||||
<h2 class="subtitle is-size-3">
|
||||
{{ branding["text"]["404"]["subtitle"] }}
|
||||
{{ branding.text.error404.subtitle }}
|
||||
</h2>
|
||||
<br>
|
||||
</div>
|
||||
</section>
|
||||
{% if branding["footer"]["enable"] == true %}
|
||||
{% include "templates/footer.html" %}
|
||||
{% if branding.footer.enable == true %}
|
||||
{% include "templates/footer.html.j2" %}
|
||||
{% endif %}
|
||||
{% if branding["credit"]["enable"] == true %}
|
||||
{% include "templates/credit.html" %}
|
||||
{% if branding.credit.enable == true %}
|
||||
{% include "templates/credit.html.j2" %}
|
||||
{% endif %}
|
||||
{% endblock %}
|
||||
</body>
|
||||
|
|
@ -1,4 +1,4 @@
|
|||
{% extends "templates/base.html" %}
|
||||
{% extends "templates/base.html.j2" %}
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
|
||||
|
|
@ -25,20 +25,20 @@
|
|||
<section>
|
||||
<div class="container has-text-centered">
|
||||
<h1 class="title is-size-1">
|
||||
{{ branding["text"]["500"]["title"] }}
|
||||
{{ features.rate_limit.site.title }}
|
||||
</h1>
|
||||
<h2 class="subtitle is-size-3">
|
||||
{{ branding["text"]["500"]["subtitle"] }}
|
||||
{{ features.rate_limit.site.subtitle }}
|
||||
</h2>
|
||||
<br>
|
||||
<a href="/" class="button is-medium is-rounded is-inverted is-danger is-outlined">{{ branding["text"]["500"]["button"] }}</a>
|
||||
<a href="/" class="button is-medium is-rounded is-inverted is-danger is-outlined">{{ features.rate_limit.site.button }}</a>
|
||||
</div>
|
||||
</section>
|
||||
{% if branding["footer"]["enable"] == true %}
|
||||
{% include "templates/footer.html" %}
|
||||
{% if branding.footer.enable == true %}
|
||||
{% include "templates/footer.html.j2" %}
|
||||
{% endif %}
|
||||
{% if branding["credit"]["enable"] == true %}
|
||||
{% include "templates/credit.html" %}
|
||||
{% if branding.credit.enable == true %}
|
||||
{% include "templates/credit.html.j2" %}
|
||||
{% endif %}
|
||||
{% endblock %}
|
||||
</body>
|
||||
|
|
@ -1,4 +1,4 @@
|
|||
{% extends "templates/base.html" %}
|
||||
{% extends "templates/base.html.j2" %}
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
|
||||
|
|
@ -25,20 +25,20 @@
|
|||
<section>
|
||||
<div class="container has-text-centered">
|
||||
<h1 class="title is-size-1">
|
||||
{{ features["rate_limit"]["site"]["title"] }}
|
||||
{{ branding.text.error500.title }}
|
||||
</h1>
|
||||
<h2 class="subtitle is-size-3">
|
||||
{{ features["rate_limit"]["site"]["subtitle"] }}
|
||||
{{ branding.text.error500.subtitle }}
|
||||
</h2>
|
||||
<br>
|
||||
<a href="/" class="button is-medium is-rounded is-inverted is-danger is-outlined">{{ features["rate_limit"]["site"]["button"] }}</a>
|
||||
<a href="/" class="button is-medium is-rounded is-inverted is-danger is-outlined">{{ branding.text.error500.button }}</a>
|
||||
</div>
|
||||
</section>
|
||||
{% if branding["footer"]["enable"] == true %}
|
||||
{% include "templates/footer.html" %}
|
||||
{% if branding.footer.enable == true %}
|
||||
{% include "templates/footer.html.j2" %}
|
||||
{% endif %}
|
||||
{% if branding["credit"]["enable"] == true %}
|
||||
{% include "templates/credit.html" %}
|
||||
{% if branding.credit.enable == true %}
|
||||
{% include "templates/credit.html.j2" %}
|
||||
{% endif %}
|
||||
{% endblock %}
|
||||
</body>
|
||||
|
|
@ -6,15 +6,15 @@
|
|||
<meta charset="utf-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1" />
|
||||
<link rel="apple-touch-icon" sizes="180x180" href="{{ branding["logo"]["favicons"] }}apple-touch-icon.png">
|
||||
<link rel="icon" type="image/png" sizes="16x16" href="{{ branding["logo"]["favicons"] }}favicon-16x16.png">
|
||||
<link rel="icon" type="image/png" sizes="32x32" href="{{ branding["logo"]["favicons"] }}favicon-32x32.png">
|
||||
<link rel="manifest" href="{{ branding["logo"]["favicons"] }}site.webmanifest">
|
||||
<link rel="mask-icon" href="{{ branding["logo"]["favicons"] }}safari-pinned-tab.svg" color="{{ branding["color"]["tag"]["command"] }}">
|
||||
<link rel="shortcut icon" href="{{ branding["logo"]["favicons"] }}favicon.ico">
|
||||
<meta name="msapplication-TileColor" content="{{ branding["color"]["tag"]["location_title"] }}">
|
||||
<meta name="msapplication-config" content="{{ branding["logo"]["favicons"] }}browserconfig.xml">
|
||||
<meta name="theme-color" content="{{ branding["color"]["button_submit"] }}">
|
||||
<link rel="apple-touch-icon" sizes="180x180" href="{{ branding.logo.favicons }}apple-touch-icon.png">
|
||||
<link rel="icon" type="image/png" sizes="16x16" href="{{ branding.logo.favicons }}favicon-16x16.png">
|
||||
<link rel="icon" type="image/png" sizes="32x32" href="{{ branding.logo.favicons }}favicon-32x32.png">
|
||||
<link rel="manifest" href="{{ branding.logo.favicons }}site.webmanifest">
|
||||
<link rel="mask-icon" href="{{ branding.logo.favicons }}safari-pinned-tab.svg" color="{{ branding.colors.tag.command }}">
|
||||
<link rel="shortcut icon" href="{{ branding.logo.favicons }}favicon.ico">
|
||||
<meta name="msapplication-TileColor" content="{{ branding.colors.tag.location_title }}">
|
||||
<meta name="msapplication-config" content="{{ branding.logo.favicons }}browserconfig.xml">
|
||||
<meta name="theme-color" content="{{ branding.colors.button_submit }}">
|
||||
<link href="static/css/icofont/icofont.min.css" rel="stylesheet" />
|
||||
<link href="static/css/hyperglass.css" rel="stylesheet" />
|
||||
{% endblock %}
|
||||
|
|
@ -26,9 +26,9 @@
|
|||
<script src="static/js/jquery-3.4.0.min.js"></script>
|
||||
<script src="static/js/clipboard.min.js"></script>
|
||||
<script src="static/js/hyperglass.js"></script>
|
||||
{% if general.google_analytics|length > 0 %}
|
||||
{% if general.google_analytics %}
|
||||
<!--Google Analytics-->
|
||||
<script async src="https://www.googletagmanager.com/gtag/js?id={{ general["google_analytics"] }}"></script>
|
||||
<script async src="https://www.googletagmanager.com/gtag/js?id={{ general.google_analytics }}"></script>
|
||||
<script>
|
||||
window.dataLayer = window.dataLayer || [];
|
||||
|
||||
|
|
@ -37,7 +37,7 @@
|
|||
}
|
||||
gtag('js', new Date());
|
||||
|
||||
gtag('config', '{{ general["google_analytics"] }}');
|
||||
gtag('config', '{{ general.google_analytics }}');
|
||||
</script>
|
||||
{% endif %}
|
||||
{% endblock %}
|
||||
|
|
@ -1,35 +0,0 @@
|
|||
@charset "utf-8";
|
||||
|
||||
/* Fonts */
|
||||
@import url('{{ branding["font"]["primary"]["url"] }}');
|
||||
@import url('{{ branding["font"]["mono"]["url"] }}');
|
||||
|
||||
$family-sans-serif: "{{ branding["font"]["primary"]["name"] }}", sans-serif;
|
||||
$family-monospace: "{{ branding["font"]["mono"]["name"] }}", monospace;
|
||||
|
||||
/* Color Changes */
|
||||
$body-background-color: {{ branding["color"]["background"] }};
|
||||
$footer-background-color: transparent;
|
||||
$danger: {{ branding["color"]["danger"] }};
|
||||
|
||||
/* Custom Colors */
|
||||
$lg-btn-submit: {{ branding["color"]["button_submit"] }};
|
||||
$lg-tag-loc_title: {{ branding["color"]["tag"]["location_title"] }};
|
||||
$lg-tag-type_title: {{ branding["color"]["tag"]["type_title"] }};
|
||||
$lg-tag-type: {{ branding["color"]["tag"]["type"] }};
|
||||
$lg-progressbar: {{ branding["color"]["progress_bar"] }};
|
||||
$lg-tag-loc: {{ branding["color"]["tag"]["location"] }};
|
||||
|
||||
/* Element Changes */
|
||||
$footer-padding: 3rem 1.5rem 3rem ;
|
||||
|
||||
/*! bulma.io v0.7.4 | MIT License | github.com/jgthms/bulma */
|
||||
@import "utilities/_all";
|
||||
@import "base/_all";
|
||||
@import "elements/_all";
|
||||
@import "components/_all";
|
||||
@import "grid/_all";
|
||||
@import "layout/_all";
|
||||
|
||||
/* Hyperglass Imports */
|
||||
@import "custom/custom_elements";
|
||||
hyperglass/render/templates/hyperglass.scss.j2 (new file, 35 lines)
|
|
@@ -0,0 +1,35 @@
+@charset "utf-8";
+
+/* Fonts */
+@import url('{{ branding.font.primary.url }}');
+@import url('{{ branding.font.mono.url }}');
+
+$family-sans-serif: "{{ branding.font.primary.name }}", sans-serif;
+$family-monospace: "{{ branding.font.mono.name }}", monospace;
+
+/* Color Changes */
+$body-background-color: {{ branding.colors.background }};
+$footer-background-color: transparent;
+$danger: {{ branding.colors.danger }};
+
+/* Custom Colors */
+$lg-btn-submit: {{ branding.colors.button_submit }};
+$lg-tag-loc_title: {{ branding.colors.tag.location_title }};
+$lg-tag-type_title: {{ branding.colors.tag.query_type_title }};
+$lg-tag-type: {{ branding.colors.tag.query_type }};
+$lg-progressbar: {{ branding.colors.progress_bar }};
+$lg-tag-loc: {{ branding.colors.tag.location }};
+
+/* Element Changes */
+$footer-padding: 3rem 1.5rem 3rem ;
+
+/*! bulma.io v0.7.4 | MIT License | github.com/jgthms/bulma */
+@import "utilities/_all";
+@import "base/_all";
+@import "elements/_all";
+@import "components/_all";
+@import "grid/_all";
+@import "layout/_all";
+
+/* Hyperglass Imports */
+@import "custom/custom_elements";
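The new hyperglass.scss.j2 turns the site's Sass into a Jinja2 template, so branding values from the configuration become Sass variables at build time. A rough sketch of how such a template could be rendered and then compiled with libsass (the paths, example branding values, and the Bulma include path are assumptions, not hyperglass's actual render code):

    # Sketch: render the Jinja2 SCSS template, then compile it to CSS with libsass.
    import sass                                   # libsass, pinned in the project's requirements
    from jinja2 import Environment, FileSystemLoader

    env = Environment(loader=FileSystemLoader("hyperglass/render/templates"))
    scss_template = env.get_template("hyperglass.scss.j2")

    branding = {                                  # Jinja2 falls back to item lookup, so a plain
        "font": {                                 # dict also works with dot-style access
            "primary": {"url": "https://fonts.googleapis.com/css?family=Nunito", "name": "Nunito"},
            "mono": {"url": "https://fonts.googleapis.com/css?family=Fira+Mono", "name": "Fira Mono"},
        },
        "colors": {
            "background": "#fbfffe", "danger": "#ff3860", "button_submit": "#40798c",
            "progress_bar": "#40798c",
            "tag": {"location": "#40798c", "location_title": "#330036",
                    "query_type": "#40798c", "query_type_title": "#330036"},
        },
    }

    rendered_scss = scss_template.render(branding=branding)
    compiled_css = sass.compile(string=rendered_scss, include_paths=["static/sass"])  # assumed Bulma path
    with open("static/css/hyperglass.css", "w") as css_file:
        css_file.write(compiled_css)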
@@ -1,4 +1,4 @@
-{% extends "templates/base.html" %}
+{% extends "templates/base.html.j2" %}
<!DOCTYPE html>

<head>
@@ -11,19 +11,19 @@
<div class="modal-content">
<article class="message is-danger">
<div class="message-header">
-<p>{{ features["rate_limit"]["query"]["title"] }}</p>
+<p>{{ features.rate_limit.query.title }}</p>
</div>
<div class="message-body">
-<p>{{ features["rate_limit"]["query"]["message"] }}</p>
+<p>{{ features.rate_limit.query.message }}</p>
<br>
<div class="buttons is-right">
-<a href="/" class="button is-danger is-rounded is-outlined">{{ features["rate_limit"]["query"]["button"] }}</a>
+<a href="/" class="button is-danger is-rounded is-outlined">{{ features.rate_limit.query.button }}</a>
</div>
</div>
</article>
</div>
</div>
-{% if features["bgp_aspath"]["enable"] == true %}
+{% if features.bgp_aspath.enable == true %}
<div class="modal" id="help_bgp_aspath">
<div class="modal-background" onclick="closeModal()"></div>
<div class="modal-content is-clipped">
@@ -35,7 +35,7 @@
<button class="modal-close is-large" aria-label="close" onclick="closeModal()"></button>
</div>
{% endif %}
-{% if features["bgp_community"]["enable"] == true %}
+{% if features.bgp_community.enable == true %}
<div class="modal" id="help_bgp_community">
<div class="modal-background" onclick="closeModal()"></div>
<div class="modal-content">
@@ -51,10 +51,10 @@
<div class="container is-fluid">
<div class="navbar-brand">
</div>
-{% if branding["peering_db"]["enable"] == true %}
+{% if branding.peering_db.enable == true %}
<div class="navbar-menu">
<div class="navbar-end">
-<a class="navbar-item" href="https://as{{ general["primary_asn"] }}.peeringdb.com" target="_blank">
+<a class="navbar-item" href="https://as{{ general.primary_asn }}.peeringdb.com" target="_blank">
<span>PeeringDB</span>
<span class="icon">
<i class="icofont-external"></i>
@@ -68,33 +68,33 @@
</nav>
<section class="section">
<div class="container has-text-centered is-fluid">
-{% if branding["text"]["title_mode"] == 'all' %}
-<img src="{{ branding["logo"]["path"] }}" style="width: {{ branding["logo"]["width"] }}px;">
+{% if branding.text.title_mode == 'all' %}
+<img src="{{ branding.logo.path }}" style="width: {{ branding.logo.width }}px;">
<h1 class="title is-3" id="lg-title">
-{{ branding["text"]["title"] }}
+{{ branding.text.title }}
</h1>
<h2 class="subtitle is-5" id="lg-subtitle">
-{{ branding["text"]["subtitle"] }}
+{{ branding.text.subtitle }}
</h2>
<br>
-{% elif branding["text"]["title_mode"] == 'text_only' %}
+{% elif branding.text.title_mode == 'text_only' %}
<h1 class="title is-1" id="lg-title">
-{{ branding["text"]["title"] }}
+{{ branding.text.title }}
</h1>
<h2 class="subtitle is-3" id="lg-subtitle">
-{{ branding["text"]["subtitle"] }}
+{{ branding.text.subtitle }}
</h2>
<br>
-{% elif branding["text"]["title_mode"] == 'logo_title' %}
-<img src="{{ branding["logo"]["path"] }}" style="width: {{ branding["logo"]["width"] }}px;">
+{% elif branding.text.title_mode == 'logo_title' %}
+<img src="{{ branding.logo.path }}" style="width: {{ branding.logo.width }}px;">
<h1 class="title is-3" id="lg-title">
-{{ branding["text"]["title"] }}
+{{ branding.text.title }}
</h1>
-{% elif branding["text"]["title_mode"] == 'logo_only' %}
+{% elif branding.text.title_mode == 'logo_only' %}
<br>
<br>
<br>
-<img src="{{ branding["logo"]["path"] }}" style="width: {{ branding["logo"]["width"] }}px;">
+<img src="{{ branding.logo.path }}" style="width: {{ branding.logo.width }}px;">
<br>
<br>
{% endif %}
@@ -103,7 +103,7 @@
<div class="container is-fluid">
<div class="field has-addons has-addons-centered">
<div class="control has-icons-left is-expanded">
-<input type="text" class="input is-medium is-rounded is-family-monospace" id="target" placeholder="{{ branding["text"]["query_placeholder"] }}">
+<input type="text" class="input is-medium is-rounded is-family-monospace" id="target" placeholder="{{ branding.text.query_placeholder }}">
<span class="icon is-small is-left"><i class="icofont-at"></i></span>
</div>
</div>
@@ -122,7 +122,7 @@
<div class="control has-icons-left" id="location-control">
<div class="select is-medium is-rounded">
<select id="location" style="width: 256px">
-<option id="text_location" selected disabled>{{ branding["text"]["location"] }}</option>
+<option id="text_location" selected disabled>{{ branding.text.location }}</option>
</select>
</div>
<span class="icon is-left"><i class="icofont-location-arrow"></i></span>
@@ -143,33 +143,33 @@
</div>
<div class="dropdown-menu is-expanded" id="dropdown-menu2" role="menu">
<div class="dropdown-content lg-help">
-{% if features["bgp_route"]["enable"] == true %}
+{% if features.bgp_route.enable == true %}
<div class="dropdown-item">
-<strong>{{ branding["text"]["bgp_route"] }}</strong>
+<strong>{{ branding.text.bgp_route }}</strong>
<p>{{ info["bgp_route"]["content"] }}</p>
</div>
{% endif %}
-{% if features["bgp_community"]["enable"] == true %}
+{% if features.bgp_community.enable == true %}
<div class="dropdown-item">
-<strong>{{ branding["text"]["bgp_community"] }}</strong>
+<strong>{{ branding.text.bgp_community }}</strong>
<p>{{ info["bgp_community"]["content"] }}</p>
</div>
{% endif %}
-{% if features["bgp_aspath"]["enable"] == true %}
+{% if features.bgp_aspath.enable == true %}
<div class="dropdown-item">
-<strong>{{ branding["text"]["bgp_aspath"] }}</strong>
+<strong>{{ branding.text.bgp_aspath }}</strong>
<p>{{ info["bgp_aspath"]["content"] }}</p>
</div>
{% endif %}
-{% if features["ping"]["enable"] == true %}
+{% if features.ping.enable == true %}
<div class="dropdown-item">
-<strong>{{ branding["text"]["ping"] }}</strong>
+<strong>{{ branding.text.ping }}</strong>
<p>{{ info["ping"]["content"] }}</p>
</div>
{% endif %}
-{% if features["traceroute"]["enable"] == true %}
+{% if features.traceroute.enable == true %}
<div class="dropdown-item">
-<strong>{{ branding["text"]["traceroute"] }}</strong>
+<strong>{{ branding.text.traceroute }}</strong>
<p>{{ info["traceroute"]["content"] }}</p>
</div>
{% endif %}
@@ -181,31 +181,31 @@
<div class="select is-medium is-rounded">
<select id="type">
<option selected disabled>
-{{ branding["text"]["query_type"] }}
+{{ branding.text.query_type }}
</option>
-{% if features["bgp_route"]["enable"] == true %}
+{% if features.bgp_route.enable == true %}
<option name="type" id="type_bgp_route" value="bgp_route">
-{{ branding["text"]["bgp_route"] }}
+{{ branding.text.bgp_route }}
</option>
{% endif %}
-{% if features["bgp_community"]["enable"] == true %}
+{% if features.bgp_community.enable == true %}
<option name="type" id="type_bgp_community" value="bgp_community">
-{{ branding["text"]["bgp_community"] }}
+{{ branding.text.bgp_community }}
</option>
{% endif %}
-{% if features["bgp_aspath"]["enable"] == true %}
+{% if features.bgp_aspath.enable == true %}
<option name="type" id="type_bgp_aspath" value="bgp_aspath">
-{{ branding["text"]["bgp_aspath"] }}
+{{ branding.text.bgp_aspath }}
</option>
{% endif %}
-{% if features["ping"]["enable"] == true %}
+{% if features.ping.enable == true %}
<option name="type" id="type_ping" value="ping">
-{{ branding["text"]["ping"] }}
+{{ branding.text.ping }}
</option>
{% endif %}
-{% if features["traceroute"]["enable"] == true %}
+{% if features.traceroute.enable == true %}
<option name="type" id="type_traceroute" value="traceroute">
-{{ branding["text"]["traceroute"] }}
+{{ branding.text.traceroute }}
</option>
{% endif %}
</select>
@@ -235,7 +235,7 @@
<i id="copy-icon" class="icofont-ui-copy"></i>
</span>
</a>
-<p class="title" id="results">{{ branding["text"]["results"] }}</p>
+<p class="title" id="results">{{ branding.text.results }}</p>
<p id="queryInfo">
</p>
<p id="progress">
@@ -245,18 +245,18 @@
<br>
<p class="query-output" id="output">
</p>
-{% if features["cache"]["show_text"] == true %}
+{% if features.cache.show_text == true %}
<hr>
-<p class="is-size-7">{{ features["cache"]["text"] }}</p>
+<p class="is-size-7">{{ features.cache.text }}</p>
{% endif %}
</div>
</div>
</section>
-{% if branding["footer"]["enable"] == true %}
-{% include "templates/footer.html" %}
+{% if branding.footer.enable == true %}
+{% include "templates/footer.html.j2" %}
{% endif %}
-{% if branding["credit"]["enable"] == true %}
-{% include "templates/credit.html" %}
+{% if branding.credit.enable == true %}
+{% include "templates/credit.html.j2" %}
{% endif %}
{% endblock %}
</body>
[8 binary image files in the diff, each with identical before/after size: 8 KiB, 23 KiB, 4.3 KiB, 849 B, 1.4 KiB, 15 KiB, 4.4 KiB, 4.1 KiB.]
manage.py (12 lines changed)

@@ -35,6 +35,7 @@ def construct_test(test_query, location, test_target):
def hg():
pass

+
@hg.command("pylint-check", help="Runs Pylint and generates a badge for GitHub")
@click.option(
"-i",
@@ -44,7 +45,14 @@ def hg():
default=False,
help="Output Pylint score as integer",
)
-@click.option("-b", "--badge", "create_badge", type=bool, default=False, help="Create Pylint badge")
+@click.option(
+"-b",
+"--badge",
+"create_badge",
+type=bool,
+default=False,
+help="Create Pylint badge",
+)
def pylint_check(int_only, create_badge):
try:
import re
@@ -520,7 +528,7 @@ def flask_dev_server(host, port):
from hyperglass import configuration
from hyperglass import render

-debug_state = configuration.debug_state()
+debug_state = configuration.params.general.debug
render.css()
click.secho(f"✓ Starting Flask development server", fg="green", bold=True)
hyperglass.app.run(host=host, debug=debug_state, port=port)
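The debug_state() helper goes away because manage.py can read the flag straight off the validated configuration object. A small sketch of the idea behind configuration.params.general.debug (model and field names are inferred from the lines above, not copied from hyperglass):

    # Sketch only: a module-level "params" object exposing typed settings.
    from pydantic import BaseModel

    class General(BaseModel):
        debug: bool = False                 # assumed default

    class Params(BaseModel):
        general: General

    # Built once (e.g. from the parsed YAML files), then read everywhere as
    # configuration.params.general.debug instead of calling a helper function.
    params = Params(general={"debug": True})
    print(params.general.debug)             # -> True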
@@ -12,3 +12,5 @@ libsass==0.18.0
markdown2==2.3.7
passlib==1.7.1
prometheus_client==0.7.0
+pydantic==0.29
+pyyaml==5.1.1
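The two new pinned dependencies carry the whole change: pyyaml parses the user's configuration files and pydantic validates them into nested models, which is what lets the templates above switch from dict lookups like branding["colors"]["background"] to attribute access like branding.colors.background. A compact sketch of that pattern (field names mirror the templates above, but the models and example values are illustrative, not hyperglass's actual schema):

    # Sketch: YAML in, validated nested models out, attribute access everywhere else.
    import yaml                       # pyyaml==5.1.1
    from pydantic import BaseModel    # pydantic==0.29

    class Tag(BaseModel):
        location: str
        location_title: str
        query_type: str
        query_type_title: str

    class Colors(BaseModel):
        background: str
        danger: str
        button_submit: str
        progress_bar: str
        tag: Tag

    class Branding(BaseModel):
        colors: Colors

    yaml_text = """
    colors:
      background: "#fbfffe"
      danger: "#ff3860"
      button_submit: "#40798c"
      progress_bar: "#40798c"
      tag:
        location: "#40798c"
        location_title: "#330036"
        query_type: "#40798c"
        query_type_title: "#330036"
    """

    branding = Branding(**yaml.safe_load(yaml_text))
    print(branding.colors.background)       # attribute access, as used in hyperglass.scss.j2
    print(branding.colors.tag.query_type)

A missing or misspelled key fails at load time with a ValidationError instead of silently rendering as an empty value in the templates.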