Merge pull request #7 from checktheroads/async

 async all the things
Matt Love 2019-07-16 09:48:09 -07:00 committed by GitHub
commit 9d933944ef
18 changed files with 629 additions and 632 deletions

.gitignore (vendored)
View file

@ -1,5 +1,6 @@
test.py
.DS_Store
.idea
*.sass-cache
#
# Github Default from https://github.com/martinohanlon/flightlight/issues/1

View file

@ -2,18 +2,19 @@
language: python
python:
- '3.6'
- '3.7'
before_install:
- sudo add-apt-repository universe -y
- sudo apt-get update -q
- sudo apt-get install -y redis-server redis-tools
- sudo apt-get install -y redis-server
- sudo systemctl start redis-server
- sudo systemctl status redis-server
install:
- pip3 install -r requirements.txt
before_script:
- pip3 uninstall -y -r ./tests/requirements_dev.txt
- pip3 install -r ./tests/requirements_dev.txt
- isort -rc hyperglass
- flake8 hyperglass
- python3 ./tests/ci_prepare.py
script:

View file

@ -22,8 +22,9 @@ class Construct:
input parameters.
"""
def __init__(self, device):
def __init__(self, device, transport):
self.device = device
self.transport = transport
def get_src(self, ver):
"""
@ -48,15 +49,17 @@ class Construct:
cmd_path = f"{nos}.{afi}.{query_type}"
return operator.attrgetter(cmd_path)(commands)
def ping(self, transport, target):
def ping(self, target):
"""Constructs ping query parameters from pre-validated input"""
query_type = "ping"
logger.debug(f"Constructing {query_type} query for {target} via {transport}...")
logger.debug(
f"Constructing {query_type} query for {target} via {self.transport}..."
)
query = None
ip_version = ipaddress.ip_network(target).version
afi = f"ipv{ip_version}"
source = self.get_src(ip_version)
if transport == "rest":
if self.transport == "rest":
query = json.dumps(
{
"query_type": query_type,
@ -65,24 +68,25 @@ class Construct:
"target": target,
}
)
elif transport == "scrape":
elif self.transport == "scrape":
conf_command = self.device_commands(self.device.nos, afi, query_type)
fmt_command = conf_command.format(target=target, source=source)
query = (self.device.address.exploded, self.device.nos, fmt_command)
query = conf_command.format(target=target, source=source)
logger.debug(f"Constructed query: {query}")
return query
def traceroute(self, transport, target):
def traceroute(self, target):
"""
Constructs traceroute query parameters from pre-validated input.
"""
query_type = "traceroute"
logger.debug(f"Constructing {query_type} query for {target} via {transport}...")
logger.debug(
f"Constructing {query_type} query for {target} via {self.transport}..."
)
query = None
ip_version = ipaddress.ip_network(target).version
afi = f"ipv{ip_version}"
source = self.get_src(ip_version)
if transport == "rest":
if self.transport == "rest":
query = json.dumps(
{
"query_type": query_type,
@ -92,62 +96,64 @@ class Construct:
}
)
elif transport == "scrape":
elif self.transport == "scrape":
conf_command = self.device_commands(self.device.nos, afi, query_type)
fmt_command = conf_command.format(target=target, source=source)
query = (self.device.address.exploded, self.device.nos, fmt_command)
query = conf_command.format(target=target, source=source)
logger.debug(f"Constructed query: {query}")
return query
def bgp_route(self, transport, target):
def bgp_route(self, target):
"""
Constructs bgp_route query parameters from pre-validated input.
"""
query_type = "bgp_route"
logger.debug(f"Constructing {query_type} query for {target} via {transport}...")
logger.debug(
f"Constructing {query_type} query for {target} via {self.transport}..."
)
query = None
ip_version = ipaddress.ip_network(target).version
afi = f"ipv{ip_version}"
if transport == "rest":
if self.transport == "rest":
query = json.dumps({"query_type": query_type, "afi": afi, "target": target})
if transport == "scrape":
elif self.transport == "scrape":
conf_command = self.device_commands(self.device.nos, afi, query_type)
fmt_command = conf_command.format(target=target)
query = (self.device.address.exploded, self.device.nos, fmt_command)
query = conf_command.format(target=target)
logger.debug(f"Constructed query: {query}")
return query
def bgp_community(self, transport, target):
def bgp_community(self, target):
"""
Constructs bgp_community query parameters from pre-validated
input.
"""
query_type = "bgp_community"
logger.debug(f"Constructing {query_type} query for {target} via {transport}...")
logger.debug(
f"Constructing {query_type} query for {target} via {self.transport}..."
)
afi = "dual"
query = None
if transport == "rest":
if self.transport == "rest":
query = json.dumps({"query_type": query_type, "afi": afi, "target": target})
if transport == "scrape":
elif self.transport == "scrape":
conf_command = self.device_commands(self.device.nos, afi, query_type)
fmt_command = conf_command.format(target=target)
query = (self.device.address.exploded, self.device.nos, fmt_command)
query = conf_command.format(target=target)
logger.debug(f"Constructed query: {query}")
return query
def bgp_aspath(self, transport, target):
def bgp_aspath(self, target):
"""
Constructs bgp_aspath query parameters from pre-validated input.
"""
query_type = "bgp_aspath"
logger.debug(f"Constructing {query_type} query for {target} via {transport}...")
logger.debug(
f"Constructing {query_type} query for {target} via {self.transport}..."
)
afi = "dual"
query = None
if transport == "rest":
if self.transport == "rest":
query = json.dumps({"query_type": query_type, "afi": afi, "target": target})
if transport == "scrape":
elif self.transport == "scrape":
conf_command = self.device_commands(self.device.nos, afi, query_type)
fmt_command = conf_command.format(target=target)
query = (self.device.address.exploded, self.device.nos, fmt_command)
query = conf_command.format(target=target)
logger.debug(f"Constructed query: {query}")
return query
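A minimal usage sketch of the refactored interface above, assuming a validated device model (with the nos, address, and credential attributes referenced in this file) as loaded from the device configuration; the transport is now bound once in the constructor and each query method takes only the target:

# Illustrative only: "device" and the target value are placeholders.
from hyperglass.command.construct import Construct

construct = Construct(device, transport="scrape")
query = construct.bgp_route("192.0.2.0/24")  # formatted CLI command for scrape, JSON string for rest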

View file

@ -4,20 +4,15 @@ returns errors if input is invalid. Passes validated parameters to
construct.py, which is used to build & run the Netmiko connections or
hyperglass-frr API calls, and returns the output back to the front end.
"""
# Standard Library Imports
import json
import time
# Third Party Imports
import requests
import requests.exceptions
import http3
import sshtunnel
from logzero import logger
from netmiko import ConnectHandler
from netmiko import NetMikoAuthenticationException
from netmiko import NetmikoAuthError
from netmiko import NetmikoTimeoutError
from netmiko import NetMikoTimeoutException
from netmiko import redispatch
# Project Imports
from hyperglass.command.construct import Construct
@ -29,196 +24,175 @@ from hyperglass.configuration import params
from hyperglass.configuration import proxies
from hyperglass.constants import Supported
from hyperglass.constants import code
from hyperglass.constants import protocol_map
from hyperglass.exceptions import CantConnect
class Rest:
"""Executes connections to REST API devices"""
class Connect:
"""
Parent class for all connection types:
def __init__(self, transport, device, query_type, target):
self.transport = transport
self.device = device
scrape() connects to devices via SSH for "screen scraping"
rest() connects to devices via HTTP for RESTful API communication
"""
def __init__(self, device_config, query_type, target, transport):
self.device_config = device_config
self.query_type = query_type
self.target = target
self.cred = getattr(credentials, self.device.credential)
self.query = getattr(Construct(self.device), self.query_type)(
self.transport, self.target
)
self.transport = transport
self.cred = getattr(credentials, device_config.credential)
self.query = getattr(Construct(device_config, transport), query_type)(target)
def frr(self):
"""Sends HTTP POST to router running the hyperglass-frr API"""
logger.debug(f"FRR host params:\n{self.device}")
logger.debug(f"Raw query parameters: {self.query}")
try:
headers = {
"Content-Type": "application/json",
"X-API-Key": self.cred.password.get_secret_value(),
}
json_query = json.dumps(self.query)
frr_endpoint = (
f"http://{self.device.address.exploded}:{self.device.port}/frr"
)
logger.debug(f"HTTP Headers: {headers}")
logger.debug(f"JSON query: {json_query}")
logger.debug(f"FRR endpoint: {frr_endpoint}")
frr_response = requests.post(
frr_endpoint, headers=headers, data=json_query, timeout=7
)
response = frr_response.text
status = frr_response.status_code
logger.debug(f"FRR status code: {status}")
logger.debug(f"FRR response text:\n{response}")
except requests.exceptions.RequestException as rest_error:
logger.error(
f"Error connecting to device {self.device.location}: {rest_error}"
)
response = params.messages.general
status = code.invalid
return response, status
def bird(self):
"""Sends HTTP POST to router running the hyperglass-bird API"""
logger.debug(f"BIRD host params:\n{self.device}")
logger.debug(f"Raw query parameters: {self.query}")
try:
headers = {
"Content-Type": "application/json",
"X-API-Key": self.cred.password.get_secret_value(),
}
json_query = json.dumps(self.query)
bird_endpoint = (
f"http://{self.device.address.exploded}:{self.device.port}/bird"
)
logger.debug(f"HTTP Headers: {headers}")
logger.debug(f"JSON query: {json_query}")
logger.debug(f"BIRD endpoint: {bird_endpoint}")
bird_response = requests.post(
bird_endpoint, headers=headers, data=json_query, timeout=7
)
response = bird_response.text
status = bird_response.status_code
logger.debug(f"BIRD status code: {status}")
logger.debug(f"BIRD response text:\n{response}")
except requests.exceptions.RequestException as requests_exception:
logger.error(
f"Error connecting to device {self.device}: {requests_exception}"
)
response = params.messages.general
status = code.invalid
return response, status
class Netmiko:
"""Executes connections to Netmiko devices"""
def __init__(self, transport, device, query_type, target):
self.device = device
self.target = target
self.cred = getattr(credentials, self.device.credential)
self.location, self.nos, self.command = getattr(Construct(device), query_type)(
transport, target
)
self.nm_host = {
"host": self.location,
"device_type": self.nos,
"username": self.cred.username,
"password": self.cred.password.get_secret_value(),
"global_delay_factor": 0.5,
}
def direct(self):
async def scrape(self):
"""
Connects to the router via netmiko library, return the command
output.
Connects to the router via the Netmiko library and returns the command
output. If an SSH proxy is enabled, creates an SSH tunnel via
the sshtunnel library, and netmiko uses the local binding to
connect to the remote device.
"""
logger.debug(f"Connecting to {self.device.location} via Netmiko library...")
response = None
try:
nm_connect_direct = ConnectHandler(**self.nm_host)
response = nm_connect_direct.send_command(self.command)
if self.device_config.proxy:
device_proxy = getattr(proxies, self.device_config.proxy)
logger.debug(
f"Proxy: {device_proxy.address.compressed}:{device_proxy.port}"
)
logger.debug(
"Connecting to {dev} via sshtunnel library...".format(
dev=self.device_config.proxy
)
)
with sshtunnel.open_tunnel(
device_proxy.address.compressed,
device_proxy.port,
ssh_username=device_proxy.username,
ssh_password=device_proxy.password.get_secret_value(),
remote_bind_address=(
self.device_config.address.compressed,
self.device_config.port,
),
local_bind_address=("localhost", 0),
) as tunnel:
logger.debug(f"Established tunnel with {self.device_config.proxy}")
scrape_host = {
"host": "localhost",
"port": tunnel.local_bind_port,
"device_type": self.device_config.nos,
"username": self.cred.username,
"password": self.cred.password.get_secret_value(),
"fast_cli": True,
}
logger.debug(f"Local binding: localhost:{tunnel.local_bind_port}")
try:
logger.debug(
"Connecting to {dev} via Netmiko library...".format(
dev=self.device_config.location
)
)
nm_connect_direct = ConnectHandler(**scrape_host)
response = nm_connect_direct.send_command(self.query)
except (
NetMikoAuthenticationException,
NetMikoTimeoutException,
NetmikoAuthError,
NetmikoTimeoutError,
sshtunnel.BaseSSHTunnelForwarderError,
) as scrape_error:
raise CantConnect(scrape_error)
else:
scrape_host = {
"host": self.device_config.address,
"device_type": self.device_config.nos,
"username": self.cred.username,
"password": self.cred.password.get_secret_value(),
"fast_cli": True,
}
try:
logger.debug(
"Connecting to {dev} via Netmiko library...".format(
dev=self.device_config.location
)
)
nm_connect_direct = ConnectHandler(**scrape_host)
response = nm_connect_direct.send_command(self.query)
except (
NetMikoAuthenticationException,
NetMikoTimeoutException,
NetmikoAuthError,
NetmikoTimeoutError,
sshtunnel.BaseSSHTunnelForwarderError,
) as scrape_error:
raise CantConnect(scrape_error)
if not response:
raise CantConnect("No response")
status = code.valid
logger.debug(f"Response for direct command {self.command}:\n{response}")
except (
NetMikoAuthenticationException,
NetMikoTimeoutException,
NetmikoAuthError,
NetmikoTimeoutError,
) as netmiko_exception:
logger.debug(f"Output for query: {self.query}:\n{response}")
except CantConnect as scrape_error:
logger.error(scrape_error)
response = params.messages.general
status = code.invalid
logger.error(f"{netmiko_exception}, {status}")
return response, status
def proxied(self):
"""
Connects to the proxy server via netmiko library, then logs
into the router via SSH.
"""
device_proxy = getattr(proxies, self.device.proxy)
nm_proxy = {
"host": device_proxy.address.exploded,
"username": device_proxy.username,
"password": device_proxy.password.get_secret_value(),
"device_type": device_proxy.nos,
"global_delay_factor": 0.5,
async def rest(self):
"""Sends HTTP POST to router running a hyperglass API agent"""
logger.debug(f"Query parameters: {self.query}")
uri = Supported.map_rest(self.device_config.nos)
headers = {
"Content-Type": "application/json",
"X-API-Key": self.cred.password.get_secret_value(),
}
nm_connect_proxied = ConnectHandler(**nm_proxy)
nm_ssh_command = device_proxy.ssh_command.format(**self.nm_host) + "\n"
logger.debug(f"Netmiko proxy {self.device.proxy}")
logger.debug(f"Proxy SSH command: {nm_ssh_command}")
nm_connect_proxied.write_channel(nm_ssh_command)
time.sleep(1)
proxy_output = nm_connect_proxied.read_channel()
logger.debug(f"Proxy output:\n{proxy_output}")
http_protocol = protocol_map.get(self.device_config.port, "http")
endpoint = "{protocol}://{addr}:{port}/{uri}".format(
protocol=http_protocol,
addr=self.device_config.address.exploded,
port=self.device_config.port,
uri=uri,
)
logger.debug(f"HTTP Headers: {headers}")
logger.debug(f"URL endpoint: {endpoint}")
try:
# Accept SSH key warnings
if "Are you sure you want to continue connecting" in proxy_output:
logger.debug("Received OpenSSH key warning")
nm_connect_proxied.write_channel("yes" + "\n")
nm_connect_proxied.write_channel(self.nm_host["password"] + "\n")
# Send password on prompt
elif "assword" in proxy_output:
logger.debug("Received password prompt")
nm_connect_proxied.write_channel(self.nm_host["password"] + "\n")
proxy_output += nm_connect_proxied.read_channel()
# Reclassify netmiko connection as configured device type
logger.debug(
f'Redispatching netmiko with device class {self.nm_host["device_type"]}'
http_client = http3.AsyncClient()
raw_response = await http_client.post(
endpoint, headers=headers, json=self.query, timeout=7
)
redispatch(nm_connect_proxied, self.nm_host["device_type"])
response = nm_connect_proxied.send_command(self.command)
status = code.valid
response = raw_response.text
status = raw_response.status_code
logger.debug(f"Netmiko proxied response:\n{response}")
logger.debug(f"HTTP status code: {status}")
logger.debug(f"Output for query {self.query}:\n{response}")
except (
NetMikoAuthenticationException,
NetMikoTimeoutException,
NetmikoAuthError,
NetmikoTimeoutError,
) as netmiko_exception:
http3.exceptions.ConnectTimeout,
http3.exceptions.CookieConflict,
http3.exceptions.DecodingError,
http3.exceptions.InvalidURL,
http3.exceptions.PoolTimeout,
http3.exceptions.ProtocolError,
http3.exceptions.ReadTimeout,
http3.exceptions.RedirectBodyUnavailable,
http3.exceptions.RedirectLoop,
http3.exceptions.ResponseClosed,
http3.exceptions.ResponseNotRead,
http3.exceptions.StreamConsumed,
http3.exceptions.Timeout,
http3.exceptions.TooManyRedirects,
http3.exceptions.WriteTimeout,
OSError,
) as rest_error:
logger.error(f"Error connecting to device {self.device_config.location}")
logger.error(rest_error)
response = params.messages.general
status = code.invalid
logger.error(f"{netmiko_exception}, {status},Proxy: {self.device.proxy}")
return response, status
class Execute:
"""
Ingests user input, runs blacklist check, runs prefix length check
(if enabled), pulls all configuration variables for the input
router.
Ingests raw user input, performs validation of target input, pulls
all configuration variables for the input router and connects to the
selected device to execute the query.
"""
def __init__(self, lg_data):
@ -227,32 +201,27 @@ class Execute:
self.input_type = self.input_data["type"]
self.input_target = self.input_data["target"]
def parse(self, output, nos):
def parse(self, raw_output, nos):
"""
Splits BGP output by AFI, returns only IPv4 & IPv6 output for
Splits BGP raw output by AFI, returns only IPv4 & IPv6 output for
protocol-agnostic commands (Community & AS_PATH Lookups).
"""
logger.debug("Parsing output...")
logger.debug("Parsing raw output...")
parsed = output
parsed = raw_output
if self.input_type in ("bgp_community", "bgp_aspath"):
logger.debug(f"Parsing raw output for device type {nos}")
if nos in ("cisco_ios",):
logger.debug(f"Parsing output for device type {nos}")
delimiter = "For address family: "
parsed_ipv4 = output.split(delimiter)[1]
parsed_ipv6 = output.split(delimiter)[2]
parsed = delimiter + parsed_ipv4 + delimiter + parsed_ipv6
parsed_raw = raw_output.split(delimiter)[1:3]
parsed = "\n\n".join([delimiter + afi.rstrip() for afi in parsed_raw])
elif nos in ("cisco_xr",):
logger.debug(f"Parsing output for device type {nos}")
delimiter = "Address Family: "
parsed_ipv4 = output.split(delimiter)[1]
parsed_ipv6 = output.split(delimiter)[2]
parsed = delimiter + parsed_ipv4 + delimiter + parsed_ipv6
parsed_raw = raw_output.split(delimiter)[1:3]
parsed = "\n\n".join([delimiter + afi.rstrip() for afi in parsed_raw])
return parsed
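To make the split above concrete, here is an illustrative run on a made-up IOS-style output (the real command output is of course much longer):

# Made-up sample; only the delimiter handling matters here.
raw_output = (
    "For address family: IPv4 Unicast\n...v4 table...\n"
    "For address family: IPv6 Unicast\n...v6 table...\n"
    "For address family: VPNv4 Unicast\n...dropped..."
)
delimiter = "For address family: "
parsed_raw = raw_output.split(delimiter)[1:3]  # keep only the IPv4 & IPv6 sections
parsed = "\n\n".join([delimiter + afi.rstrip() for afi in parsed_raw])
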
def response(self):
async def response(self):
"""
Initializes Execute.filter(), if input fails to pass filter,
returns errors to front end. Otherwise, executes queries.
@ -274,23 +243,16 @@ class Execute:
logger.debug(f"Validity: {validity}, Message: {msg}, Status: {status}")
transport = Supported.map_transport(device_config.nos)
connection = Connect(
device_config, self.input_type, self.input_target, transport
)
if Supported.is_rest(device_config.nos):
connection = Rest("rest", device_config, self.input_type, self.input_target)
raw_output, status = getattr(connection, device_config.nos)()
output = self.parse(raw_output, device_config.nos)
raw_output, status = await connection.rest()
elif Supported.is_scrape(device_config.nos):
logger.debug("Initializing Netmiko...")
raw_output, status = await connection.scrape()
output = self.parse(raw_output, device_config.nos)
connection = Netmiko(
"scrape", device_config, self.input_type, self.input_target
)
if device_config.proxy:
raw_output, status = connection.proxied()
elif not device_config.proxy:
raw_output, status = connection.direct()
output = self.parse(raw_output, device_config.nos)
logger.debug(f"Parsed output for device type {device_config.nos}:\n{output}")
logger.debug(
f"Parsed output for device type {device_config.nos}:\n{output}"
)
return (output, status)
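A condensed sketch of the new async call path, assuming a coroutine context; the class and helper names mirror the code above, only the surrounding asyncio plumbing is illustrative:

import asyncio

from hyperglass.constants import Supported
# Module path assumed from the package layout shown in this diff:
from hyperglass.command.connect import Connect

async def run_query(device_config, query_type, target):
    # Transport selection mirrors Supported.map_transport / is_rest / is_scrape above.
    transport = Supported.map_transport(device_config.nos)
    connection = Connect(device_config, query_type, target, transport)
    if transport == "rest":
        return await connection.rest()
    return await connection.scrape()

# output, status = asyncio.get_event_loop().run_until_complete(
#     run_query(device_config, "ping", "192.0.2.1")
# )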

View file

@ -86,11 +86,8 @@ def ip_validate(target):
try:
valid_ip = ipaddress.ip_network(target)
if valid_ip.is_reserved or valid_ip.is_unspecified or valid_ip.is_loopback:
validity = False
logger.debug(f"IP {valid_ip} is invalid")
if valid_ip.is_global:
validity = True
logger.debug(f"IP {valid_ip} is valid")
raise ValueError
validity = True
except (ipaddress.AddressValueError, ValueError):
logger.debug(f"IP {target} is invalid")
validity = False
@ -299,8 +296,6 @@ class Validate:
validity = True
msg = f"{target} matched large community."
status = code.valid
if not validity:
logger.error(f"{msg}, {status}")
logger.debug(f"{msg}, {status}")
return (validity, msg, status)
@ -320,7 +315,5 @@ class Validate:
validity = True
msg = f"{target} matched AS_PATH regex."
status = code.valid
if not validity:
logger.error(f"{msg}, {status}")
logger.debug(f"{msg}, {status}")
return (validity, msg, status)
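A standalone restatement of the simplified check above, assuming only the standard library; raising ValueError for reserved/unspecified/loopback targets lets a single except clause reject both malformed and disallowed input:

import ipaddress

def ip_validate(target):
    # Same logic as the diff above, shown in isolation.
    try:
        valid_ip = ipaddress.ip_network(target)
        if valid_ip.is_reserved or valid_ip.is_unspecified or valid_ip.is_loopback:
            raise ValueError
        validity = True
    except (ipaddress.AddressValueError, ValueError):
        validity = False
    return validity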

View file

@ -174,6 +174,7 @@ class Proxy(BaseSettings):
"""Model for per-proxy config in devices.yaml"""
address: Union[IPvAnyAddress, str]
port: int = 22
username: str
password: SecretStr
nos: str
@ -348,7 +349,7 @@ class Messages(BaseSettings):
"<b>{d}</b> requires IPv6 BGP lookups" "to be in CIDR notation."
)
invalid_ip: str = "<b>{i}</b> is not a valid IP address."
invalid_dual: str = "invalid_dual <b>{i}</b> is an invalid {qt}."
invalid_dual: str = "<b>{i}</b> is an invalid {qt}."
general: str = "An error occurred."
directed_cidr: str = "<b>{q}</b> queries can not be in CIDR format."
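A small sketch of how the updated Proxy model above behaves under pydantic validation; the field set is taken from the diff, while the import locations and example values are assumptions:

from typing import Union
from pydantic import BaseSettings, IPvAnyAddress, SecretStr

class Proxy(BaseSettings):
    address: Union[IPvAnyAddress, str]
    port: int = 22
    username: str          # newly required in this change
    password: SecretStr
    nos: str

# Placeholder values for illustration only.
jumpbox = Proxy(address="192.0.2.10", username="hyperglass", password="example", nos="linux_ssh")
jumpbox.password                      # masked in repr/logs
jumpbox.password.get_secret_value()   # "example"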

View file

@ -2,6 +2,8 @@
Global Constants for hyperglass
"""
protocol_map = {80: "http", 8080: "http", 443: "https", 8443: "https"}
class Status:
"""
@ -9,8 +11,6 @@ class Status:
hyperglass.
"""
# pylint: disable=too-few-public-methods
codes_dict = {
200: ("valid", "Valid Query"),
405: ("not_allowed", "Query Not Allowed"),
@ -163,3 +163,21 @@ class Supported:
query_type tuple.
"""
return bool(query_type in Supported.query_types)
@staticmethod
def map_transport(nos):
"""
Returns "scrape" if input nos is in Supported.scrape tuple, or
"rest" if input nos is in Supported.rest tuple.
"""
transport = None
if nos in Supported.scrape:
transport = "scrape"
elif nos in Supported.rest:
transport = "rest"
return transport
@staticmethod
def map_rest(nos):
uri_map = {"frr": "frr", "bird": "bird"}
return uri_map.get(nos)
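A short sketch of how the new helpers fit together; the NOS strings are ones used elsewhere in this diff, and membership in Supported.scrape / Supported.rest is defined earlier in constants.py (not shown here):

from hyperglass.constants import Supported, protocol_map

Supported.map_transport("cisco_ios")  # -> "scrape" (assuming cisco_ios is in Supported.scrape)
Supported.map_transport("frr")        # -> "rest"   (assuming frr is in Supported.rest)
Supported.map_rest("frr")             # -> "frr", becomes the /frr URI in Connect.rest()
protocol_map.get(8443, "http")        # -> "https"; unlisted ports fall back to "http"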

View file

@ -24,6 +24,28 @@ class ConfigError(HyperglassError):
return self.message
class CantConnect(HyperglassError):
def __init__(self, message):
super().__init__(message)
self.message = message
def __str__(self):
return self.message
class ParseError(HyperglassError):
"""
Raised when an output parser encounters an error.
"""
def __init__(self, message):
super().__init__(message)
self.message = message
def __str__(self):
return self.message
class UnsupportedDevice(HyperglassError):
"""
Raised when an input NOS is not in the supported NOS list.

View file

@ -1,71 +0,0 @@
"""
https://github.com/checktheroads/hyperglass
Gunicorn configuration
"""
import os
import shutil
import multiprocessing
from logzero import logger
command = "/usr/local/bin/gunicorn"
pythonpath = "/opt/hyperglass"
bind = "[::1]:8001"
preload = True
workers = multiprocessing.cpu_count() * 2
user = "www-data"
timeout = 60
keepalive = 10
# Prometheus Multiprocessing directory, set as environment variable
prometheus_multiproc_dir = "/tmp/hyperglass_prometheus"
def on_starting(server): # pylint: disable=unused-argument
"""Pre-startup Gunicorn Tasks"""
# Renders Jinja2 -> Sass, compiles Sass -> CSS prior to worker load
try:
import hyperglass.render
hyperglass.render.css()
print(1)
except ImportError as error_exception:
logger.error(f"Exception occurred:\n{error_exception}")
# Verify Redis is running
try:
import hyperglass.configuration
import redis
config = hyperglass.configuration.params()
redis_config = {
"host": config["general"]["redis_host"],
"port": config["general"]["redis_port"],
"charset": "utf-8",
"decode_responses": True,
"db": config["features"]["cache"]["redis_id"],
}
r_cache = redis.Redis(**redis_config)
if r_cache.set("testkey", "testvalue", ex=1):
logger.debug("Redis is working properly")
except (redis.exceptions.ConnectionError):
logger.error("Redis is not running")
raise EnvironmentError("Redis is not running")
# Prometheus multiprocessing directory
if os.path.exists(prometheus_multiproc_dir):
shutil.rmtree(prometheus_multiproc_dir)
else:
os.mkdir(prometheus_multiproc_dir)
os.environ["prometheus_multiproc_dir"] = prometheus_multiproc_dir
def worker_exit(server, worker): # pylint: disable=unused-argument
"""Prometheus multiprocessing WSGI support"""
from prometheus_client import multiprocess
multiprocess.mark_process_dead(worker.pid)
def on_exit(server):
"""Pre-shutdown Gunicorn Tasks"""
if os.path.exists(prometheus_multiproc_dir):
shutil.rmtree(prometheus_multiproc_dir)

View file

@ -1,22 +1,24 @@
"""
Main Hyperglass Front End
"""
"""Hyperglass Front End"""
# Standard Library Imports
import json
import time
from ast import literal_eval
from pathlib import Path
# Third Party Imports
import redis
from flask import Flask
from flask import Response
from flask import request
from flask_limiter import Limiter
from flask_limiter.util import get_ipaddr
import aredis
from logzero import logger
from prometheus_client import CollectorRegistry
from prometheus_client import Counter
from prometheus_client import generate_latest
from prometheus_client import multiprocess
from sanic import Sanic
from sanic import response
from sanic.exceptions import NotFound
from sanic.exceptions import ServerError
from sanic_limiter import Limiter
from sanic_limiter import RateLimitExceeded
from sanic_limiter import get_remote_address
# Project Imports
from hyperglass import render
@ -34,17 +36,19 @@ logger.debug(f"Configuration Parameters:\n {params.dict()}")
redis_config = {
"host": params.general.redis_host,
"port": params.general.redis_port,
"charset": "utf-8",
"decode_responses": True,
}
# Main Flask definition
app = Flask(__name__, static_url_path="/static")
# Main Sanic app definition
static_dir = Path(__file__).parent / "static"
logger.debug(f"Static Files: {static_dir}")
app = Sanic(__name__)
app.static("/static", str(static_dir))
# Redis Cache Config
r_cache = redis.Redis(db=params.features.rate_limit.redis_id, **redis_config)
r_cache = aredis.StrictRedis(db=params.features.cache.redis_id, **redis_config)
# Flask-Limiter Config
# Sanic-Limiter Config
query_rate = params.features.rate_limit.query.rate
query_period = params.features.rate_limit.query.period
site_rate = params.features.rate_limit.site.rate
@ -55,14 +59,20 @@ rate_limit_site = f"{site_rate} per {site_period}"
logger.debug(f"Query rate limit: {rate_limit_query}")
logger.debug(f"Site rate limit: {rate_limit_site}")
# Redis Config for Flask-Limiter storage
# Redis Config for Sanic-Limiter storage
r_limiter_db = params.features.rate_limit.redis_id
r_limiter_url = f'redis://{redis_config["host"]}:{redis_config["port"]}/{r_limiter_db}'
r_limiter = redis.Redis(**redis_config, db=params.features.rate_limit.redis_id)
# Adds Flask config variable for Flask-Limiter
r_limiter_url = "redis://{host}:{port}/{db}".format(
host=params.general.redis_host,
port=params.general.redis_port,
db=params.features.rate_limit.redis_id,
)
r_limiter = aredis.StrictRedis(db=params.features.rate_limit.redis_id, **redis_config)
# Adds Sanic config variable for Sanic-Limiter
app.config.update(RATELIMIT_STORAGE_URL=r_limiter_url)
# Initializes Flask-Limiter
limiter = Limiter(app, key_func=get_ipaddr, default_limits=[rate_limit_site])
# Initializes Sanic-Limiter
limiter = Limiter(app, key_func=get_remote_address, global_limits=[rate_limit_site])
# Prometheus Config
count_data = Counter(
@ -85,49 +95,50 @@ count_notfound = Counter(
@app.route("/metrics")
def metrics():
@limiter.exempt
async def metrics(request):
"""Prometheus metrics"""
content_type_latest = str("text/plain; version=0.0.4; charset=utf-8")
registry = CollectorRegistry()
multiprocess.MultiProcessCollector(registry)
return Response(generate_latest(registry), mimetype=content_type_latest)
latest = generate_latest(registry)
return response.text(latest)
@app.errorhandler(404)
def handle_404(e):
@app.exception(NotFound)
async def handle_404(request, exception):
"""Renders full error page for invalid URI"""
html = render.html("404")
path = request.path
client_addr = get_ipaddr()
count_notfound.labels(e, path, client_addr).inc()
logger.error(f"Error: {e}, Path: {path}, Source: {client_addr}")
return html, 404
client_addr = get_remote_address(request)
count_notfound.labels(exception, path, client_addr).inc()
logger.error(f"Error: {exception}, Path: {path}, Source: {client_addr}")
return response.html(html, status=404)
@app.errorhandler(429)
def handle_429(e):
@app.exception(RateLimitExceeded)
async def handle_429(request, exception):
"""Renders full error page for too many site queries"""
html = render.html("429")
client_addr = get_ipaddr()
count_ratelimit.labels(e, client_addr).inc()
logger.error(f"Error: {e}, Source: {client_addr}")
return html, 429
client_addr = get_remote_address(request)
count_ratelimit.labels(exception, client_addr).inc()
logger.error(f"Error: {exception}, Source: {client_addr}")
return response.html(html, status=429)
@app.errorhandler(500)
def handle_500(e):
@app.exception(ServerError)
async def handle_500(request, exception):
"""General Error Page"""
client_addr = get_ipaddr()
count_errors.labels(500, e, client_addr, None, None, None).inc()
logger.error(f"Error: {e}, Source: {client_addr}")
client_addr = get_remote_address(request)
count_errors.labels(500, exception, client_addr, None, None, None).inc()
logger.error(f"Error: {exception}, Source: {client_addr}")
html = render.html("500")
return html, 500
return response.html(html, status=500)
def clear_cache():
async def clear_cache():
"""Function to clear the Redis cache"""
try:
r_cache.flushdb()
await r_cache.flushdb()
except Exception as error_exception:
logger.error(f"Error clearing cache: {error_exception}")
raise HyperglassError(f"Error clearing cache: {error_exception}")
@ -135,60 +146,59 @@ def clear_cache():
@app.route("/", methods=["GET"])
@limiter.limit(rate_limit_site, error_message="Site")
def site():
async def site(request):
"""Main front-end web application"""
return render.html("index")
return response.html(render.html("index"))
@app.route("/test", methods=["GET"])
def test_route():
async def test_route(request):
"""Test route for various tests"""
html = render.html("500")
return html, 500
return response.html(html, status=500)
@app.route("/locations/<asn>", methods=["GET"])
def get_locations(asn):
async def get_locations(request, asn):
"""
Flask GET route provides a JSON list of all locations for the
selected network/ASN.
GET route provides a JSON list of all locations for the selected
network/ASN.
"""
locations_list_json = json.dumps(devices.locations[asn])
logger.debug(f"Locations list:{devices.locations[asn]}")
return locations_list_json
return response.json(devices.locations[asn])
@app.route("/lg", methods=["POST"])
# Invoke Flask-Limiter with configured rate limit
@limiter.limit(rate_limit_query, error_message="Query")
def hyperglass_main():
async def hyperglass_main(request):
"""
Main backend application initiator. Ingests Ajax POST data from
form submit, passes it to the backend application to perform the
filtering/lookups.
"""
# Get JSON data from Ajax POST
lg_data = request.get_json()
lg_data = request.json
logger.debug(f"Unvalidated input: {lg_data}")
# Return error if no target is specified
if not lg_data["target"]:
logger.debug("No input specified")
return Response(params.messages.no_input, code.invalid)
return response.html(params.messages.no_input, status=code.invalid)
# Return error if no location is selected
if lg_data["location"] not in devices.hostnames:
logger.debug("No selection specified")
return Response(params.messages.no_location, code.invalid)
return response.html(params.messages.no_location, status=code.invalid)
# Return error if no query type is selected
if not Supported.is_supported_query(lg_data["type"]):
logger.debug("No query specified")
return Response(params.messages.no_query_type, code.invalid)
return response.html(params.messages.no_query_type, status=code.invalid)
# Get client IP address for Prometheus logging & rate limiting
client_addr = get_ipaddr()
client_addr = get_remote_address(request)
# Increment Prometheus counter
count_data.labels(
client_addr, lg_data["type"], lg_data["location"], lg_data["target"]
).inc()
logger.debug(f"Client Address: {client_addr}")
# Stringify the form response containing serialized JSON for the
# request, use as key for k/v cache store so each command output
# value is unique
@ -197,24 +207,26 @@ def hyperglass_main():
cache_timeout = params.features.cache.timeout
logger.debug(f"Cache Timeout: {cache_timeout}")
# Check if cached entry exists
if not r_cache.get(cache_key):
if not await r_cache.get(cache_key):
logger.debug(f"Sending query {cache_key} to execute module...")
# Pass request to execution module
cache_value = Execute(lg_data).response()
logger.debug("Validated Response...")
logger.debug(f"Status: {cache_value[1]}")
logger.debug(f"Output:\n {cache_value[0]}")
starttime = time.time()
cache_value = await Execute(lg_data).response()
endtime = time.time()
elapsedtime = round(endtime - starttime, 4)
logger.debug(
f"Execution for query {cache_key} took {elapsedtime} seconds to run."
)
# Create a cache entry
r_cache.set(cache_key, str(cache_value))
r_cache.expire(cache_key, cache_timeout)
await r_cache.set(cache_key, str(cache_value))
await r_cache.expire(cache_key, cache_timeout)
logger.debug(f"Added cache entry for query: {cache_key}")
logger.error(f"Unable to add output to cache: {cache_key}")
# If it does, return the cached entry
cache_response = r_cache.get(cache_key)
response = literal_eval(cache_response)
response_output, response_status = response
cache_response = await r_cache.get(cache_key)
# Serialize stringified tuple response from cache
serialized_response = literal_eval(cache_response)
response_output, response_status = serialized_response
logger.debug(f"Cache match for: {cache_key}, returning cached entry")
logger.debug(f"Cache Output: {response_output}")
@ -229,4 +241,4 @@ def hyperglass_main():
lg_data["location"],
lg_data["target"],
).inc()
return Response(*response)
return response.html(response_output, status=response_status)
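A condensed sketch of the cache round-trip inside hyperglass_main() above, assuming an aredis.StrictRedis instance and a coroutine context; Execute is the class from the command module shown earlier in this diff, and the stringified (output, status) tuple is stored and later rebuilt with literal_eval exactly as in the handler:

from ast import literal_eval

async def cached_response(r_cache, cache_key, lg_data, cache_timeout):
    # Placeholder wrapper; the real handler also does rate limiting and metrics.
    if not await r_cache.get(cache_key):
        cache_value = await Execute(lg_data).response()   # -> (output, status)
        await r_cache.set(cache_key, str(cache_value))
        await r_cache.expire(cache_key, cache_timeout)
    return literal_eval(await r_cache.get(cache_key))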

View file

@ -7,7 +7,7 @@ Requires=redis-server.service
User=www-data
Group=www-data
WorkingDirectory=/opt/hyperglass
ExecStart=/usr/local/bin/gunicorn -c /opt/hyperglass/hyperglass/gunicorn_config.py hyperglass.wsgi
ExecStart=/usr/local/bin/python3 -m sanic hyperglass.web.app
[Install]
WantedBy=multi-user.target

View file

@ -81,7 +81,7 @@ link: <a href="#" id="helplink_bgpc">{{ general.org_name }} BGP Communities</a>
Performs BGP table lookup based on [Extended](https://tools.ietf.org/html/rfc4360) \
or [Large](https://tools.ietf.org/html/rfc8195) community value.
{{ info["link"] }}
{{ info["link"] | safe }}
""",
"bgp_aspath": """
---
@ -90,7 +90,7 @@ link: <a href="#" id="helplink_bgpa">Supported BGP AS Path Expressions</a>
---
Performs BGP table lookup based on `AS_PATH` regular expression.
{{ info["link"] }}
{{ info["link"] | safe }}
""",
"ping": """
---

View file

@ -9,7 +9,7 @@ clearPage();
// Bulma Togglable Dropdown - help text
$('#help-dropdown').click(
function(event) {
function (event) {
event.stopPropagation();
$(this).toggleClass('is-active');
}
@ -18,21 +18,21 @@ $('#help-dropdown').click(
// ClipboardJS Elements
var btn_copy = document.getElementById('btn-copy');
var clipboard = new ClipboardJS(btn_copy);
clipboard.on('success', function(e) {
clipboard.on('success', function (e) {
console.log(e);
$('#btn-copy').addClass('is-success').addClass('is-outlined');
$('#copy-icon').removeClass('icofont-ui-copy').addClass('icofont-check');
setTimeout(function() {
setTimeout(function () {
$('#btn-copy').removeClass('is-success').removeClass('is-outlined');
$('#copy-icon').removeClass('icofont-check').addClass('icofont-ui-copy');
}, 1000);
});
clipboard.on('error', function(e) {
clipboard.on('error', function (e) {
console.log(e);
});
$('.modal-background, .modal-close').click(
function(event) {
function (event) {
event.stopPropagation();
$('.modal').removeClass("is-active");
}
@ -40,21 +40,21 @@ $('.modal-background, .modal-close').click(
// Adjust behavior of help text dropdown based on device screen size
$('#help-dropdown-button').click(
function(event) {
function (event) {
if (window.innerWidth < 1024) {
$('#help-dropdown').removeClass('is-right');
$('.lg-help').addClass('lg-help-mobile').removeClass('lg-help');
}
$('#help-dropdown').removeClass('is-right');
$('.lg-help').addClass('lg-help-mobile').removeClass('lg-help');
}
}
);
function adjustDropdowns() {
var actual_width = window.innerWidth;
if (actual_width < 1024) {
$('#lg-netlocdropdown').removeClass('has-addons').removeClass('has-addons-centered').addClass('is-grouped').addClass('is-grouped-centered').addClass('is-grouped-multiline');
$('#network').css('width', actual_width * 0.85);
$('#location').css('width', actual_width * 0.85);
}
var actual_width = window.innerWidth;
if (actual_width < 1024) {
$('#lg-netlocdropdown').removeClass('has-addons').removeClass('has-addons-centered').addClass('is-grouped').addClass('is-grouped-centered').addClass('is-grouped-multiline');
$('#network').css('width', actual_width * 0.85);
$('#location').css('width', actual_width * 0.85);
}
}
function clearErrors() {
@ -81,179 +81,179 @@ function clearPage() {
}
function prepResults() {
progress.show();
resultsbox.show();
progress.show();
resultsbox.show();
}
$(document).ready(function() {
$(document).ready(function () {
var defaultasn = $("#network").val();
$.ajax({
url: '/locations/'+defaultasn,
url: '/locations/' + defaultasn,
context: document.body,
type: 'get',
success: function(data) {
selectedRouters = JSON.parse(data);
success: function (data) {
selectedRouters = data;
console.log(selectedRouters);
updateRouters(selectedRouters);
},
error: function(err) {
error: function (err) {
console.log(err);
}
});
});
$('#network').on('change', (function(event) {
var asn = $("select[id=network").val();
$('#location').children(":not(#text_location)").remove();
$.ajax({
url: '/locations/'+asn,
type: 'get',
success: function(data) {
clearPage();
updateRouters(JSON.parse(data));
},
error: function(err) {
console.log(err);
}
});
$('#network').on('change', (function (event) {
var asn = $("select[id=network").val();
$('#location').children(":not(#text_location)").remove();
$.ajax({
url: '/locations/' + asn,
type: 'get',
success: function (data) {
clearPage();
updateRouters(JSON.parse(data));
},
error: function (err) {
console.log(err);
}
});
}));
function updateRouters(locations) {
locations.forEach(function(r) {
locations.forEach(function (r) {
$('#location').append($("<option>").attr('value', r.hostname).text(r.display_name));
});
}
$('#helplink_bgpc').click(function(event) {
$('#helplink_bgpc').click(function (event) {
$('#help_bgp_community').addClass("is-active");
});
$('#helplink_bgpa').click(function(event) {
$('#helplink_bgpa').click(function (event) {
$('#help_bgp_aspath').addClass("is-active");
});
// Submit Form Action
$('#lgForm').on('submit', function() {
submitForm();
$('#lgForm').on('submit', function () {
submitForm();
});
function submitForm() {
clearErrors();
var type = $('#type option:selected').val();
var type_title = $('#type option:selected').text();
var network = $('#network option:selected').val();
var location = $('#location option:selected').val();
var location_name = $('#location option:selected').text();
var target = $('#target').val();
clearErrors();
var type = $('#type option:selected').val();
var type_title = $('#type option:selected').text();
var network = $('#network option:selected').val();
var location = $('#location option:selected').val();
var location_name = $('#location option:selected').text();
var target = $('#target').val();
var tags = [
'<div class="field is-grouped is-grouped-multiline">',
'<div class="control">',
'<div class="tags has-addons">',
'<span class="tag lg-tag-loc-title">AS',
network,
'</span>',
'<span class="tag lg-tag-loc">',
location_name,
'</span>',
'</div>',
'</div>',
var tags = [
'<div class="field is-grouped is-grouped-multiline">',
'<div class="control">',
'<div class="tags has-addons">',
'<span class="tag lg-tag-type-title">',
type_title,
'</span>',
'<span class="tag lg-tag-type">',
target,
'</span>',
'</div>',
'<span class="tag lg-tag-loc-title">AS',
network,
'</span>',
'<span class="tag lg-tag-loc">',
location_name,
'</span>',
'</div>',
'</div>'
].join('');
'</div>',
'<div class="control">',
'<div class="tags has-addons">',
'<span class="tag lg-tag-type-title">',
type_title,
'</span>',
'<span class="tag lg-tag-type">',
target,
'</span>',
'</div>',
'</div>',
'</div>'
].join('');
$('#output').text("");
$('#queryInfo').text("");
$('#queryInfo').html(tags);
$('#output').text("");
$('#queryInfo').text("");
$('#queryInfo').html(tags);
$.ajax(
{
url: '/lg',
type: 'POST',
data: JSON.stringify(
{
location: location,
type: type,
target: target
}
),
contentType: "application/json; charset=utf-8",
context: document.body,
readyState: prepResults(),
statusCode: {
200: function(response, code) {
response_html = [
'<br>',
'<div class="content">',
'<p class="query-output" id="output">',
response,
'</p>',
'</div>',
];
progress.hide();
$('#output').html(response_html);
},
401: function(response, code) {
response_html = [
'<br>',
'<div class="notification is-danger">',
response.responseText,
'</div>',
].join('');
clearPage();
target_error.show();
target_input.addClass('is-danger');
target_error.html(response_html);
},
405: function(response, code) {
response_html = [
'<br>',
'<div class="notification is-warning">',
response.responseText,
'</div>',
].join('');
clearPage();
target_error.show();
target_input.addClass('is-warning');
target_error.html(response_html);
},
415: function(response, code) {
response_html = [
'<br>',
'<div class="notification is-danger">',
response.responseText,
'</div>',
].join('');
clearPage();
target_error.show();
target_input.addClass('is-danger');
target_error.html(response_html);
},
429: function(response, code) {
clearPage();
$("#ratelimit").addClass("is-active");
},
504: function(response, code) {
response_html = [
'<br>',
'<div class="notification is-danger">',
response.responseText,
'</div>',
].join('');
clearPage();
target_error.show();
target_error.html(response_html);
}
}
}
);
$.ajax(
{
url: '/lg',
type: 'POST',
data: JSON.stringify(
{
location: location,
type: type,
target: target
}
),
contentType: "application/json; charset=utf-8",
context: document.body,
readyState: prepResults(),
statusCode: {
200: function (response, code) {
response_html = [
'<br>',
'<div class="content">',
'<p class="query-output" id="output">',
response,
'</p>',
'</div>',
];
progress.hide();
$('#output').html(response_html);
},
401: function (response, code) {
response_html = [
'<br>',
'<div class="notification is-danger">',
response.responseText,
'</div>',
].join('');
clearPage();
target_error.show();
target_input.addClass('is-danger');
target_error.html(response_html);
},
405: function (response, code) {
response_html = [
'<br>',
'<div class="notification is-warning">',
response.responseText,
'</div>',
].join('');
clearPage();
target_error.show();
target_input.addClass('is-warning');
target_error.html(response_html);
},
415: function (response, code) {
response_html = [
'<br>',
'<div class="notification is-danger">',
response.responseText,
'</div>',
].join('');
clearPage();
target_error.show();
target_input.addClass('is-danger');
target_error.html(response_html);
},
429: function (response, code) {
clearPage();
$("#ratelimit").addClass("is-active");
},
504: function (response, code) {
response_html = [
'<br>',
'<div class="notification is-danger">',
response.responseText,
'</div>',
].join('');
clearPage();
target_error.show();
target_error.html(response_html);
}
}
}
);
}

hyperglass/web.py (new file)
View file

@ -0,0 +1,50 @@
"""
Hyperglass web app initiator. Launches Sanic with appropriate number of
workers per their documentation (equal to number of CPU cores).
"""
# Override web server listen host & port if necessary:
host = "localhost"
port = 8001
try:
import multiprocessing
from hyperglass import render
from hyperglass import hyperglass
from hyperglass.configuration import params
except ImportError as import_error:
raise RuntimeError(import_error)
if params.general.debug:
debug = True
access_log = False
elif not params.general.debug:
debug = False
access_log = True
# Override the number of web server workers if necessary:
workers = multiprocessing.cpu_count()
def start():
"""
Compiles configured Sass variables to CSS, then starts Sanic web
server.
"""
try:
render.css()
except Exception as render_error:
raise RuntimeError(render_error)
try:
hyperglass.app.run(
host=host,
port=port,
debug=params.general.debug,
workers=workers,
access_log=access_log,
)
except Exception as hyperglass_error:
raise RuntimeError(hyperglass_error)
app = start()
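For quick local testing the same Sanic app can be started directly; a minimal sketch that mirrors start() without the Sass compile step (host/port values are the defaults declared above):

from hyperglass import hyperglass

if __name__ == "__main__":
    # Single worker with debug on; production uses web.start() / the systemd unit instead.
    hyperglass.app.run(host="localhost", port=8001, workers=1, debug=True)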

View file

@ -1,17 +0,0 @@
"""
https://github.com/checktheroads/hyperglass
Gunicorn WSGI Target
"""
# Standard Library Imports
import os
# Project Imports
import hyperglass.hyperglass
application = hyperglass.hyperglass.app
hyperglass_root = os.path.dirname(hyperglass.__file__)
static = os.path.join(hyperglass_root, "static")
if __name__ == "__main__":
application.run(static_folder=static)

View file

@ -1,14 +1,17 @@
Flask==1.0.2
Flask-Limiter==1.0.1
redis==3.2.1
gunicorn==19.9.0
aredis==1.1.5
click==6.7
logzero==1.5.0
netmiko==2.3.3
requests==2.21.0
hiredis==1.0.0
http3==0.6.7
jinja2==2.10.1
libsass==0.18.0
logzero==1.5.0
markdown2==2.3.7
netmiko==2.3.3
passlib==1.7.1
prometheus_client==0.7.0
pydantic==0.29
pyyaml==5.1.1
redis==3.2.1
sanic_limiter==0.1.3
sanic==19.6.2
sshtunnel==0.1.5

View file

@ -1,6 +1,6 @@
#!/usr/bin/env python3
"""
Starts hyperglass with the Flask development server
Starts hyperglass with the Sanic web server
"""
import os
import sys
@ -12,15 +12,15 @@ parent_directory = os.path.dirname(working_directory)
def construct_test(test_query, location, test_target):
"""Constructs JSON POST data for test_hyperglass function"""
"""Constructs JSON POST data for test_hyperglass function."""
constructed_query = json.dumps(
{"type": test_query, "location": location, "target": test_target}
)
return constructed_query
def flask_dev_server(host, port):
"""Starts Flask development server for testing without WSGI/Reverse Proxy"""
def test_server(host, port):
"""Starts Sanic web server for testing."""
try:
sys.path.insert(0, parent_directory)
@ -28,7 +28,7 @@ def flask_dev_server(host, port):
from hyperglass import hyperglass
render.css()
logger.info("Starting Flask development server")
logger.info("Starting Sanic web server...")
hyperglass.app.run(host=host, debug=True, port=port)
except:
logger.error("Exception occurred while trying to start test server...")
@ -36,4 +36,4 @@ def flask_dev_server(host, port):
if __name__ == "__main__":
flask_dev_server("localhost", 5000)
test_server("localhost", 5000)

View file

@ -2,15 +2,18 @@
"""
Runs tests against test hyperglass instance
"""
import asyncio
import os
import sys
import json
import requests
import http3
import logzero
working_directory = os.path.dirname(os.path.abspath(__file__))
parent_directory = os.path.dirname(working_directory)
# Async loop
loop = asyncio.get_event_loop()
# Logzero Configuration
logger = logzero.logger
log_level = 10
@ -25,19 +28,13 @@ logzero_config = logzero.setup_default_logger(
)
def construct_test(test_query, location, test_target):
"""Constructs JSON POST data for test_hyperglass function"""
constructed_query = json.dumps(
{"type": test_query, "location": location, "target": test_target}
)
return constructed_query
def ci_hyperglass_test(
async def ci_hyperglass_test(
location, target_ipv4, target_ipv6, requires_ipv6_cidr, test_blacklist
):
"""Tests hyperglass backend by making use of requests library to mimic the JS Ajax POST \
performed by the front end."""
"""
Tests hyperglass backend by making use of the http3 library to mimic
the JS Ajax POST performed by the front end.
"""
invalid_ip = "this_ain't_an_ip!"
invalid_aspath = ".*"
ipv4_cidr = "1.1.1.0/24"
@ -45,14 +42,15 @@ def ci_hyperglass_test(
ipv6_cidr = "2606:4700:4700::/48"
test_headers = {"Content-Type": "application/json"}
test_endpoint = "http://localhost:5000/lg"
http_client = http3.AsyncClient()
# No Query Type Test
try:
logger.info("Starting No Query Type test...")
test_query = construct_test("", location, target_ipv4)
hg_response = requests.post(
test_endpoint, headers=test_headers, data=test_query
test_query = {"type": "", "location": location, "target": target_ipv4}
hg_response = await http_client.post(
test_endpoint, headers=test_headers, json=test_query
)
if not hg_response.status_code in range(400, 500):
if hg_response.status_code not in range(400, 500):
logger.error(hg_response.text)
raise RuntimeError("No Query Type test failed")
except:
@ -61,11 +59,11 @@ def ci_hyperglass_test(
# No Location Test
try:
logger.info("Starting No Location test...")
test_query = construct_test("bgp_route", "", target_ipv6)
hg_response = requests.post(
test_endpoint, headers=test_headers, data=test_query
test_query = {"type": "bgp_route", "location": "", "target": target_ipv6}
hg_response = await http_client.post(
test_endpoint, headers=test_headers, json=test_query
)
if not hg_response.status_code in range(400, 500):
if hg_response.status_code not in range(400, 500):
logger.error(hg_response.text)
raise RuntimeError("No Location test failed")
except:
@ -74,11 +72,11 @@ def ci_hyperglass_test(
# No Target Test
try:
logger.info("Starting No Target test...")
test_query = construct_test("bgp_route", location, "")
hg_response = requests.post(
test_endpoint, headers=test_headers, data=test_query
test_query = {"type": "bgp_route", "location": location, "target": ""}
hg_response = await http_client.post(
test_endpoint, headers=test_headers, json=test_query
)
if not hg_response.status_code in range(400, 500):
if hg_response.status_code not in range(400, 500):
logger.error(hg_response.text)
raise RuntimeError("No Target test failed")
except:
@ -87,11 +85,11 @@ def ci_hyperglass_test(
# Invalid BGP Route Test
try:
logger.info("Starting Invalid BGP IPv4 Route test...")
test_query = construct_test("bgp_route", location, invalid_ip)
hg_response = requests.post(
test_endpoint, headers=test_headers, data=test_query
test_query = {"type": "bgp_route", "location": location, "target": invalid_ip}
hg_response = await http_client.post(
test_endpoint, headers=test_headers, json=test_query
)
if not hg_response.status_code in range(400, 500):
if hg_response.status_code not in range(400, 500):
logger.error(hg_response.text)
raise RuntimeError("Invalid BGP IPv4 Route test failed")
except:
@ -100,11 +98,15 @@ def ci_hyperglass_test(
if requires_ipv6_cidr:
try:
logger.info("Starting Requires IPv6 CIDR test...")
test_query = construct_test("bgp_route", requires_ipv6_cidr, ipv6_host)
hg_response = requests.post(
test_endpoint, headers=test_headers, data=test_query
test_query = {
"type": "bgp_route",
"location": requires_ipv6_cidr,
"target": ipv6_host,
}
hg_response = await http_client.post(
test_endpoint, headers=test_headers, json=test_query
)
if not hg_response.status_code in range(400, 500):
if hg_response.status_code not in range(400, 500):
logger.error(hg_response.text)
raise RuntimeError("Requires IPv6 CIDR test failed")
except:
@ -113,11 +115,15 @@ def ci_hyperglass_test(
# Invalid BGP Community Test
try:
logger.info("Starting Invalid BGP Community test...")
test_query = construct_test("bgp_community", location, target_ipv4)
hg_response = requests.post(
test_endpoint, headers=test_headers, data=test_query
test_query = {
"type": "bgp_community",
"location": location,
"target": target_ipv4,
}
hg_response = await http_client.post(
test_endpoint, headers=test_headers, json=test_query
)
if not hg_response.status_code in range(400, 500):
if hg_response.status_code not in range(400, 500):
logger.error(hg_response.text)
raise RuntimeError("Invalid BGP Community test failed")
except:
@ -126,11 +132,15 @@ def ci_hyperglass_test(
# Invalid BGP AS_PATH Test
try:
logger.info("Starting invalid BGP AS_PATH test...")
test_query = construct_test("bgp_aspath", location, invalid_aspath)
hg_response = requests.post(
test_endpoint, headers=test_headers, data=test_query
test_query = {
"type": "bgp_aspath",
"location": location,
"target": invalid_aspath,
}
hg_response = await http_client.post(
test_endpoint, headers=test_headers, json=test_query
)
if not hg_response.status_code in range(400, 500):
if hg_response.status_code not in range(400, 500):
logger.error(hg_response.text)
raise RuntimeError("Invalid BGP AS_PATH test failed")
except:
@ -139,11 +149,11 @@ def ci_hyperglass_test(
# Invalid IPv4 Ping Test
try:
logger.info("Starting Invalid IPv4 Ping test...")
test_query = construct_test("ping", location, ipv4_cidr)
hg_response = requests.post(
test_endpoint, headers=test_headers, data=test_query
test_query = {"target": "ping", "location": location, "target": ipv4_cidr}
hg_response = await http_client.post(
test_endpoint, headers=test_headers, json=test_query
)
if not hg_response.status_code in range(400, 500):
if hg_response.status_code not in range(400, 500):
logger.error(hg_response.text)
raise RuntimeError("Invalid IPv4 Ping test failed")
except:
@ -152,11 +162,11 @@ def ci_hyperglass_test(
# Invalid IPv6 Ping Test
try:
logger.info("Starting Invalid IPv6 Ping test...")
test_query = construct_test("ping", location, ipv6_cidr)
hg_response = requests.post(
test_endpoint, headers=test_headers, data=test_query
test_query = {"type": "ping", "location": location, "target": ipv6_cidr}
hg_response = await http_client.post(
test_endpoint, headers=test_headers, json=test_query
)
if not hg_response.status_code in range(400, 500):
if hg_response.status_code not in range(400, 500):
logger.error(hg_response.text)
raise RuntimeError("Invalid IPv6 Ping test failed")
except:
@ -165,11 +175,15 @@ def ci_hyperglass_test(
# Blacklist Test
try:
logger.info("Starting Blacklist test...")
test_query = construct_test("bgp_route", location, test_blacklist)
hg_response = requests.post(
test_endpoint, headers=test_headers, data=test_query
test_query = {
"type": "bgp_route",
"location": location,
"target": test_blacklist,
}
hg_response = await http_client.post(
test_endpoint, headers=test_headers, json=test_query
)
if not hg_response.status_code in range(400, 500):
if hg_response.status_code not in range(400, 500):
logger.error(hg_response.text)
raise RuntimeError("Blacklist test failed")
except:
@ -178,6 +192,8 @@ def ci_hyperglass_test(
if __name__ == "__main__":
ci_hyperglass_test(
"pop2", "1.1.1.0/24", "2606:4700:4700::/48", "pop1", "100.64.0.1"
loop.run_until_complete(
ci_hyperglass_test(
"pop2", "1.1.1.0/24", "2606:4700:4700::/48", "pop1", "100.64.0.1"
)
)