From b7747cf1df7a622b75ac90c43a08fbcc611bd1ff Mon Sep 17 00:00:00 2001 From: Matt Love Date: Mon, 15 Jul 2019 02:30:42 -0700 Subject: [PATCH] :zap: Async all the things MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Flask → Sanic, Requests → HTTP3, Add SSHTunnel for SSH Proxying, Remove Gunicorn dependency --- .gitignore | 1 + hyperglass/command/construct.py | 68 ++--- hyperglass/command/execute.py | 371 +++++++++++--------------- hyperglass/command/validate.py | 11 +- hyperglass/configuration/models.py | 3 +- hyperglass/constants.py | 22 +- hyperglass/exceptions.py | 22 ++ hyperglass/gunicorn_config.py.example | 71 ----- hyperglass/hyperglass.py | 160 ++++++----- hyperglass/hyperglass.service.example | 2 +- hyperglass/render/__init__.py | 4 +- hyperglass/static/js/hyperglass.js | 306 ++++++++++----------- hyperglass/web.py | 50 ++++ hyperglass/wsgi.py | 17 -- requirements.txt | 14 +- 15 files changed, 535 insertions(+), 587 deletions(-) delete mode 100644 hyperglass/gunicorn_config.py.example create mode 100644 hyperglass/web.py delete mode 100644 hyperglass/wsgi.py diff --git a/.gitignore b/.gitignore index 79b05d4..fd75eee 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,6 @@ test.py .DS_Store +.idea *.sass-cache # # Github Default from https://github.com/martinohanlon/flightlight/issues/1 diff --git a/hyperglass/command/construct.py b/hyperglass/command/construct.py index e37adb6..aa9aa02 100644 --- a/hyperglass/command/construct.py +++ b/hyperglass/command/construct.py @@ -22,8 +22,9 @@ class Construct: input parameters. 
""" - def __init__(self, device): + def __init__(self, device, transport): self.device = device + self.transport = transport def get_src(self, ver): """ @@ -48,15 +49,17 @@ class Construct: cmd_path = f"{nos}.{afi}.{query_type}" return operator.attrgetter(cmd_path)(commands) - def ping(self, transport, target): + def ping(self, target): """Constructs ping query parameters from pre-validated input""" query_type = "ping" - logger.debug(f"Constructing {query_type} query for {target} via {transport}...") + logger.debug( + f"Constructing {query_type} query for {target} via {self.transport}..." + ) query = None ip_version = ipaddress.ip_network(target).version afi = f"ipv{ip_version}" source = self.get_src(ip_version) - if transport == "rest": + if self.transport == "rest": query = json.dumps( { "query_type": query_type, @@ -65,24 +68,25 @@ class Construct: "target": target, } ) - elif transport == "scrape": + elif self.transport == "scrape": conf_command = self.device_commands(self.device.nos, afi, query_type) - fmt_command = conf_command.format(target=target, source=source) - query = (self.device.address.exploded, self.device.nos, fmt_command) + query = conf_command.format(target=target, source=source) logger.debug(f"Constructed query: {query}") return query - def traceroute(self, transport, target): + def traceroute(self, target): """ Constructs traceroute query parameters from pre-validated input. """ query_type = "traceroute" - logger.debug(f"Constructing {query_type} query for {target} via {transport}...") + logger.debug( + f"Constructing {query_type} query for {target} via {self.transport}..." 
+ ) query = None ip_version = ipaddress.ip_network(target).version afi = f"ipv{ip_version}" source = self.get_src(ip_version) - if transport == "rest": + if self.transport == "rest": query = json.dumps( { "query_type": query_type, @@ -92,62 +96,64 @@ class Construct: } ) - elif transport == "scrape": + elif self.transport == "scrape": conf_command = self.device_commands(self.device.nos, afi, query_type) - fmt_command = conf_command.format(target=target, source=source) - query = (self.device.address.exploded, self.device.nos, fmt_command) + query = conf_command.format(target=target, source=source) logger.debug(f"Constructed query: {query}") return query - def bgp_route(self, transport, target): + def bgp_route(self, target): """ Constructs bgp_route query parameters from pre-validated input. """ query_type = "bgp_route" - logger.debug(f"Constructing {query_type} query for {target} via {transport}...") + logger.debug( + f"Constructing {query_type} query for {target} via {self.transport}..." + ) query = None ip_version = ipaddress.ip_network(target).version afi = f"ipv{ip_version}" - if transport == "rest": + if self.transport == "rest": query = json.dumps({"query_type": query_type, "afi": afi, "target": target}) - if transport == "scrape": + elif self.transport == "scrape": conf_command = self.device_commands(self.device.nos, afi, query_type) - fmt_command = conf_command.format(target=target) - query = (self.device.address.exploded, self.device.nos, fmt_command) + query = conf_command.format(target=target) logger.debug(f"Constructed query: {query}") return query - def bgp_community(self, transport, target): + def bgp_community(self, target): """ Constructs bgp_community query parameters from pre-validated input. """ query_type = "bgp_community" - logger.debug(f"Constructing {query_type} query for {target} via {transport}...") + logger.debug( + f"Constructing {query_type} query for {target} via {self.transport}..." 
+ ) afi = "dual" query = None - if transport == "rest": + if self.transport == "rest": query = json.dumps({"query_type": query_type, "afi": afi, "target": target}) - if transport == "scrape": + elif self.transport == "scrape": conf_command = self.device_commands(self.device.nos, afi, query_type) - fmt_command = conf_command.format(target=target) - query = (self.device.address.exploded, self.device.nos, fmt_command) + query = conf_command.format(target=target) logger.debug(f"Constructed query: {query}") return query - def bgp_aspath(self, transport, target): + def bgp_aspath(self, target): """ Constructs bgp_aspath query parameters from pre-validated input. """ query_type = "bgp_aspath" - logger.debug(f"Constructing {query_type} query for {target} via {transport}...") + logger.debug( + f"Constructing {query_type} query for {target} via {self.transport}..." + ) afi = "dual" query = None - if transport == "rest": + if self.transport == "rest": query = json.dumps({"query_type": query_type, "afi": afi, "target": target}) - if transport == "scrape": + elif self.transport == "scrape": conf_command = self.device_commands(self.device.nos, afi, query_type) - fmt_command = conf_command.format(target=target) - query = (self.device.address.exploded, self.device.nos, fmt_command) + query = conf_command.format(target=target) logger.debug(f"Constructed query: {query}") return query diff --git a/hyperglass/command/execute.py b/hyperglass/command/execute.py index 9ba04ae..8f3d032 100644 --- a/hyperglass/command/execute.py +++ b/hyperglass/command/execute.py @@ -4,21 +4,15 @@ returns errors if input is invalid. Passes validated parameters to construct.py, which is used to build & run the Netmiko connectoins or hyperglass-frr API calls, returns the output back to the front end. 
""" -# Standard Library Imports -import json -import time -import asyncio - # Third Party Imports import http3 -import http3.exceptions +import sshtunnel from logzero import logger from netmiko import ConnectHandler from netmiko import NetMikoAuthenticationException from netmiko import NetmikoAuthError from netmiko import NetmikoTimeoutError from netmiko import NetMikoTimeoutException -from netmiko import redispatch # Project Imports from hyperglass.command.construct import Construct @@ -30,47 +24,144 @@ from hyperglass.configuration import params from hyperglass.configuration import proxies from hyperglass.constants import Supported from hyperglass.constants import code +from hyperglass.constants import protocol_map +from hyperglass.exceptions import CantConnect -class Rest: - """Executes connections to REST API devices""" +class Connect: + """ + Parent class for all connection types: - def __init__(self, transport, device, query_type, target): - self.transport = transport - self.device = device + scrape() connects to devices via SSH for "screen scraping" + + rest() connects to devices via HTTP for RESTful API communication + """ + + def __init__(self, device_config, query_type, target, transport): + self.device_config = device_config self.query_type = query_type self.target = target - self.cred = getattr(credentials, self.device.credential) - self.query = getattr(Construct(self.device), self.query_type)( - self.transport, self.target - ) - - async def frr(self): - """Sends HTTP POST to router running the hyperglass-frr API""" - logger.debug(f"FRR host params:\n{self.device}") - logger.debug(f"Query parameters: {self.query}") + self.transport = transport + self.cred = getattr(credentials, device_config.credential) + self.query = getattr(Construct(device_config, transport), query_type)(target) + async def scrape(self): + """ + Connects to the router via Netmiko library, return the command + output. 
If an SSH proxy is enabled, creates an SSH tunnel via + the sshtunnel library, and netmiko uses the local binding to + connect to the remote device. + """ + response = None try: - headers = { - "Content-Type": "application/json", - "X-API-Key": self.cred.password.get_secret_value(), - } - frr_endpoint = ( - f"http://{self.device.address.exploded}:{self.device.port}/frr" - ) - - logger.debug(f"HTTP Headers: {headers}") - logger.debug(f"FRR endpoint: {frr_endpoint}") + if self.device_config.proxy: + device_proxy = getattr(proxies, self.device_config.proxy) + logger.debug( + f"Proxy: {device_proxy.address.compressed}:{device_proxy.port}" + ) + logger.debug( + "Connecting to {dev} via sshtunnel library...".format( + dev=self.device_config.proxy + ) + ) + with sshtunnel.open_tunnel( + device_proxy.address.compressed, + device_proxy.port, + ssh_username=device_proxy.username, + ssh_password=device_proxy.password.get_secret_value(), + remote_bind_address=( + self.device_config.address.compressed, + self.device_config.port, + ), + local_bind_address=("localhost", 0), + ) as tunnel: + logger.debug(f"Established tunnel with {self.device_config.proxy}") + scrape_host = { + "host": "localhost", + "port": tunnel.local_bind_port, + "device_type": self.device_config.nos, + "username": self.cred.username, + "password": self.cred.password.get_secret_value(), + "fast_cli": True, + } + logger.debug(f"Local binding: localhost:{tunnel.local_bind_port}") + try: + logger.debug( + "Connecting to {dev} via Netmiko library...".format( + dev=self.device_config.location + ) + ) + nm_connect_direct = ConnectHandler(**scrape_host) + response = nm_connect_direct.send_command(self.query) + except ( + NetMikoAuthenticationException, + NetMikoTimeoutException, + NetmikoAuthError, + NetmikoTimeoutError, + sshtunnel.BaseSSHTunnelForwarderError, + ) as scrape_error: + raise CantConnect(scrape_error) + else: + scrape_host = { + "host": self.device_config.address, + "device_type": 
self.device_config.nos, + "username": self.cred.username, + "password": self.cred.password.get_secret_value(), + "fast_cli": True, + } + try: + logger.debug( + "Connecting to {dev} via Netmiko library...".format( + dev=self.device_config.location + ) + ) + nm_connect_direct = ConnectHandler(**scrape_host) + response = nm_connect_direct.send_command(self.query) + except ( + NetMikoAuthenticationException, + NetMikoTimeoutException, + NetmikoAuthError, + NetmikoTimeoutError, + sshtunnel.BaseSSHTunnelForwarderError, + ) as scrape_error: + raise CantConnect(scrape_error) + if not response: + raise CantConnect("No response") + status = code.valid + logger.debug(f"Output for query: {self.query}:\n{response}") + except CantConnect as scrape_error: + logger.error(scrape_error) + response = params.messages.general + status = code.invalid + return response, status + async def rest(self): + """Sends HTTP POST to router running a hyperglass API agent""" + logger.debug(f"Query parameters: {self.query}") + uri = Supported.map_rest(self.device_config.nos) + headers = { + "Content-Type": "application/json", + "X-API-Key": self.cred.password.get_secret_value(), + } + http_protocol = protocol_map.get(self.device_config.port, "http") + endpoint = "{protocol}://{addr}:{port}/{uri}".format( + protocol=http_protocol, + addr=self.device_config.address.exploded, + port=self.device_config.port, + uri=uri, + ) + logger.debug(f"HTTP Headers: {headers}") + logger.debug(f"URL endpoint: {endpoint}") + try: http_client = http3.AsyncClient() - frr_response = await http_client.post( - frr_endpoint, headers=headers, json=self.query, timeout=7 + raw_response = await http_client.post( + endpoint, headers=headers, json=self.query, timeout=7 ) - response = frr_response.text - status = frr_response.status_code + response = raw_response.text + status = raw_response.status_code - logger.debug(f"FRR status code: {status}") - logger.debug(f"FRR response text:\n{response}") + logger.debug(f"HTTP status code: 
{status}") + logger.debug(f"Output for query {self.query}:\n{response}") except ( http3.exceptions.ConnectTimeout, http3.exceptions.CookieConflict, @@ -87,167 +178,21 @@ class Rest: http3.exceptions.Timeout, http3.exceptions.TooManyRedirects, http3.exceptions.WriteTimeout, + OSError, ) as rest_error: - logger.error( - f"Error connecting to device {self.device.location}: {rest_error}" - ) + logger.error(f"Error connecting to device {self.device_config.location}") + logger.error(rest_error) + response = params.messages.general status = code.invalid return response, status - def bird(self): - """Sends HTTP POST to router running the hyperglass-bird API""" - logger.debug(f"BIRD host params:\n{self.device}") - logger.debug(f"Query parameters: {self.query}") - - try: - headers = { - "Content-Type": "application/json", - "X-API-Key": self.cred.password.get_secret_value(), - } - bird_endpoint = ( - f"http://{self.device.address.exploded}:{self.device.port}/bird" - ) - - logger.debug(f"HTTP Headers: {headers}") - logger.debug(f"BIRD endpoint: {bird_endpoint}") - - http_client = http3.AsyncClient() - bird_response = http_client.post( - bird_endpoint, headers=headers, json=self.query, timeout=7 - ) - response = bird_response.text - status = bird_response.status_code - - logger.debug(f"BIRD status code: {status}") - logger.debug(f"BIRD response text:\n{response}") - except ( - http3.exceptions.ConnectTimeout, - http3.exceptions.CookieConflict, - http3.exceptions.DecodingError, - http3.exceptions.InvalidURL, - http3.exceptions.PoolTimeout, - http3.exceptions.ProtocolError, - http3.exceptions.ReadTimeout, - http3.exceptions.RedirectBodyUnavailable, - http3.exceptions.RedirectLoop, - http3.exceptions.ResponseClosed, - http3.exceptions.ResponseNotRead, - http3.exceptions.StreamConsumed, - http3.exceptions.Timeout, - http3.exceptions.TooManyRedirects, - http3.exceptions.WriteTimeout, - ) as rest_error: - logger.error(f"Error connecting to device {self.device}: {rest_error}") - 
response = params.messages.general - status = code.invalid - return response, status - - -class Netmiko: - """Executes connections to Netmiko devices""" - - def __init__(self, transport, device, query_type, target): - self.device = device - self.target = target - self.cred = getattr(credentials, self.device.credential) - self.location, self.nos, self.command = getattr(Construct(device), query_type)( - transport, target - ) - self.nm_host = { - "host": self.location, - "device_type": self.nos, - "username": self.cred.username, - "password": self.cred.password.get_secret_value(), - "global_delay_factor": 0.5, - } - - def direct(self): - """ - Connects to the router via netmiko library, return the command - output. - """ - logger.debug(f"Connecting to {self.device.location} via Netmiko library...") - - try: - nm_connect_direct = ConnectHandler(**self.nm_host) - response = nm_connect_direct.send_command(self.command) - status = code.valid - logger.debug(f"Response for direct command {self.command}:\n{response}") - except ( - NetMikoAuthenticationException, - NetMikoTimeoutException, - NetmikoAuthError, - NetmikoTimeoutError, - ) as netmiko_exception: - response = params.messages.general - status = code.invalid - logger.error(f"{netmiko_exception}, {status}") - return response, status - - def proxied(self): - """ - Connects to the proxy server via netmiko library, then logs - into the router via SSH. 
- """ - device_proxy = getattr(proxies, self.device.proxy) - nm_proxy = { - "host": device_proxy.address.exploded, - "username": device_proxy.username, - "password": device_proxy.password.get_secret_value(), - "device_type": device_proxy.nos, - "global_delay_factor": 0.5, - } - nm_connect_proxied = ConnectHandler(**nm_proxy) - nm_ssh_command = device_proxy.ssh_command.format(**self.nm_host) + "\n" - - logger.debug(f"Netmiko proxy {self.device.proxy}") - logger.debug(f"Proxy SSH command: {nm_ssh_command}") - - nm_connect_proxied.write_channel(nm_ssh_command) - time.sleep(1) - proxy_output = nm_connect_proxied.read_channel() - - logger.debug(f"Proxy output:\n{proxy_output}") - - try: - # Accept SSH key warnings - if "Are you sure you want to continue connecting" in proxy_output: - logger.debug("Received OpenSSH key warning") - nm_connect_proxied.write_channel("yes" + "\n") - nm_connect_proxied.write_channel(self.nm_host["password"] + "\n") - # Send password on prompt - elif "assword" in proxy_output: - logger.debug("Received password prompt") - nm_connect_proxied.write_channel(self.nm_host["password"] + "\n") - proxy_output += nm_connect_proxied.read_channel() - # Reclassify netmiko connection as configured device type - logger.debug( - f'Redispatching netmiko with device class {self.nm_host["device_type"]}' - ) - redispatch(nm_connect_proxied, self.nm_host["device_type"]) - response = nm_connect_proxied.send_command(self.command) - status = code.valid - - logger.debug(f"Netmiko proxied response:\n{response}") - except ( - NetMikoAuthenticationException, - NetMikoTimeoutException, - NetmikoAuthError, - NetmikoTimeoutError, - ) as netmiko_exception: - response = params.messages.general - status = code.invalid - - logger.error(f"{netmiko_exception}, {status},Proxy: {self.device.proxy}") - return response, status - class Execute: """ - Ingests user input, runs blacklist check, runs prefix length check - (if enabled), pulls all configuraiton variables for the input - 
router. + Ingests raw user input, performs validation of target input, pulls + all configuraiton variables for the input router and connects to the + selected device to execute the query. """ def __init__(self, lg_data): @@ -256,29 +201,24 @@ class Execute: self.input_type = self.input_data["type"] self.input_target = self.input_data["target"] - def parse(self, output, nos): + def parse(self, raw_output, nos): """ - Splits BGP output by AFI, returns only IPv4 & IPv6 output for + Splits BGP raw output by AFI, returns only IPv4 & IPv6 output for protocol-agnostic commands (Community & AS_PATH Lookups). """ - logger.debug("Parsing output...") + logger.debug("Parsing raw output...") - parsed = output + parsed = raw_output if self.input_type in ("bgp_community", "bgp_aspath"): + logger.debug(f"Parsing raw output for device type {nos}") if nos in ("cisco_ios",): - logger.debug(f"Parsing output for device type {nos}") - delimiter = "For address family: " - parsed_ipv4 = output.split(delimiter)[1] - parsed_ipv6 = output.split(delimiter)[2] - parsed = delimiter + parsed_ipv4 + delimiter + parsed_ipv6 + parsed_raw = raw_output.split(delimiter)[1:3] + parsed = "\n\n".join([delimiter + afi.rstrip() for afi in parsed_raw]) elif nos in ("cisco_xr",): - logger.debug(f"Parsing output for device type {nos}") - delimiter = "Address Family: " - parsed_ipv4 = output.split(delimiter)[1] - parsed_ipv6 = output.split(delimiter)[2] - parsed = delimiter + parsed_ipv4 + delimiter + parsed_ipv6 + parsed_raw = raw_output.split(delimiter)[1:3] + parsed = "\n\n".join([delimiter + afi.rstrip() for afi in parsed_raw]) return parsed async def response(self): @@ -303,23 +243,16 @@ class Execute: logger.debug(f"Validity: {validity}, Message: {msg}, Status: {status}") + transport = Supported.map_transport(device_config.nos) + connection = Connect( + device_config, self.input_type, self.input_target, transport + ) if Supported.is_rest(device_config.nos): - connection = Rest("rest", device_config, 
self.input_type, self.input_target) - raw_output, status = await getattr(connection, device_config.nos)() - output = self.parse(raw_output, device_config.nos) + raw_output, status = await connection.rest() elif Supported.is_scrape(device_config.nos): - logger.debug("Initializing Netmiko...") + raw_output, status = await connection.scrape() + output = self.parse(raw_output, device_config.nos) - connection = Netmiko( - "scrape", device_config, self.input_type, self.input_target - ) - if device_config.proxy: - raw_output, status = connection.proxied() - elif not device_config.proxy: - raw_output, status = connection.direct() - output = self.parse(raw_output, device_config.nos) + logger.debug(f"Parsed output for device type {device_config.nos}:\n{output}") - logger.debug( - f"Parsed output for device type {device_config.nos}:\n{output}" - ) return (output, status) diff --git a/hyperglass/command/validate.py b/hyperglass/command/validate.py index 6b7b449..394e009 100644 --- a/hyperglass/command/validate.py +++ b/hyperglass/command/validate.py @@ -86,11 +86,8 @@ def ip_validate(target): try: valid_ip = ipaddress.ip_network(target) if valid_ip.is_reserved or valid_ip.is_unspecified or valid_ip.is_loopback: - validity = False - logger.debug(f"IP {valid_ip} is invalid") - if valid_ip.is_global: - validity = True - logger.debug(f"IP {valid_ip} is valid") + raise ValueError + validity = True except (ipaddress.AddressValueError, ValueError): logger.debug(f"IP {target} is invalid") validity = False @@ -299,8 +296,6 @@ class Validate: validity = True msg = f"{target} matched large community." status = code.valid - if not validity: - logger.error(f"{msg}, {status}") logger.debug(f"{msg}, {status}") return (validity, msg, status) @@ -320,7 +315,5 @@ class Validate: validity = True msg = f"{target} matched AS_PATH regex." 
status = code.valid - if not validity: - logger.error(f"{msg}, {status}") logger.debug(f"{msg}, {status}") return (validity, msg, status) diff --git a/hyperglass/configuration/models.py b/hyperglass/configuration/models.py index 9e81e38..ab8fa51 100644 --- a/hyperglass/configuration/models.py +++ b/hyperglass/configuration/models.py @@ -174,6 +174,7 @@ class Proxy(BaseSettings): """Model for per-proxy config in devices.yaml""" address: Union[IPvAnyAddress, str] + port: int = 22 username: str password: SecretStr nos: str @@ -348,7 +349,7 @@ class Messages(BaseSettings): "{d} requires IPv6 BGP lookups" "to be in CIDR notation." ) invalid_ip: str = "{i} is not a valid IP address." - invalid_dual: str = "invalid_dual {i} is an invalid {qt}." + invalid_dual: str = "{i} is an invalid {qt}." general: str = "An error occurred." directed_cidr: str = "{q} queries can not be in CIDR format." diff --git a/hyperglass/constants.py b/hyperglass/constants.py index 59ba7e0..652ae07 100644 --- a/hyperglass/constants.py +++ b/hyperglass/constants.py @@ -2,6 +2,8 @@ Global Constants for hyperglass """ +protocol_map = {80: "http", 8080: "http", 443: "https", 8443: "https"} + class Status: """ @@ -9,8 +11,6 @@ class Status: hyperglass. """ - # pylint: disable=too-few-public-methods - codes_dict = { 200: ("valid", "Valid Query"), 405: ("not_allowed", "Query Not Allowed"), @@ -163,3 +163,21 @@ class Supported: query_type tuple. """ return bool(query_type in Supported.query_types) + + @staticmethod + def map_transport(nos): + """ + Returns "scrape" if input nos is in Supported.scrape tuple, or + "rest" if input nos is in Supported.rest tuple. 
+ """ + transport = None + if nos in Supported.scrape: + transport = "scrape" + elif nos in Supported.rest: + transport = "rest" + return transport + + @staticmethod + def map_rest(nos): + uri_map = {"frr": "frr", "bird": "bird"} + return uri_map.get(nos) diff --git a/hyperglass/exceptions.py b/hyperglass/exceptions.py index 49bcee1..46d40f8 100644 --- a/hyperglass/exceptions.py +++ b/hyperglass/exceptions.py @@ -24,6 +24,28 @@ class ConfigError(HyperglassError): return self.message +class CantConnect(HyperglassError): + def __init__(self, message): + super().__init__(message) + self.message = message + + def __str__(self): + return self.message + + +class ParseError(HyperglassError): + """ + Raised when an ouput parser encounters an error. + """ + + def __init__(self, message): + super().__init__(message) + self.message = message + + def __str__(self): + return self.message + + class UnsupportedDevice(HyperglassError): """ Raised when an input NOS is not in the supported NOS list. diff --git a/hyperglass/gunicorn_config.py.example b/hyperglass/gunicorn_config.py.example deleted file mode 100644 index 14571d4..0000000 --- a/hyperglass/gunicorn_config.py.example +++ /dev/null @@ -1,71 +0,0 @@ -""" -https://github.com/checktheroads/hyperglass -Guncorn configuration -""" -import os -import shutil -import multiprocessing -from logzero import logger - -command = "/usr/local/bin/gunicorn" -pythonpath = "/opt/hyperglass" -bind = "[::1]:8001" -preload = True -workers = multiprocessing.cpu_count() * 2 -user = "www-data" -timeout = 60 -keepalive = 10 - -# Prometheus Multiprocessing directory, set as environment variable -prometheus_multiproc_dir = "/tmp/hyperglass_prometheus" - - -def on_starting(server): # pylint: disable=unused-argument - """Pre-startup Gunicorn Tasks""" - # Renders Jinja2 -> Sass, compiles Sass -> CSS prior to worker load - try: - import hyperglass.render - - hyperglass.render.css() - print(1) - except ImportError as error_exception: - 
logger.error(f"Exception occurred:\n{error_exception}") - # Verify Redis is running - try: - import hyperglass.configuration - import redis - - config = hyperglass.configuration.params() - - redis_config = { - "host": config["general"]["redis_host"], - "port": config["general"]["redis_port"], - "charset": "utf-8", - "decode_responses": True, - "db": config["features"]["cache"]["redis_id"], - } - r_cache = redis.Redis(**redis_config) - if r_cache.set("testkey", "testvalue", ex=1): - logger.debug("Redis is working properly") - except (redis.exceptions.ConnectionError): - logger.error("Redis is not running") - raise EnvironmentError("Redis is not running") - # Prometheus multiprocessing directory - if os.path.exists(prometheus_multiproc_dir): - shutil.rmtree(prometheus_multiproc_dir) - else: - os.mkdir(prometheus_multiproc_dir) - os.environ["prometheus_multiproc_dir"] = prometheus_multiproc_dir - - -def worker_exit(server, worker): # pylint: disable=unused-argument - """Prometheus multiprocessing WSGI support""" - from prometheus_client import multiprocess - - multiprocess.mark_process_dead(worker.pid) - - -def on_exit(server): - """Pre-shutdown Gunicorn Tasks""" - if os.path.exists(prometheus_multiproc_dir): - shutil.rmtree(prometheus_multiproc_dir) diff --git a/hyperglass/hyperglass.py b/hyperglass/hyperglass.py index 28c0ddf..38eda80 100644 --- a/hyperglass/hyperglass.py +++ b/hyperglass/hyperglass.py @@ -1,22 +1,24 @@ -""" -Main Hyperglass Front End -""" +"""Hyperglass Front End""" + # Standard Library Imports -import json +import time from ast import literal_eval +from pathlib import Path # Third Party Imports -import redis -from flask import Flask -from flask import Response -from flask import request -from flask_limiter import Limiter -from flask_limiter.util import get_ipaddr +import aredis from logzero import logger from prometheus_client import CollectorRegistry from prometheus_client import Counter from prometheus_client import generate_latest from 
prometheus_client import multiprocess +from sanic import Sanic +from sanic import response +from sanic.exceptions import NotFound +from sanic.exceptions import ServerError +from sanic_limiter import Limiter +from sanic_limiter import RateLimitExceeded +from sanic_limiter import get_remote_address # Project Imports from hyperglass import render @@ -34,17 +36,19 @@ logger.debug(f"Configuration Parameters:\n {params.dict()}") redis_config = { "host": params.general.redis_host, "port": params.general.redis_port, - "charset": "utf-8", "decode_responses": True, } -# Main Flask definition -app = Flask(__name__, static_url_path="/static") +# Main Sanic app definition +static_dir = Path(__file__).parent / "static" +logger.debug(f"Static Files: {static_dir}") +app = Sanic(__name__) +app.static("/static", str(static_dir)) # Redis Cache Config -r_cache = redis.Redis(db=params.features.rate_limit.redis_id, **redis_config) +r_cache = aredis.StrictRedis(db=params.features.cache.redis_id, **redis_config) -# Flask-Limiter Config +# Sanic-Limiter Config query_rate = params.features.rate_limit.query.rate query_period = params.features.rate_limit.query.period site_rate = params.features.rate_limit.site.rate @@ -55,14 +59,20 @@ rate_limit_site = f"{site_rate} per {site_period}" logger.debug(f"Query rate limit: {rate_limit_query}") logger.debug(f"Site rate limit: {rate_limit_site}") -# Redis Config for Flask-Limiter storage +# Redis Config for Sanic-Limiter storage r_limiter_db = params.features.rate_limit.redis_id -r_limiter_url = f'redis://{redis_config["host"]}:{redis_config["port"]}/{r_limiter_db}' -r_limiter = redis.Redis(**redis_config, db=params.features.rate_limit.redis_id) -# Adds Flask config variable for Flask-Limiter +r_limiter_url = "redis://{host}:{port}/{db}".format( + host=params.general.redis_host, + port=params.general.redis_port, + db=params.features.rate_limit.redis_id, +) +r_limiter = aredis.StrictRedis(db=params.features.rate_limit.redis_id, **redis_config) + +# 
Adds Sanic config variable for Sanic-Limiter app.config.update(RATELIMIT_STORAGE_URL=r_limiter_url) -# Initializes Flask-Limiter -limiter = Limiter(app, key_func=get_ipaddr, default_limits=[rate_limit_site]) + +# Initializes Sanic-Limiter +limiter = Limiter(app, key_func=get_remote_address, global_limits=[rate_limit_site]) # Prometheus Config count_data = Counter( @@ -85,49 +95,50 @@ count_notfound = Counter( @app.route("/metrics") -def metrics(): +@limiter.exempt +async def metrics(request): """Prometheus metrics""" - content_type_latest = str("text/plain; version=0.0.4; charset=utf-8") registry = CollectorRegistry() multiprocess.MultiProcessCollector(registry) - return Response(generate_latest(registry), mimetype=content_type_latest) + latest = generate_latest(registry) + return response.text(latest) -@app.errorhandler(404) -def handle_404(e): +@app.exception(NotFound) +async def handle_404(request, exception): """Renders full error page for invalid URI""" html = render.html("404") path = request.path - client_addr = get_ipaddr() - count_notfound.labels(e, path, client_addr).inc() - logger.error(f"Error: {e}, Path: {path}, Source: {client_addr}") - return html, 404 + client_addr = get_remote_address(request) + count_notfound.labels(exception, path, client_addr).inc() + logger.error(f"Error: {exception}, Path: {path}, Source: {client_addr}") + return response.html(html, status=404) -@app.errorhandler(429) -def handle_429(e): +@app.exception(RateLimitExceeded) +async def handle_429(request, exception): """Renders full error page for too many site queries""" html = render.html("429") - client_addr = get_ipaddr() - count_ratelimit.labels(e, client_addr).inc() - logger.error(f"Error: {e}, Source: {client_addr}") - return html, 429 + client_addr = get_remote_address(request) + count_ratelimit.labels(exception, client_addr).inc() + logger.error(f"Error: {exception}, Source: {client_addr}") + return response.html(html, status=429) -@app.errorhandler(500) -def 
handle_500(e): +@app.exception(ServerError) +async def handle_500(request, exception): """General Error Page""" - client_addr = get_ipaddr() - count_errors.labels(500, e, client_addr, None, None, None).inc() - logger.error(f"Error: {e}, Source: {client_addr}") + client_addr = get_remote_address(request) + count_errors.labels(500, exception, client_addr, None, None, None).inc() + logger.error(f"Error: {exception}, Source: {client_addr}") html = render.html("500") - return html, 500 + return response.html(html, status=500) -def clear_cache(): +async def clear_cache(): """Function to clear the Redis cache""" try: - r_cache.flushdb() + await r_cache.flushdb() except Exception as error_exception: logger.error(f"Error clearing cache: {error_exception}") raise HyperglassError(f"Error clearing cache: {error_exception}") @@ -135,60 +146,59 @@ def clear_cache(): @app.route("/", methods=["GET"]) @limiter.limit(rate_limit_site, error_message="Site") -def site(): +async def site(request): """Main front-end web application""" - return render.html("index") + return response.html(render.html("index")) @app.route("/test", methods=["GET"]) -def test_route(): +async def test_route(request): """Test route for various tests""" html = render.html("500") - return html, 500 + return response.html(html, status=500), 500 @app.route("/locations/", methods=["GET"]) -def get_locations(asn): +async def get_locations(request, asn): """ - Flask GET route provides a JSON list of all locations for the - selected network/ASN. + GET route provides a JSON list of all locations for the selected + network/ASN. 
""" - locations_list_json = json.dumps(devices.locations[asn]) - logger.debug(f"Locations list:{devices.locations[asn]}") - return locations_list_json + return response.json(devices.locations[asn]) @app.route("/lg", methods=["POST"]) -# Invoke Flask-Limiter with configured rate limit @limiter.limit(rate_limit_query, error_message="Query") -def hyperglass_main(): +async def hyperglass_main(request): """ Main backend application initiator. Ingests Ajax POST data from form submit, passes it to the backend application to perform the filtering/lookups. """ # Get JSON data from Ajax POST - lg_data = request.get_json() + lg_data = request.json logger.debug(f"Unvalidated input: {lg_data}") # Return error if no target is specified if not lg_data["target"]: logger.debug("No input specified") - return Response(params.messages.no_input, code.invalid) + return response.html(params.messages.no_input, status=code.invalid) # Return error if no location is selected if lg_data["location"] not in devices.hostnames: logger.debug("No selection specified") - return Response(params.messages.no_location, code.invalid) + return response.html(params.messages.no_location, status=code.invalid) # Return error if no query type is selected if not Supported.is_supported_query(lg_data["type"]): logger.debug("No query specified") - return Response(params.messages.no_query_type, code.invalid) + return response.html(params.messages.no_query_type, status=code.invalid) # Get client IP address for Prometheus logging & rate limiting - client_addr = get_ipaddr() + client_addr = get_remote_address(request) # Increment Prometheus counter count_data.labels( client_addr, lg_data["type"], lg_data["location"], lg_data["target"] ).inc() + logger.debug(f"Client Address: {client_addr}") + # Stringify the form response containing serialized JSON for the # request, use as key for k/v cache store so each command output # value is unique @@ -197,24 +207,26 @@ def hyperglass_main(): cache_timeout = 
params.features.cache.timeout logger.debug(f"Cache Timeout: {cache_timeout}") # Check if cached entry exists - if not r_cache.get(cache_key): + if not await r_cache.get(cache_key): logger.debug(f"Sending query {cache_key} to execute module...") # Pass request to execution module - cache_value = Execute(lg_data).response() - - logger.debug("Validated Response...") - logger.debug(f"Status: {cache_value[1]}") - logger.debug(f"Output:\n {cache_value[0]}") + starttime = time.time() + cache_value = await Execute(lg_data).response() + endtime = time.time() + elapsedtime = round(endtime - starttime, 4) + logger.debug( + f"Execution for query {cache_key} took {elapsedtime} seconds to run." + ) # Create a cache entry - r_cache.set(cache_key, str(cache_value)) - r_cache.expire(cache_key, cache_timeout) + await r_cache.set(cache_key, str(cache_value)) + await r_cache.expire(cache_key, cache_timeout) logger.debug(f"Added cache entry for query: {cache_key}") - logger.error(f"Unable to add output to cache: {cache_key}") # If it does, return the cached entry - cache_response = r_cache.get(cache_key) - response = literal_eval(cache_response) - response_output, response_status = response + cache_response = await r_cache.get(cache_key) + # Serialize stringified tuple response from cache + serialized_response = literal_eval(cache_response) + response_output, response_status = serialized_response logger.debug(f"Cache match for: {cache_key}, returning cached entry") logger.debug(f"Cache Output: {response_output}") @@ -229,4 +241,4 @@ def hyperglass_main(): lg_data["location"], lg_data["target"], ).inc() - return Response(*response) + return response.html(response_output, status=response_status) diff --git a/hyperglass/hyperglass.service.example b/hyperglass/hyperglass.service.example index 65018e6..6097340 100644 --- a/hyperglass/hyperglass.service.example +++ b/hyperglass/hyperglass.service.example @@ -7,7 +7,7 @@ Requires=redis-server.service User=www-data Group=www-data 
WorkingDirectory=/opt/hyperglass -ExecStart=/usr/local/bin/gunicorn -c /opt/hyperglass/hyperglass/gunicorn_config.py hyperglass.wsgi +ExecStart=/usr/local/bin/python3 -m sanic hyperglass.web.app [Install] WantedBy=multi-user.target diff --git a/hyperglass/render/__init__.py b/hyperglass/render/__init__.py index 7ad0f32..f5c5a55 100644 --- a/hyperglass/render/__init__.py +++ b/hyperglass/render/__init__.py @@ -81,7 +81,7 @@ link: {{ general.org_name }} BGP Communities Performs BGP table lookup based on [Extended](https://tools.ietf.org/html/rfc4360) \ or [Large](https://tools.ietf.org/html/rfc8195) community value. -{{ info["link"] }} +{{ info["link"] | safe }} """, "bgp_aspath": """ --- @@ -90,7 +90,7 @@ link: Supported BGP AS Path Expressions --- Performs BGP table lookup based on `AS_PATH` regular expression. -{{ info["link"] }} +{{ info["link"] | safe }} """, "ping": """ --- diff --git a/hyperglass/static/js/hyperglass.js b/hyperglass/static/js/hyperglass.js index 206b837..6d2181f 100644 --- a/hyperglass/static/js/hyperglass.js +++ b/hyperglass/static/js/hyperglass.js @@ -9,7 +9,7 @@ clearPage(); // Bulma Toggable Dropdown - help text $('#help-dropdown').click( - function(event) { + function (event) { event.stopPropagation(); $(this).toggleClass('is-active'); } @@ -18,21 +18,21 @@ $('#help-dropdown').click( // ClipboardJS Elements var btn_copy = document.getElementById('btn-copy'); var clipboard = new ClipboardJS(btn_copy); -clipboard.on('success', function(e) { +clipboard.on('success', function (e) { console.log(e); $('#btn-copy').addClass('is-success').addClass('is-outlined'); $('#copy-icon').removeClass('icofont-ui-copy').addClass('icofont-check'); - setTimeout(function() { + setTimeout(function () { $('#btn-copy').removeClass('is-success').removeClass('is-outlined'); $('#copy-icon').removeClass('icofont-check').addClass('icofont-ui-copy'); }, 1000); }); -clipboard.on('error', function(e) { +clipboard.on('error', function (e) { console.log(e); }); 
$('.modal-background, .modal-close').click( - function(event) { + function (event) { event.stopPropagation(); $('.modal').removeClass("is-active"); } @@ -40,21 +40,21 @@ $('.modal-background, .modal-close').click( // Adjust behavior of help text dropdown based on device screen size $('#help-dropdown-button').click( - function(event) { + function (event) { if (window.innerWidth < 1024) { - $('#help-dropdown').removeClass('is-right'); - $('.lg-help').addClass('lg-help-mobile').removeClass('lg-help'); - } + $('#help-dropdown').removeClass('is-right'); + $('.lg-help').addClass('lg-help-mobile').removeClass('lg-help'); + } } ); function adjustDropdowns() { - var actual_width = window.innerWidth; - if (actual_width < 1024) { - $('#lg-netlocdropdown').removeClass('has-addons').removeClass('has-addons-centered').addClass('is-grouped').addClass('is-grouped-centered').addClass('is-grouped-multiline'); - $('#network').css('width', actual_width * 0.85); - $('#location').css('width', actual_width * 0.85); - } + var actual_width = window.innerWidth; + if (actual_width < 1024) { + $('#lg-netlocdropdown').removeClass('has-addons').removeClass('has-addons-centered').addClass('is-grouped').addClass('is-grouped-centered').addClass('is-grouped-multiline'); + $('#network').css('width', actual_width * 0.85); + $('#location').css('width', actual_width * 0.85); + } } function clearErrors() { @@ -81,179 +81,179 @@ function clearPage() { } function prepResults() { - progress.show(); - resultsbox.show(); + progress.show(); + resultsbox.show(); } -$(document).ready(function() { +$(document).ready(function () { var defaultasn = $("#network").val(); $.ajax({ - url: '/locations/'+defaultasn, + url: '/locations/' + defaultasn, context: document.body, type: 'get', - success: function(data) { - selectedRouters = JSON.parse(data); + success: function (data) { + selectedRouters = data; console.log(selectedRouters); updateRouters(selectedRouters); }, - error: function(err) { + error: function (err) { 
console.log(err);
        }
    });
});

-$('#network').on('change', (function(event) {
-    var asn = $("select[id=network").val();
-    $('#location').children(":not(#text_location)").remove();
-    $.ajax({
-        url: '/locations/'+asn,
-        type: 'get',
-        success: function(data) {
-            clearPage();
-            updateRouters(JSON.parse(data));
-        },
-        error: function(err) {
-            console.log(err);
-        }
-    });
+$('#network').on('change', (function (event) {
+    var asn = $("select[id=network]").val();
+    $('#location').children(":not(#text_location)").remove();
+    $.ajax({
+        url: '/locations/' + asn,
+        type: 'get',
+        success: function (data) {
+            clearPage();
+            updateRouters(data);
+        },
+        error: function (err) {
+            console.log(err);
+        }
+    });
 }));
 
 function updateRouters(locations) {
-    locations.forEach(function(r) {
+    locations.forEach(function (r) {
         $('#location').append($("