commit e494db5337 (parent 2c7456c317)

remove legacy parsers

5 changed files with 0 additions and 258 deletions
@@ -1 +0,0 @@
"""Parsing utilities for command output."""
@@ -1,57 +0,0 @@
"""Parse Arista JSON Response to Structured Data."""

# Standard Library
import json
from typing import Dict, Sequence

# Third Party
from pydantic import ValidationError

# Project
from hyperglass.log import log
from hyperglass.exceptions.private import ParsingError
from hyperglass.models.parsing.arista_eos import AristaRoute


def parse_arista(output: Sequence[str]) -> Dict:  # noqa: C901
    """Parse an Arista BGP JSON response."""
    data = {}

    for i, response in enumerate(output):

        try:
            # Parse into a separate name so the accumulator isn't
            # clobbered when output contains multiple responses.
            parsed: Dict = json.loads(response)

            log.debug("Pre-parsed data: {}", parsed)

            vrf = list(parsed["vrfs"].keys())[0]
            routes = parsed["vrfs"][vrf]

            log.debug("Pre-validated data: {}", routes)

            validated = AristaRoute(**routes)
            serialized = validated.serialize().export_dict()

            if i == 0:
                data.update(serialized)
            else:
                data["routes"].extend(serialized["routes"])

        except json.JSONDecodeError as err:
            log.critical("Error decoding JSON: {}", str(err))
            raise ParsingError("Error parsing response data") from err

        except KeyError as err:
            log.critical("'{}' was not found in the response", str(err))
            raise ParsingError("Error parsing response data") from err

        except IndexError as err:
            log.critical(str(err))
            raise ParsingError("Error parsing response data") from err

        except ValidationError as err:
            log.critical(str(err))
            raise ParsingError(err.errors()) from err

    log.debug("Serialized: {}", data)
    return data
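For context, a usage sketch (hedged — the variable names and the device command below are hypothetical, not taken from this diff):

# Hypothetical usage — raw_response would be the text an Arista device
# returns for a JSON-formatted BGP query, e.g. "show ip bgp ... | json":
#     structured = parse_arista([raw_response])
#     structured["routes"]  # list of route dicts built via AristaRoute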
@@ -1,103 +0,0 @@
"""Parse Juniper XML Response to Structured Data."""

# Standard Library
import re
from typing import Dict, List, Sequence, Generator

# Third Party
import xmltodict  # type:ignore
from pydantic import ValidationError

# Project
from hyperglass.log import log
from hyperglass.exceptions.private import ParsingError
from hyperglass.models.parsing.juniper import JuniperRoute

REMOVE_PATTERNS = (
    # The XML response can have a CLI banner appended to the end of the
    # XML string. For example:
    # ```
    # <rpc-reply>
    # ...
    # <cli>
    # <banner>{master}</banner>
    # </cli>
    # </rpc-reply>
    #
    # {master} noqa: E800
    # ```
    #
    # This pattern will remove anything inside braces, including the braces.
    r"\{.+\}",
)


def clean_xml_output(output: str) -> str:
    """Remove Juniper-specific patterns from output."""

    def scrub(lines: List[str]) -> Generator[str, None, None]:
        """Clean & remove each pattern from each line."""
        for pattern in REMOVE_PATTERNS:
            for line in lines:
                # Remove the pattern & strip extra newlines
                scrubbed = re.sub(pattern, "", line.strip())
                # Only yield non-empty and non-newline lines
                if scrubbed and scrubbed != "\n":
                    yield scrubbed

    lines = scrub(output.splitlines())
    return "\n".join(lines)
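As a concrete illustration of the banner-stripping described in REMOVE_PATTERNS (a small sketch; the input string is constructed here, not a captured device response):

# Sketch: clean_xml_output on a banner-suffixed reply.
xml = "<rpc-reply>\n<cli>\n<banner>{master}</banner>\n</cli>\n</rpc-reply>\n\n{master}\n"
print(clean_xml_output(xml))
# The brace pattern is removed and empty lines are dropped, yielding:
# <rpc-reply>
# <cli>
# <banner></banner>
# </cli>
# </rpc-reply>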

def parse_juniper(output: Sequence) -> Dict:  # noqa: C901
    """Parse a Juniper BGP XML response."""
    data = {}

    for i, response in enumerate(output):
        cleaned = clean_xml_output(response)

        try:
            parsed = xmltodict.parse(cleaned, force_list=("rt", "rt-entry", "community"))

            log.debug("Initially Parsed Response: \n{}", parsed)

            if "rpc-reply" in parsed.keys():
                if "xnm:error" in parsed["rpc-reply"]:
                    if "message" in parsed["rpc-reply"]["xnm:error"]:
                        err = parsed["rpc-reply"]["xnm:error"]["message"]
                        raise ParsingError('Error from device: "{}"', err)

                parsed_base = parsed["rpc-reply"]["route-information"]
            elif "route-information" in parsed.keys():
                parsed_base = parsed["route-information"]
            else:
                # Neither known element is present; parsed_base would
                # otherwise be unbound below.
                raise ParsingError("Error parsing response data")

            if "route-table" not in parsed_base:
                return data

            if "rt" not in parsed_base["route-table"]:
                return data

            parsed = parsed_base["route-table"]

            validated = JuniperRoute(**parsed)
            serialized = validated.serialize().export_dict()

            if i == 0:
                data.update(serialized)
            else:
                data["routes"].extend(serialized["routes"])

        except xmltodict.expat.ExpatError as err:
            log.critical(str(err))
            raise ParsingError("Error parsing response data") from err

        except KeyError as err:
            log.critical("{} was not found in the response", str(err))
            raise ParsingError("Error parsing response data") from err

        except ValidationError as err:
            log.critical(str(err))
            raise ParsingError(err.errors()) from err

    return data
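And a matching usage sketch for the XML path (hedged — the variable names are hypothetical; the raw reply would come from a Juniper device):

# Hypothetical usage — raw_xml would be a Juniper RPC reply to a BGP
# route/aspath/community query, possibly carrying the {master} banner:
#     structured = parse_juniper([raw_xml])
#     structured["routes"]  # routes validated through JuniperRoute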
@@ -1,79 +0,0 @@
# flake8: noqa
# WORK IN PROGRESS

"""Linux-style parsers for ping & traceroute."""

# Standard Library
import re

# Project
from hyperglass.exceptions.private import ParsingError


def _process_numbers(numbers):
    """Convert each string to a float, or to an int if it has no fractional part."""
    for num in numbers:
        num = float(num)
        if num.is_integer():
            num = int(num)
        yield num


def parse_linux_ping(output):
    """Parse standard Linux-style ping output to structured data.

    Example:
        64 bytes from 1.1.1.1: icmp_seq=0 ttl=59 time=1.151 ms
        64 bytes from 1.1.1.1: icmp_seq=1 ttl=59 time=1.180 ms
        64 bytes from 1.1.1.1: icmp_seq=2 ttl=59 time=1.170 ms
        64 bytes from 1.1.1.1: icmp_seq=3 ttl=59 time=1.338 ms
        64 bytes from 1.1.1.1: icmp_seq=4 ttl=59 time=4.913 ms

        --- 1.1.1.1 ping statistics ---
        5 packets transmitted, 5 packets received, 0% packet loss
        round-trip min/avg/max/stddev = 1.151/1.950/4.913/1.483 ms
    """
    try:
        # Extract target host
        host = re.findall(r"^PING (.+) \(.+\): \d+ data bytes", output)[0]

        # Separate echo replies from summary info
        replies, _stats = re.split(r"--- .+ ---", output)
        replies = [l for l in replies.splitlines()[1:] if l]

        reply_stats = []
        for line in replies:
            # Extract the numerical values from each echo reply line
            bytes_seq_ttl_rtt = re.findall(
                r"(\d+) bytes.+ icmp_seq=(\d+) ttl=(\d+) time=(\d+\.\d+).*", line
            )[0]

            _bytes, seq, ttl, rtt = _process_numbers(bytes_seq_ttl_rtt)

            reply_stats.append({"bytes": _bytes, "sequence": seq, "ttl": ttl, "rtt": rtt})

        stats = [l for l in _stats.splitlines() if l]

        # Extract the top summary line numbers & process
        tx_rx_loss = re.findall(r"(\d+) .+, (\d+) .+, (\d+)\%.+", stats[0])[0]
        tx, rx, loss = _process_numbers(tx_rx_loss)

        # Extract the bottom summary line numbers & process. The summary
        # order is min/avg/max/stddev; drop stddev & unpack in that order.
        rt = stats[1].split(" = ")[1]
        min_avg_max = rt.split("/")[:-1]
        _min, _avg, _max = _process_numbers(min_avg_max)

        return {
            "host": host,
            "transmitted": tx,
            "received": rx,
            "loss_percent": loss,
            "min_rtt": _min,
            "max_rtt": _max,
            "avg_rtt": _avg,
            "replies": reply_stats,
        }

    except (IndexError, ValueError) as err:
        # IndexError for empty findall results, ValueError for failed conversions
        raise ParsingError("Error parsing ping response: {e}", e=str(err)) from err
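Run against the docstring's sample output (prefixed with its "PING 1.1.1.1 ..." header line, which the host regex requires), the parser would produce values taken directly from that example:

# Expected shape for the docstring sample (values from the example above):
# {
#     "host": "1.1.1.1",
#     "transmitted": 5, "received": 5, "loss_percent": 0,
#     "min_rtt": 1.151, "avg_rtt": 1.95, "max_rtt": 4.913,
#     "replies": [{"bytes": 64, "sequence": 0, "ttl": 59, "rtt": 1.151}, ...],
# }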
@@ -1,18 +0,0 @@
"""Map NOS and Commands to Parsing Functions."""

# Local
from .arista import parse_arista
from .juniper import parse_juniper

structured_parsers = {
    "juniper": {
        "bgp_route": parse_juniper,
        "bgp_aspath": parse_juniper,
        "bgp_community": parse_juniper,
    },
    "arista_eos": {
        "bgp_route": parse_arista,
        "bgp_aspath": parse_arista,
        "bgp_community": parse_arista,
    },
}
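A hedged sketch of how this table might be consulted by a caller (the surrounding names are illustrative, not taken from this diff):

# Illustrative lookup — nos and query_type are hypothetical variables
# matching the keys above:
#     parser = structured_parsers.get(nos, {}).get(query_type)
#     if parser is not None:
#         structured = parser(device_output)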