forked from mirrors/thatmattlove-hyperglass

fix webhook construction errors, swap ripestat for bgp.tools

parent 6ae1c67298
commit ab309bd418

4 changed files with 302 additions and 124 deletions
@@ -6,9 +6,9 @@ from ipaddress import ip_network

 # Project
 from hyperglass.log import log
+from hyperglass.external import bgptools
 from hyperglass.exceptions import InputInvalid, InputNotAllowed
 from hyperglass.configuration import params
-from hyperglass.external.ripestat import RIPEStat


 def _member_of(target, network):
@@ -137,11 +137,9 @@ def validate_ip(value, query_type, query_vrf):  # noqa: C901
         # enabled (the default), convert the host query to a network
         # query.
         elif query_type in ("bgp_route",) and vrf_afi.force_cidr:
-            with RIPEStat() as ripe:
-                valid_ip = ripe.network_info_sync(valid_ip.network_address).get(
-                    "prefix"
-                )
+            valid_ip = ip_network(
+                bgptools.network_info_sync(valid_ip.network_address).get("prefix")
+            )

         # For a host query with bgp_route query type and force_cidr
         # disabled, convert the host query to a single IP address.
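For context: the force_cidr branch now resolves a host query to its covering prefix via bgp.tools, and wraps the result in ip_network() so the validator keeps working with an ipaddress object rather than a bare string. A minimal standalone sketch of that conversion, with an illustrative lookup result (the dict values here are made up; the keys follow REPLACE_KEYS from the new bgptools module):

    from ipaddress import ip_network

    # Hypothetical response shape from bgptools.network_info_sync();
    # keys mirror REPLACE_KEYS in hyperglass/external/bgptools.py.
    info = {"asn": "13335", "ip": "1.1.1.1", "prefix": "1.1.1.0/24"}

    # As in the diff above: look up the covering prefix, then rebuild
    # an ip_network object from it.
    valid_ip = ip_network(info.get("prefix"))
    print(valid_ip)  # 1.1.1.0/24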
@@ -16,7 +16,7 @@ from hyperglass.log import log
 from hyperglass.util import clean_name, process_headers, import_public_key
 from hyperglass.cache import Cache
 from hyperglass.encode import jwt_decode
-from hyperglass.external import Webhook, RIPEStat
+from hyperglass.external import Webhook, bgptools
 from hyperglass.exceptions import HyperglassError
 from hyperglass.configuration import REDIS_CONFIG, params, devices
 from hyperglass.api.models.query import Query
@@ -26,7 +26,7 @@ from hyperglass.api.models.cert_import import EncodedRequest

 APP_PATH = os.environ["hyperglass_directory"]


-async def send_webhook(query_data: Query, request: Request, timestamp: datetime) -> int:
+async def send_webhook(query_data: Query, request: Request, timestamp: datetime):
     """If webhooks are enabled, get request info and send a webhook.

     Args:
@@ -36,25 +36,34 @@ async def send_webhook(query_data: Query, request: Request, timestamp: datetime)
     Returns:
         int: Returns 1 regardless of result
     """
-    if params.logging.http is not None:
-        headers = await process_headers(headers=request.headers)
-
-        async with RIPEStat() as ripe:
-            host = headers.get(
-                "x-real-ip", headers.get("x-forwarded-for", request.client.host)
-            )
-            network_info = await ripe.network_info(host, serialize=True)
-
-        async with Webhook(params.logging.http) as hook:
-            await hook.send(
-                query={
-                    **query_data.export_dict(pretty=True),
-                    "headers": headers,
-                    "source": request.client.host,
-                    "network": network_info,
-                    "timestamp": timestamp,
-                }
-            )
+    try:
+        if params.logging.http is not None:
+            headers = await process_headers(headers=request.headers)
+
+            if headers.get("x-real-ip") is not None:
+                host = headers["x-real-ip"]
+            elif headers.get("x-forwarded-for") is not None:
+                host = headers["x-forwarded-for"]
+            else:
+                host = request.client.host
+
+            network_info = await bgptools.network_info(host)
+
+            async with Webhook(params.logging.http) as hook:
+                await hook.send(
+                    query={
+                        **query_data.export_dict(pretty=True),
+                        "headers": headers,
+                        "source": host,
+                        "network": network_info,
+                        "timestamp": timestamp,
+                    }
+                )
+    except Exception as err:
+        log.error(
+            "Error sending webhook to {}: {}", params.logging.http.provider, str(err)
+        )


 async def query(query_data: Query, request: Request, background_tasks: BackgroundTasks):
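The explicit fallback chain above also fixes the reported source: the webhook now logs the resolved proxy-forwarded address as "source" instead of request.client.host. The fallback is easy to verify in isolation; a minimal sketch, with a hypothetical headers dict standing in for the processed request headers and client_host for request.client.host:

    # Sketch of the x-real-ip / x-forwarded-for fallback from the diff above.
    def resolve_client_ip(headers: dict, client_host: str) -> str:
        if headers.get("x-real-ip") is not None:
            return headers["x-real-ip"]
        elif headers.get("x-forwarded-for") is not None:
            return headers["x-forwarded-for"]
        return client_host

    print(resolve_client_ip({"x-forwarded-for": "203.0.113.7"}, "10.0.0.1"))
    # -> 203.0.113.7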
hyperglass/external/bgptools.py (new file, vendored, 150 additions)
@@ -0,0 +1,150 @@
+"""Query & parse data from bgp.tools."""
+
+# Standard Library
+import re
+import socket
+import asyncio
+
+# Project
+from hyperglass.log import log
+
+REPLACE_KEYS = {
+    "AS": "asn",
+    "IP": "ip",
+    "BGP Prefix": "prefix",
+    "CC": "country",
+    "Registry": "rir",
+    "Allocated": "allocated",
+    "AS Name": "org",
+}
+
+
+def parse_whois(output: str):
+    """Parse raw whois output from bgp.tools.
+
+    Sample output:
+    AS    | IP      | BGP Prefix | CC | Registry | Allocated  | AS Name  # noqa: E501
+    13335 | 1.1.1.1 | 1.1.1.0/24 | US | ARIN     | 2010-07-14 | Cloudflare, Inc.
+    """
+    # Each new line is a row.
+    rawlines = output.split("\n")
+
+    lines = ()
+    for rawline in rawlines:
+
+        # Split each row into fields, separated by a pipe.
+        line = ()
+        rawfields = rawline.split("|")
+
+        for rawfield in rawfields:
+
+            # Remove newline and leading/trailing whitespaces.
+            field = re.sub(r"(\n|\r)", "", rawfield).strip(" ")
+            line += (field,)
+
+        lines += (line,)
+
+    headers = lines[0]
+    row = lines[1]
+    data = {}
+
+    for i, header in enumerate(headers):
+        # Try to replace bgp.tools key names with easier to parse key names
+        key = REPLACE_KEYS.get(header, header)
+        data.update({key: row[i]})
+
+    log.debug("Parsed bgp.tools data: {}", data)
+    return data
+
+
+async def run_whois(resource: str):
+    """Open raw socket to bgp.tools and execute query."""
+
+    # Open the socket to bgp.tools
+    log.debug("Opening connection to bgp.tools")
+    reader, writer = await asyncio.open_connection("bgp.tools", port=43)
+
+    # Send the query
+    writer.write(str(resource).encode())
+    if writer.can_write_eof():
+        writer.write_eof()
+    await writer.drain()
+
+    # Read the response
+    response = b""
+    while True:
+        data = await reader.read(128)
+        if data:
+            response += data
+        else:
+            log.debug("Closing connection to bgp.tools")
+            writer.close()
+            break
+
+    return response.decode()
+
+
+def run_whois_sync(resource: str):
+    """Open raw socket to bgp.tools and execute query."""
+
+    # Open the socket to bgp.tools
+    log.debug("Opening connection to bgp.tools")
+    sock = socket.socket()
+    sock.connect(("bgp.tools", 43))
+    sock.send(f"{resource}\n".encode())
+
+    # Read the response
+    response = b""
+    while True:
+        data = sock.recv(128)
+        if data:
+            response += data
+        else:
+            log.debug("Closing connection to bgp.tools")
+            sock.shutdown(1)
+            sock.close()
+            break
+
+    return response.decode()
+
+
+async def network_info(resource: str):
+    """Get ASN, Containing Prefix, and other info about an internet resource."""
+
+    data = {v: "" for v in REPLACE_KEYS.values()}
+
+    try:
+        if resource is not None:
+            whoisdata = await run_whois(resource)
+
+            if whoisdata:
+                # If the response is not empty, parse it.
+                data = parse_whois(whoisdata)
+
+    except Exception as err:
+        log.error(str(err))
+
+    return data
+
+
+def network_info_sync(resource: str):
+    """Get ASN, Containing Prefix, and other info about an internet resource."""
+
+    data = {v: "" for v in REPLACE_KEYS.values()}
+
+    try:
+        if resource is not None:
+            whoisdata = run_whois_sync(resource)
+
+            if whoisdata:
+                # If the response is not empty, parse it.
+                data = parse_whois(whoisdata)
+
+    except Exception as err:
+        log.error(str(err))
+
+    return data
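A quick way to exercise the new module, assuming outbound access to bgp.tools on port 43; the printed values are illustrative, not guaranteed output:

    from hyperglass.external import bgptools

    # Synchronous lookup; returns a dict keyed by the REPLACE_KEYS values,
    # with empty strings for anything the whois response didn't provide.
    info = bgptools.network_info_sync("1.1.1.1")
    print(info["asn"], info["prefix"], info["org"])
    # Illustrative output: 13335 1.1.1.0/24 Cloudflare, Inc.

    # Async variant, e.g. from route code:
    # info = await bgptools.network_info("1.1.1.1")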
@@ -6,7 +6,14 @@ from typing import TypeVar, Optional
 from datetime import datetime

 # Third Party
-from pydantic import HttpUrl, BaseModel, StrictInt, StrictStr, StrictFloat
+from pydantic import (
+    HttpUrl,
+    BaseModel,
+    StrictInt,
+    StrictStr,
+    StrictFloat,
+    root_validator,
+)

 # Project
 from hyperglass.log import log
@@ -183,11 +190,13 @@ class WebhookHeaders(HyperglassModel):
     }


-class WebhookNetwork(HyperglassModel):
+class WebhookNetwork(HyperglassModelExtra):
    """Webhook data model."""

-    prefix: Optional[StrictStr]
-    asn: Optional[StrictStr]
+    prefix: StrictStr = "Unknown"
+    asn: StrictStr = "Unknown"
+    org: StrictStr = "Unknown"
+    country: StrictStr = "Unknown"


 class Webhook(HyperglassModel):
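The base-class change to HyperglassModelExtra appears intended to let the model carry the extra keys bgp.tools returns (ip, rir, allocated) alongside the declared fields. A sketch of that behavior, using a plain pydantic v1 model with extra = "allow" as a stand-in, since HyperglassModelExtra itself is not shown in this diff:

    from pydantic import BaseModel, StrictStr

    # Stand-in for HyperglassModelExtra, assumed to permit undeclared fields.
    class Network(BaseModel):
        class Config:
            extra = "allow"

        prefix: StrictStr = "Unknown"
        asn: StrictStr = "Unknown"
        org: StrictStr = "Unknown"
        country: StrictStr = "Unknown"

    # Keys like "rir" pass through instead of raising a validation error.
    net = Network(prefix="1.1.1.0/24", asn="13335", rir="ARIN")
    print(net.rir)  # ARIN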
@@ -198,10 +207,17 @@ class Webhook(HyperglassModel):
     query_vrf: StrictStr
     query_target: StrictStr
     headers: WebhookHeaders
-    source: StrictStr
+    source: StrictStr = "Unknown"
     network: WebhookNetwork
     timestamp: datetime

+    @root_validator(pre=True)
+    def validate_webhook(cls, values):
+        """Reset network attributes if the source is localhost."""
+        if values.get("source") in ("127.0.0.1", "::1"):
+            values["network"] = {}
+        return values
+
     def msteams(self):
         """Format the webhook data as a Microsoft Teams card."""
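The new pre-validator is easy to demonstrate with stand-in pydantic v1 models. Because WebhookNetwork's fields now default to "Unknown", resetting network to an empty dict validates cleanly instead of raising, which is what made the localhost reset safe:

    from pydantic import BaseModel, StrictStr, root_validator

    class Network(BaseModel):
        # Mirrors WebhookNetwork's defaults, so an empty dict validates.
        prefix: StrictStr = "Unknown"
        asn: StrictStr = "Unknown"

    class Hook(BaseModel):
        source: StrictStr = "Unknown"
        network: Network

        @root_validator(pre=True)
        def validate_webhook(cls, values):
            # Localhost has no meaningful bgp.tools data; reset to defaults.
            if values.get("source") in ("127.0.0.1", "::1"):
                values["network"] = {}
            return values

    h = Hook(source="127.0.0.1", network={"prefix": "x", "asn": "y"})
    print(h.network.prefix)  # Unknown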
@@ -209,55 +225,46 @@ class Webhook(HyperglassModel):
         """Wrap argument in backticks for markdown inline code formatting."""
         return f"`{str(value)}`"

-        try:
-
-            header_data = [
-                {"name": k, "value": code(v)}
-                for k, v in self.headers.dict(by_alias=True).items()
-            ]
-
-            time_fmt = self.timestamp.strftime("%Y %m %d %H:%M:%S")
-            payload = {
-                "@type": "MessageCard",
-                "@context": "http://schema.org/extensions",
-                "themeColor": "118ab2",
-                "summary": _WEBHOOK_TITLE,
-                "sections": [
-                    {
-                        "activityTitle": _WEBHOOK_TITLE,
-                        "activitySubtitle": f"{time_fmt} UTC",
-                        "activityImage": _ICON_URL,
-                        "facts": [
-                            {"name": "Query Location", "value": self.query_location},
-                            {"name": "Query Target", "value": code(self.query_target)},
-                            {"name": "Query Type", "value": self.query_type},
-                            {"name": "Query VRF", "value": self.query_vrf},
-                        ],
-                    },
-                    {"markdown": True, "text": "**Source Information**"},
-                    {"markdown": True, "text": "---"},
-                    {
-                        "markdown": True,
-                        "facts": [
-                            {"name": "Source IP", "value": code(self.source)},
-                            {
-                                "name": "Source Prefix",
-                                "value": code(self.network.prefix),
-                            },
-                            {"name": "Source ASN", "value": code(self.network.asn)},
-                        ],
-                    },
-                    {"markdown": True, "text": "**Request Headers**"},
-                    {"markdown": True, "text": "---"},
-                    {"markdown": True, "facts": header_data},
-                ],
-            }
-
-            log.debug("Created MS Teams webhook: {}", str(payload))
-
-        except Exception as err:
-            log.error("Error while creating webhook: {}", str(err))
-            payload = {}
+        header_data = [
+            {"name": k, "value": code(v)}
+            for k, v in self.headers.dict(by_alias=True).items()
+        ]
+        time_fmt = self.timestamp.strftime("%Y %m %d %H:%M:%S")
+        payload = {
+            "@type": "MessageCard",
+            "@context": "http://schema.org/extensions",
+            "themeColor": "118ab2",
+            "summary": _WEBHOOK_TITLE,
+            "sections": [
+                {
+                    "activityTitle": _WEBHOOK_TITLE,
+                    "activitySubtitle": f"{time_fmt} UTC",
+                    "activityImage": _ICON_URL,
+                    "facts": [
+                        {"name": "Query Location", "value": self.query_location},
+                        {"name": "Query Target", "value": code(self.query_target)},
+                        {"name": "Query Type", "value": self.query_type},
+                        {"name": "Query VRF", "value": self.query_vrf},
+                    ],
+                },
+                {"markdown": True, "text": "**Source Information**"},
+                {"markdown": True, "text": "---"},
+                {
+                    "markdown": True,
+                    "facts": [
+                        {"name": "IP", "value": code(self.source)},
+                        {"name": "Prefix", "value": code(self.network.prefix)},
+                        {"name": "ASN", "value": code(self.network.asn)},
+                        {"name": "Country", "value": self.network.country},
+                        {"name": "Organization", "value": self.network.org},
+                    ],
+                },
+                {"markdown": True, "text": "**Request Headers**"},
+                {"markdown": True, "text": "---"},
+                {"markdown": True, "facts": header_data},
+            ],
+        }
+        log.debug("Created MS Teams webhook: {}", str(payload))

         return payload
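For reference, a MessageCard like the one assembled above is delivered as a plain JSON POST to a Teams connector URL. A hedged sketch using httpx and a placeholder URL, neither of which is taken from this diff:

    import httpx

    # Placeholder connector URL; in hyperglass the real endpoint comes
    # from the params.logging.http configuration.
    TEAMS_URL = "https://example.webhook.office.com/webhookb2/placeholder"

    payload = {
        "@type": "MessageCard",
        "@context": "http://schema.org/extensions",
        "themeColor": "118ab2",
        "summary": "Example summary",
        "sections": [
            {
                "activityTitle": "Example title",
                "facts": [{"name": "IP", "value": "`1.1.1.1`"}],
            }
        ],
    }

    # Teams accepts the card as the JSON body of a POST.
    response = httpx.post(TEAMS_URL, json=payload)
    response.raise_for_status()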
@@ -269,54 +276,68 @@ class Webhook(HyperglassModel):
             value = f"`{value}`"
         return f"*{key}*\n{value}"

-        try:
-            header_data = []
-            for k, v in self.headers.dict(by_alias=True).items():
-                field = make_field(k, v, code=True)
-                header_data.append(field)
-
-            query_data = [
-                {
-                    "type": "mrkdwn",
-                    "text": make_field("Query Location", self.query_location),
-                },
-                {
-                    "type": "mrkdwn",
-                    "text": make_field("Query Target", self.query_target, code=True),
-                },
-                {"type": "mrkdwn", "text": make_field("Query Type", self.query_type)},
-                {"type": "mrkdwn", "text": make_field("Query VRF", self.query_vrf)},
-            ]
-
-            source_details = (
-                ("Source IP", self.source),
-                ("Source Prefix", self.network.prefix),
-                ("Source ASN", self.network.asn),
-            )
-
-            source_data = []
-            for k, v in source_details:
-                field = make_field(k, v, code=True)
-                source_data.append({"type": "mrkdwn", "text": field})
-
-            payload = {
-                "text": _WEBHOOK_TITLE,
-                "blocks": [
-                    {"type": "section", "fields": query_data},
-                    {"type": "divider"},
-                    {"type": "section", "fields": source_data},
-                    {"type": "divider"},
-                    {
-                        "type": "section",
-                        "text": {
-                            "type": "mrkdwn",
-                            "text": "*Headers*\n" + "\n".join(header_data),
-                        },
-                    },
-                ],
-            }
-            log.debug("Created Slack webhook: {}", str(payload))
-        except Exception as err:
-            log.error("Error while creating webhook: {}", str(err))
-            payload = {}
+        header_data = []
+        for k, v in self.headers.dict(by_alias=True).items():
+            field = make_field(k, v, code=True)
+            header_data.append(field)
+
+        query_data = [
+            {
+                "type": "mrkdwn",
+                "text": make_field("Query Location", self.query_location),
+            },
+            {
+                "type": "mrkdwn",
+                "text": make_field("Query Target", self.query_target, code=True),
+            },
+            {"type": "mrkdwn", "text": make_field("Query Type", self.query_type)},
+            {"type": "mrkdwn", "text": make_field("Query VRF", self.query_vrf)},
+        ]
+
+        source_data = [
+            {
+                "type": "mrkdwn",
+                "text": make_field("Source IP", self.source, code=True),
+            },
+            {
+                "type": "mrkdwn",
+                "text": make_field("Source Prefix", self.network.prefix, code=True),
+            },
+            {
+                "type": "mrkdwn",
+                "text": make_field("Source ASN", self.network.asn, code=True),
+            },
+            {
+                "type": "mrkdwn",
+                "text": make_field("Source Country", self.network.country),
+            },
+            {
+                "type": "mrkdwn",
+                "text": make_field("Source Organization", self.network.org),
+            },
+        ]
+
+        time_fmt = self.timestamp.strftime("%Y %m %d %H:%M:%S")
+
+        payload = {
+            "text": _WEBHOOK_TITLE,
+            "blocks": [
+                {
+                    "type": "section",
+                    "text": {"type": "mrkdwn", "text": f"*{time_fmt} UTC*"},
+                },
+                {"type": "section", "fields": query_data},
+                {"type": "divider"},
+                {"type": "section", "fields": source_data},
+                {"type": "divider"},
+                {
+                    "type": "section",
+                    "text": {
+                        "type": "mrkdwn",
+                        "text": "*Headers*\n" + "\n".join(header_data),
+                    },
+                },
+            ],
+        }
+        log.debug("Created Slack webhook: {}", str(payload))

         return payload
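The make_field helper, whose tail appears at the top of this hunk, fully determines the mrkdwn text the Block Kit fields embed. Re-implemented standalone for verification:

    # Reconstruction of the make_field helper shown in the hunk above.
    def make_field(key, value, code=False):
        if code:
            value = f"`{value}`"
        return f"*{key}*\n{value}"

    print(make_field("Source ASN", "13335", code=True))
    # *Source ASN*
    # `13335`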