mirror of
https://github.com/thatmattlove/hyperglass.git
synced 2026-01-17 00:38:06 +00:00
begin initial rewrite
This commit is contained in:
parent
5e5acae4aa
commit
8afc0a616e
322 changed files with 981 additions and 60498 deletions
23
.flake8
23
.flake8
|
|
@ -1,23 +0,0 @@
|
|||
[flake8]
|
||||
max-line-length=88
|
||||
count=True
|
||||
show-source=False
|
||||
statistics=True
|
||||
exclude=.git, __pycache__, hyperglass/api/examples/*.py, hyperglass/compat/_sshtunnel.py, test.py
|
||||
filename=*.py
|
||||
per-file-ignores=
|
||||
hyperglass/main.py:E402
|
||||
# Disable classmethod warning for validator decorators
|
||||
hyperglass/models/*.py:N805,E0213,R0903,E501,C0301
|
||||
hyperglass/models/api/*.py:N805,E0213,R0903,E501,C0301
|
||||
hyperglass/models/commands/*.py:N805,E0213,R0903,E501,C0301
|
||||
hyperglass/parsing/models/*.py:N805,E0213,R0903
|
||||
hyperglass/configuration/models/*.py:N805,E0213,R0903,E501,C0301
|
||||
# Disable unused import warning for modules
|
||||
hyperglass/*/__init__.py:F401
|
||||
hyperglass/models/*/__init__.py:F401
|
||||
ignore=W503,C0330,R504,D202,S403,S301,S404
|
||||
select=B, BLK, C, D, E, F, I, II, N, P, PIE, S, R, W
|
||||
disable-noqa=False
|
||||
hang-closing=False
|
||||
max-complexity=10
|
||||
48
.github/workflows/backend.yml
vendored
48
.github/workflows/backend.yml
vendored
|
|
@ -1,48 +0,0 @@
|
|||
name: Backend Testing
|
||||
on: [push, pull_request]
|
||||
|
||||
jobs:
|
||||
backend:
|
||||
name: Backend Tests
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
node-version: [14.x]
|
||||
redis-version: [5, 6]
|
||||
poetry-version: [1.1.4]
|
||||
python-version: [3.6, 3.8]
|
||||
os: [ubuntu-20.04]
|
||||
runs-on: ${{ matrix.os }}
|
||||
|
||||
steps:
|
||||
- name: Git Checkout
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Install Python
|
||||
uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
|
||||
- name: Install Poetry
|
||||
uses: abatilo/actions-poetry@v2.0.0
|
||||
with:
|
||||
poetry-version: ${{ matrix.poetry-version }}
|
||||
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v1
|
||||
with:
|
||||
node-version: ${{ matrix.node-version }}
|
||||
|
||||
- name: Start Redis
|
||||
uses: supercharge/redis-github-action@1.1.0
|
||||
with:
|
||||
redis-version: ${{ matrix.redis-version }}
|
||||
|
||||
- name: Install Python Dependencies
|
||||
run: poetry install
|
||||
|
||||
- name: Run Flake8
|
||||
run: poetry run flake8 hyperglass
|
||||
|
||||
- name: Run hyperglass
|
||||
run: '.tests/ga-backend-app.sh'
|
||||
35
.github/workflows/frontend.yml
vendored
35
.github/workflows/frontend.yml
vendored
|
|
@ -1,35 +0,0 @@
|
|||
name: Frontend Testing
|
||||
on: [push, pull_request]
|
||||
|
||||
jobs:
|
||||
frontend:
|
||||
name: Frontend Tests
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
node-version: [14.x]
|
||||
os: [ubuntu-20.04]
|
||||
runs-on: ${{ matrix.os }}
|
||||
env:
|
||||
working-directory: ./hyperglass/ui
|
||||
steps:
|
||||
- name: Git Checkout
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: ${{ matrix.node-version }}
|
||||
|
||||
- name: Install Dependencies
|
||||
working-directory: ${{ env.working-directory }}
|
||||
run: yarn install
|
||||
|
||||
- name: TypeScript
|
||||
run: ./.tests/pre-commit-frontend.sh --typescript
|
||||
|
||||
- name: ESLint
|
||||
run: ./.tests/pre-commit-frontend.sh --eslint
|
||||
|
||||
- name: Prettier
|
||||
run: ./.tests/pre-commit-frontend.sh --prettier
|
||||
18
.github/workflows/installer.yml
vendored
18
.github/workflows/installer.yml
vendored
|
|
@ -1,18 +0,0 @@
|
|||
name: Installer Testing
|
||||
on: [push, pull_request]
|
||||
|
||||
jobs:
|
||||
installer:
|
||||
name: Installer Tests
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os: [ubuntu-20.04]
|
||||
runs-on: ${{ matrix.os }}
|
||||
|
||||
steps:
|
||||
- name: Git Checkout
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Run hyperglass
|
||||
run: "sudo bash ./install.sh"
|
||||
37
.github/workflows/release-pypi.yml
vendored
37
.github/workflows/release-pypi.yml
vendored
|
|
@ -1,37 +0,0 @@
|
|||
name: Release to PyPI
|
||||
on:
|
||||
release:
|
||||
types: [published]
|
||||
|
||||
jobs:
|
||||
release:
|
||||
name: Release to PyPI
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
python-version: [3.6]
|
||||
poetry-version: [1.1.4]
|
||||
os: [ubuntu-latest]
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Git Checkout
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Install Python
|
||||
uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
|
||||
- name: Install Poetry
|
||||
uses: abatilo/actions-poetry@v2.0.0
|
||||
with:
|
||||
poetry-version: ${{ matrix.poetry-version }}
|
||||
|
||||
- name: Build hyperglass
|
||||
run: |
|
||||
poetry config pypi-token.pypi ${{ secrets.PYPI_API_TOKEN }}
|
||||
poetry build
|
||||
|
||||
- name: Publish hyperglass release
|
||||
run: poetry publish
|
||||
14
.isort.cfg
14
.isort.cfg
|
|
@ -1,14 +0,0 @@
|
|||
[settings]
|
||||
skip_glob = hyperglass/api/examples/*.py
|
||||
line_length = 88
|
||||
indent = ' '
|
||||
include_trailing_comma = True
|
||||
multi_line_output = 3
|
||||
balanced_wrapping = True
|
||||
length_sort = True
|
||||
force_single_line = False
|
||||
import_heading_stdlib = Standard Library
|
||||
import_heading_thirdparty = Third Party
|
||||
import_heading_firstparty = Project
|
||||
import_heading_localfolder = Local
|
||||
known_third_party = starlette,fastapi,inquirer
|
||||
|
|
@ -1,37 +0,0 @@
|
|||
repos:
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v2.3.0
|
||||
hooks:
|
||||
- id: flake8
|
||||
stages:
|
||||
- commit
|
||||
- repo: local
|
||||
hooks:
|
||||
- id: typescript
|
||||
name: TypeScript
|
||||
files: 'hyperglass/ui/*'
|
||||
exclude: 'hyperglass/ui/node_modules|hyperglass/ui/.next'
|
||||
stages:
|
||||
- commit
|
||||
entry: ./.tests/pre-commit-frontend.sh --typescript
|
||||
language: script
|
||||
- repo: local
|
||||
hooks:
|
||||
- id: eslint
|
||||
name: ESLint
|
||||
files: 'hyperglass/ui/*'
|
||||
exclude: 'hyperglass/ui/node_modules|hyperglass/ui/.next'
|
||||
stages:
|
||||
- commit
|
||||
entry: ./.tests/pre-commit-frontend.sh --eslint
|
||||
language: script
|
||||
- repo: local
|
||||
hooks:
|
||||
- id: prettier
|
||||
name: Prettier
|
||||
files: 'hyperglass/ui/*'
|
||||
exclude: 'hyperglass/ui/node_modules|hyperglass/ui/.next'
|
||||
stages:
|
||||
- commit
|
||||
entry: ./.tests/pre-commit-frontend.sh --prettier
|
||||
language: script
|
||||
|
|
@ -1,52 +0,0 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
echo "[INFO] Starting Redis..."
|
||||
redis-server &
|
||||
|
||||
cd /tmp/hyperglass
|
||||
|
||||
echo "[INFO] Starting setup..."
|
||||
poetry run hyperglass setup -d
|
||||
echo "[SUCCESS] Setup completed."
|
||||
sleep 2
|
||||
|
||||
echo "listen_address: 127.0.0.1" >> /root/hyperglass/hyperglass.yaml
|
||||
|
||||
echo "[INFO] Starting UI build."
|
||||
poetry run hyperglass build-ui
|
||||
|
||||
if [[ ! $? == 0 ]]; then
|
||||
echo "[ERROR] Failed to start hyperglass."
|
||||
exit 1
|
||||
else
|
||||
echo "[SUCCESS] UI build completed."
|
||||
fi
|
||||
|
||||
echo "[INFO] Starting hyperglass..."
|
||||
poetry run hyperglass start &> /var/log/hyperglassci.log &
|
||||
sleep 180
|
||||
|
||||
if [[ ! $? == 0 ]]; then
|
||||
echo "[ERROR] Failed to start hyperglass."
|
||||
exit 1
|
||||
else
|
||||
echo "[SUCCESS] Started hyperglass."
|
||||
fi
|
||||
|
||||
echo "[INFO] Running HTTP test..."
|
||||
|
||||
STATUS=$(curl -s -o /dev/null -w "%{http_code}" http://127.0.0.1:8001)
|
||||
|
||||
echo "[INFO] Status code: $STATUS"
|
||||
|
||||
if [[ ! $? == 0 ]]; then
|
||||
echo "[ERROR] HTTP test failed."
|
||||
exit 1
|
||||
elif [[ ! "$STATUS" == "200" ]]; then
|
||||
echo "[ERROR] HTTP test failed. Startup log:"
|
||||
cat /var/log/hyperglassci.log
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "[SUCCESS] Tests ran successfully."
|
||||
exit 0
|
||||
|
|
@ -1,31 +0,0 @@
|
|||
FROM ubuntu:bionic as base
|
||||
ENV LC_ALL=C.UTF-8
|
||||
ENV LANG=C.UTF-8
|
||||
WORKDIR /tmp
|
||||
RUN apt-get update \
|
||||
&& apt-get install -y git curl net-tools \
|
||||
&& curl -sL https://deb.nodesource.com/setup_14.x | bash - \
|
||||
&& curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add - \
|
||||
&& echo "deb https://dl.yarnpkg.com/debian/ stable main" | tee /etc/apt/sources.list.d/yarn.list \
|
||||
&& apt-get update \
|
||||
&& apt-get install -y python3 python3-pip python3-venv redis-server nodejs yarn \
|
||||
# && curl -sSL https://raw.githubusercontent.com/python-poetry/poetry/master/get-poetry.py | python3 \
|
||||
#
|
||||
# Pinning Poetry installer to this specific version. As of 2020 07 24, the script from master
|
||||
# fails to install due to Python 2's executable matching first. See #2106
|
||||
#
|
||||
&& curl -sSL https://raw.githubusercontent.com/python-poetry/poetry/e70ee3112ab06374dfef4ab84e6dded2382cc7dd/get-poetry.py | python3 \
|
||||
&& python3 --version \
|
||||
&& echo "NodeJS $(node --version)" \
|
||||
&& echo "Yarn $(yarn --version)"
|
||||
COPY ./ /tmp/hyperglass
|
||||
ENV PATH=$PATH:/root/.poetry/bin
|
||||
|
||||
FROM base as install
|
||||
WORKDIR /tmp/hyperglass
|
||||
RUN poetry install --no-ansi
|
||||
|
||||
FROM install as setup
|
||||
WORKDIR /tmp/hyperglass
|
||||
COPY .tests/app/setup.sh /tmp/setup.sh
|
||||
RUN ls -lsah /tmp
|
||||
|
|
@ -1,61 +0,0 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
LOG_FILE="$HOME/hyperglass-ci.log"
|
||||
|
||||
export POETRY_HYPERGLASS_UI_BUILD_TIMEOUT="600"
|
||||
echo "[INFO] Set build timeout to $POETRY_HYPERGLASS_UI_BUILD_TIMEOUT seconds"
|
||||
|
||||
echo "[INFO] Starting setup..."
|
||||
poetry run hyperglass setup -d &> $LOG_FILE
|
||||
echo "[SUCCESS] Setup completed."
|
||||
sleep 2
|
||||
|
||||
echo "[INFO] Copying devices.yaml file..."
|
||||
cp ./hyperglass/examples/devices.yaml $HOME/hyperglass/devices.yaml
|
||||
|
||||
echo "[INFO] Setting listen_address..."
|
||||
echo "listen_address: 127.0.0.1" >> $HOME/hyperglass/hyperglass.yaml
|
||||
|
||||
echo "[INFO] Starting UI build."
|
||||
poetry run hyperglass build-ui &> $LOG_FILE
|
||||
|
||||
if [[ ! $? == 0 ]]; then
|
||||
echo "[ERROR] Failed to build hyperglass ui."
|
||||
cat /tmp/hyperglass.log
|
||||
cat $LOG_FILE
|
||||
exit 1
|
||||
else
|
||||
echo "[SUCCESS] UI build completed."
|
||||
fi
|
||||
|
||||
echo "[INFO] Starting hyperglass..."
|
||||
poetry run hyperglass start &> $LOG_FILE &
|
||||
sleep 120
|
||||
|
||||
if [[ ! $? == 0 ]]; then
|
||||
echo "[ERROR] Failed to start hyperglass."
|
||||
cat /tmp/hyperglass.log
|
||||
cat $LOG_FILE
|
||||
exit 1
|
||||
else
|
||||
echo "[SUCCESS] Started hyperglass."
|
||||
fi
|
||||
|
||||
echo "[INFO] Running HTTP test..."
|
||||
|
||||
STATUS=$(curl -s -o /dev/null -w "%{http_code}" http://127.0.0.1:8001)
|
||||
|
||||
echo "[INFO] Status code: $STATUS"
|
||||
|
||||
if [[ ! $? == 0 ]]; then
|
||||
echo "[ERROR] HTTP test failed."
|
||||
exit 1
|
||||
elif [[ ! "$STATUS" == "200" ]]; then
|
||||
echo "[ERROR] HTTP test failed."
|
||||
cat /tmp/hyperglass.log
|
||||
cat $LOG_FILE
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "[SUCCESS] Tests ran successfully."
|
||||
exit 0
|
||||
|
|
@ -1,5 +0,0 @@
|
|||
FROM ubuntu:bionic as base
|
||||
WORKDIR /tmp
|
||||
COPY .tests/install/ubuntu/setup.sh /tmp/init.sh
|
||||
COPY ./install.sh /tmp/install.sh
|
||||
RUN bash /tmp/init.sh
|
||||
|
|
@ -1,17 +0,0 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
echo "[INFO] Disabling multiverse repos..."
|
||||
sed -i -e '/multiverse/s/^#*/#\ /g' /etc/apt/sources.list
|
||||
cat /etc/apt/sources.list
|
||||
|
||||
echo "[INFO] Updating package repos..."
|
||||
apt-get update &> /dev/null
|
||||
|
||||
echo "[INFO] Installing apt-utils..."
|
||||
apt-get install -y apt-utils > /dev/null
|
||||
|
||||
echo "[INFO] Installing base dependencies..."
|
||||
apt-get install -y curl git gnupg dialog build-essential > /dev/null
|
||||
|
||||
echo '[SUCCESS] Completed build'
|
||||
exit 0
|
||||
|
|
@ -1,38 +0,0 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
UI_DIR="$(pwd)/hyperglass/ui"
|
||||
|
||||
check_typescript () {
|
||||
cd $UI_DIR
|
||||
node_modules/.bin/tsc --noEmit
|
||||
}
|
||||
|
||||
check_eslint () {
|
||||
cd $UI_DIR
|
||||
node_modules/.bin/eslint . --ext .ts --ext .tsx
|
||||
}
|
||||
|
||||
check_prettier () {
|
||||
cd $UI_DIR
|
||||
node_modules/.bin/prettier -c .
|
||||
}
|
||||
|
||||
for arg in "$@"
|
||||
do
|
||||
if [ "$arg" == "--typescript" ]
|
||||
then
|
||||
check_typescript
|
||||
exit $?
|
||||
elif [ "$arg" == "--eslint" ]
|
||||
then
|
||||
check_eslint
|
||||
exit $?
|
||||
elif [ "$arg" == "--prettier" ]
|
||||
then
|
||||
check_prettier
|
||||
exit $?
|
||||
else
|
||||
echo "Arguments --typescript, --eslint, or --prettier required."
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
47
cmd/cmd.go
Normal file
47
cmd/cmd.go
Normal file
|
|
@ -0,0 +1,47 @@
|
|||
package cmd
|
||||
|
||||
import (
|
||||
"github.com/k0kubun/pp/v3"
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/thatmattlove/hyperglass/core/api"
|
||||
"github.com/thatmattlove/hyperglass/core/infrastructure/database"
|
||||
"github.com/thatmattlove/hyperglass/core/migrations"
|
||||
"github.com/thatmattlove/hyperglass/core/system"
|
||||
)
|
||||
|
||||
func start(cmd *cobra.Command, args []string) {
|
||||
cobra.CheckErr(api.Start())
|
||||
}
|
||||
|
||||
func settings(cmd *cobra.Command, args []string) {
|
||||
db, err := database.New()
|
||||
cobra.CheckErr(err)
|
||||
settings := db.Settings()
|
||||
pp.Print(settings)
|
||||
}
|
||||
|
||||
func initializeCmd() {
|
||||
err := system.InitializeDirs()
|
||||
cobra.CheckErr(err)
|
||||
err = migrations.AutoMigrate()
|
||||
cobra.CheckErr(err)
|
||||
}
|
||||
|
||||
func Main() error {
|
||||
initializeCmd()
|
||||
root := &cobra.Command{
|
||||
Use: "hyperglass",
|
||||
Short: "hyperglass is the network looking glass that tries to make the internet better.",
|
||||
}
|
||||
root.AddCommand(&cobra.Command{
|
||||
Use: "start",
|
||||
Short: "Start hyperglass",
|
||||
Run: start,
|
||||
})
|
||||
root.AddCommand(&cobra.Command{
|
||||
Use: "settings",
|
||||
Short: "Show hyperglass settings",
|
||||
Run: settings,
|
||||
})
|
||||
return root.Execute()
|
||||
}
|
||||
25
core/api/api.go
Normal file
25
core/api/api.go
Normal file
|
|
@ -0,0 +1,25 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/gofiber/fiber/v2"
|
||||
"github.com/thatmattlove/hyperglass/core/controllers"
|
||||
"github.com/thatmattlove/hyperglass/core/infrastructure/database"
|
||||
)
|
||||
|
||||
func Start() (err error) {
|
||||
db, err := database.New()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
settings := db.Settings()
|
||||
config := fiber.Config{
|
||||
ServerHeader: "hyperglass",
|
||||
AppName: fmt.Sprintf("%s Looking Glass", settings.OrganizationName),
|
||||
Network: "tcp",
|
||||
}
|
||||
app := fiber.New(config)
|
||||
app.Post("/api/query", controllers.QueryController)
|
||||
return app.Listen(":8080")
|
||||
}
|
||||
63
core/connections/ssh/auth.go
Normal file
63
core/connections/ssh/auth.go
Normal file
|
|
@ -0,0 +1,63 @@
|
|||
package terminal
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/melbahja/goph"
|
||||
)
|
||||
|
||||
type AuthMethod int
|
||||
|
||||
const (
|
||||
AUTH_PASSWORD AuthMethod = 0
|
||||
AUTH_KEY AuthMethod = 1
|
||||
)
|
||||
|
||||
type PasswordAuth struct {
|
||||
Password string
|
||||
}
|
||||
|
||||
type KeyAuth struct {
|
||||
Key string
|
||||
KeyPassword string
|
||||
}
|
||||
|
||||
type AuthOptions struct {
|
||||
Method AuthMethod
|
||||
Username string
|
||||
Password string
|
||||
Key string
|
||||
KeyPassword string
|
||||
}
|
||||
|
||||
func (a *PasswordAuth) Backend() (*goph.Auth, error) {
|
||||
auth := goph.Password(a.Password)
|
||||
return &auth, nil
|
||||
}
|
||||
|
||||
func (a *KeyAuth) Backend() (*goph.Auth, error) {
|
||||
auth, err := goph.Key(a.Key, a.KeyPassword)
|
||||
return &auth, err
|
||||
}
|
||||
|
||||
type Auth interface {
|
||||
Backend() (*goph.Auth, error)
|
||||
}
|
||||
|
||||
func NewSSHAuth(opts *AuthOptions) (Auth, error) {
|
||||
switch opts.Method {
|
||||
case AUTH_PASSWORD:
|
||||
auth := &PasswordAuth{
|
||||
Password: opts.Password,
|
||||
}
|
||||
return auth, nil
|
||||
case AUTH_KEY:
|
||||
auth := &KeyAuth{
|
||||
Key: opts.Key,
|
||||
KeyPassword: opts.KeyPassword,
|
||||
}
|
||||
return auth, nil
|
||||
default:
|
||||
return nil, fmt.Errorf("unsupported SSH auth method '%d'", opts.Method)
|
||||
}
|
||||
}
|
||||
80
core/connections/ssh/ssh.go
Normal file
80
core/connections/ssh/ssh.go
Normal file
|
|
@ -0,0 +1,80 @@
|
|||
package terminal
|
||||
|
||||
import (
|
||||
"log"
|
||||
"time"
|
||||
|
||||
"github.com/melbahja/goph"
|
||||
"golang.org/x/crypto/ssh"
|
||||
)
|
||||
|
||||
type Connection struct {
|
||||
Auth *Auth
|
||||
Address string
|
||||
Port uint
|
||||
Client *goph.Client
|
||||
}
|
||||
|
||||
type ConnectionOptions struct {
|
||||
Method AuthMethod
|
||||
Address string
|
||||
Port uint
|
||||
Username string
|
||||
Password string
|
||||
Key string
|
||||
KeyPassword string
|
||||
Timeout time.Duration
|
||||
}
|
||||
|
||||
func (conn *Connection) Run(cmd string) (res string, err error) {
|
||||
defer conn.Client.Close()
|
||||
b, err := conn.Client.Run(cmd)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
res = string(b)
|
||||
return
|
||||
}
|
||||
|
||||
func NewConnection(opts *ConnectionOptions) (*Connection, error) {
|
||||
sshAuth, err := NewSSHAuth(&AuthOptions{
|
||||
Method: opts.Method,
|
||||
Username: opts.Username,
|
||||
Password: opts.Password,
|
||||
Key: opts.Key,
|
||||
KeyPassword: opts.KeyPassword,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
auth, err := sshAuth.Backend()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
cfg := &goph.Config{
|
||||
Auth: *auth,
|
||||
Addr: opts.Address,
|
||||
Port: opts.Port,
|
||||
User: opts.Username,
|
||||
Timeout: opts.Timeout,
|
||||
BannerCallback: func(msg string) error {
|
||||
log.Println(msg)
|
||||
return nil
|
||||
},
|
||||
Callback: ssh.InsecureIgnoreHostKey(),
|
||||
}
|
||||
|
||||
client, err := goph.NewConn(cfg)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
connection := &Connection{
|
||||
Auth: &sshAuth,
|
||||
Address: opts.Address,
|
||||
Port: opts.Port,
|
||||
Client: client,
|
||||
}
|
||||
return connection, nil
|
||||
}
|
||||
30
core/controllers/query.go
Normal file
30
core/controllers/query.go
Normal file
|
|
@ -0,0 +1,30 @@
|
|||
package controllers
|
||||
|
||||
import (
|
||||
"github.com/go-playground/validator/v10"
|
||||
"github.com/gofiber/fiber/v2"
|
||||
"github.com/thatmattlove/hyperglass/core/entities"
|
||||
"github.com/thatmattlove/hyperglass/core/interfaces"
|
||||
)
|
||||
|
||||
func QueryController(ctx *fiber.Ctx) error {
|
||||
var query *entities.QueryRequest
|
||||
err := ctx.BodyParser(&query)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
validate := validator.New()
|
||||
err = validate.Struct(query)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
iface, err := interfaces.NewQueryInterface(ctx, query)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
res, err := iface.Query()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return ctx.Status(200).JSON(res)
|
||||
}
|
||||
18
core/entities/query.go
Normal file
18
core/entities/query.go
Normal file
|
|
@ -0,0 +1,18 @@
|
|||
package entities
|
||||
|
||||
import "time"
|
||||
|
||||
type QueryRequest struct {
|
||||
Devices []uint `json:"devices" validate:"min=1"`
|
||||
Target string `json:"target" validate:"cidr|ip"`
|
||||
Type string `json:"type" validate:"required"`
|
||||
}
|
||||
|
||||
type PlainQueryResponse struct {
|
||||
Random string `json:"random" validate:"required"`
|
||||
Cached bool `json:"cached" validate:"boolean"`
|
||||
Runtime float64 `json:"runtime" validate:"required"`
|
||||
Timestamp time.Time `json:"timestamp" validate:"required"`
|
||||
Format string `json:"format" validate:"oneof=application/json text/plain"`
|
||||
Output string `json:"output" validate:"required"`
|
||||
}
|
||||
87
core/infrastructure/cache/cache.go
vendored
Normal file
87
core/infrastructure/cache/cache.go
vendored
Normal file
|
|
@ -0,0 +1,87 @@
|
|||
package cache
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path"
|
||||
|
||||
"go.etcd.io/bbolt"
|
||||
)
|
||||
|
||||
type Cache struct {
|
||||
File *os.File
|
||||
Backend *bbolt.DB
|
||||
Bucket []byte
|
||||
}
|
||||
|
||||
func (c *Cache) Get(key string) (value string, err error) {
|
||||
err = c.Transaction(func(db *bbolt.DB) error {
|
||||
err = db.View(func(tx *bbolt.Tx) error {
|
||||
b := tx.Bucket(c.Bucket)
|
||||
v := b.Get([]byte(key))
|
||||
value = string(v)
|
||||
return nil
|
||||
})
|
||||
return err
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
func (c *Cache) Set(key, value string) (err error) {
|
||||
return c.Transaction(func(db *bbolt.DB) error {
|
||||
return db.Update(func(tx *bbolt.Tx) error {
|
||||
b := tx.Bucket(c.Bucket)
|
||||
return b.Put([]byte(key), []byte(value))
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
func (c *Cache) Transaction(cb func(db *bbolt.DB) error) (err error) {
|
||||
db, err := bbolt.Open(c.File.Name(), 0666, nil)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
defer db.Close()
|
||||
defer c.File.Close()
|
||||
return cb(db)
|
||||
}
|
||||
|
||||
func (c *Cache) Destroy(name string) (err error) {
|
||||
err = c.Backend.Close()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
err = os.Remove(c.File.Name())
|
||||
return
|
||||
}
|
||||
|
||||
func New(name string) (c *Cache, err error) {
|
||||
cacheDir, err := os.MkdirTemp("", "hyperglass-cache-*")
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
cacheFile, err := os.Create(path.Join(cacheDir, "hyperglass.cache"))
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
db, err := bbolt.Open(cacheFile.Name(), 0666, nil)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
defer db.Close()
|
||||
defer cacheFile.Close()
|
||||
bucket := []byte(name)
|
||||
err = db.Update(func(tx *bbolt.Tx) (err error) {
|
||||
_, err = tx.CreateBucket(bucket)
|
||||
return
|
||||
})
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
c = &Cache{
|
||||
Backend: db,
|
||||
File: cacheFile,
|
||||
Bucket: bucket,
|
||||
}
|
||||
return
|
||||
}
|
||||
28
core/infrastructure/cache/cache_test.go
vendored
Normal file
28
core/infrastructure/cache/cache_test.go
vendored
Normal file
|
|
@ -0,0 +1,28 @@
|
|||
package cache_test
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/thatmattlove/hyperglass/core/infrastructure/cache"
|
||||
)
|
||||
|
||||
func Test_Cache(t *testing.T) {
|
||||
name := "test"
|
||||
c, err := cache.New(name)
|
||||
assert.NoError(t, err)
|
||||
|
||||
t.Run("get/set values", func(t *testing.T) {
|
||||
key := "test-key"
|
||||
value := "test-value"
|
||||
err := c.Set(key, value)
|
||||
assert.NoError(t, err)
|
||||
valueC, err := c.Get(key)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, value, valueC)
|
||||
})
|
||||
t.Cleanup(func() {
|
||||
err := c.Destroy(name)
|
||||
assert.NoError(t, err)
|
||||
})
|
||||
}
|
||||
43
core/infrastructure/database/database.go
Normal file
43
core/infrastructure/database/database.go
Normal file
|
|
@ -0,0 +1,43 @@
|
|||
package database
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path"
|
||||
|
||||
"github.com/thatmattlove/hyperglass/core/models/settings"
|
||||
"github.com/thatmattlove/hyperglass/core/system"
|
||||
"gorm.io/driver/sqlite"
|
||||
"gorm.io/gorm"
|
||||
)
|
||||
|
||||
type IDB struct {
|
||||
File string
|
||||
DB *gorm.DB
|
||||
}
|
||||
|
||||
func (idb *IDB) Settings() (settings *settings.Settings) {
|
||||
idb.DB.Limit(1).Find(&settings)
|
||||
return
|
||||
}
|
||||
|
||||
func New() (idb *IDB, err error) {
|
||||
appDir, err := system.GetAppDir()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
dbFile := path.Join(appDir, "hyperglass.db")
|
||||
if _, err = os.Stat(dbFile); os.IsNotExist(err) {
|
||||
_, err = os.Create(dbFile)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
}
|
||||
db, err := gorm.Open(sqlite.Open(dbFile), &gorm.Config{
|
||||
DisableForeignKeyConstraintWhenMigrating: true,
|
||||
})
|
||||
idb = &IDB{
|
||||
File: dbFile,
|
||||
DB: db,
|
||||
}
|
||||
return
|
||||
}
|
||||
36
core/interfaces/query.go
Normal file
36
core/interfaces/query.go
Normal file
|
|
@ -0,0 +1,36 @@
|
|||
package interfaces
|
||||
|
||||
import (
|
||||
"time"
|
||||
|
||||
"github.com/go-playground/validator/v10"
|
||||
"github.com/gofiber/fiber/v2"
|
||||
"github.com/thatmattlove/hyperglass/core/entities"
|
||||
)
|
||||
|
||||
type QueryInterface struct {
|
||||
Ctx *fiber.Ctx
|
||||
Request *entities.QueryRequest
|
||||
}
|
||||
|
||||
func (qi *QueryInterface) Query() (res any, err error) {
|
||||
res = &entities.PlainQueryResponse{
|
||||
Random: "random string",
|
||||
Cached: false,
|
||||
Runtime: 30,
|
||||
Timestamp: time.Now(),
|
||||
Format: "text/plain",
|
||||
Output: "some output",
|
||||
}
|
||||
validate := validator.New()
|
||||
err = validate.Struct(res)
|
||||
return
|
||||
}
|
||||
|
||||
func NewQueryInterface(ctx *fiber.Ctx, req *entities.QueryRequest) (iface *QueryInterface, err error) {
|
||||
iface = &QueryInterface{
|
||||
Ctx: ctx,
|
||||
Request: req,
|
||||
}
|
||||
return
|
||||
}
|
||||
39
core/migrations/auto.go
Normal file
39
core/migrations/auto.go
Normal file
|
|
@ -0,0 +1,39 @@
|
|||
package migrations
|
||||
|
||||
import (
|
||||
"errors"
|
||||
|
||||
"github.com/thatmattlove/hyperglass/core/infrastructure/database"
|
||||
"github.com/thatmattlove/hyperglass/core/models"
|
||||
"github.com/thatmattlove/hyperglass/core/models/settings"
|
||||
"gorm.io/gorm"
|
||||
)
|
||||
|
||||
func AutoMigrate() (err error) {
|
||||
db, err := database.New()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
err = db.DB.AutoMigrate(
|
||||
settings.Logging{},
|
||||
settings.Message{},
|
||||
models.Group{},
|
||||
models.Credential{},
|
||||
models.Device{},
|
||||
models.Proxy{},
|
||||
)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
if err = db.DB.AutoMigrate(&settings.Settings{}); err == nil && db.DB.Migrator().HasTable(&settings.Settings{}) {
|
||||
if err := db.DB.First(&settings.Settings{}).Error; errors.Is(err, gorm.ErrRecordNotFound) {
|
||||
s, err := settings.Seed()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
tx := db.DB.Create(s)
|
||||
return tx.Error
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
11
core/models/common.go
Normal file
11
core/models/common.go
Normal file
|
|
@ -0,0 +1,11 @@
|
|||
package models
|
||||
|
||||
import "gorm.io/gorm"
|
||||
|
||||
type Model struct {
|
||||
// ID uuid.UUID `gorm:"primaryKey; unique; type:uuid; column:id; default:uuid_generate_v4()"`
|
||||
// CreatedAt time.Time
|
||||
// UpdatedAt time.Time
|
||||
// DeletedAt *time.Time `sql:"index"`
|
||||
gorm.Model
|
||||
}
|
||||
15
core/models/credential.go
Normal file
15
core/models/credential.go
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
package models
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
)
|
||||
|
||||
type Credential struct {
|
||||
Model
|
||||
Username string
|
||||
Mode uint `gorm:"default:1"`
|
||||
Password sql.NullString
|
||||
Key sql.NullString
|
||||
Devices []Device
|
||||
Proxies []Proxy
|
||||
}
|
||||
20
core/models/device.go
Normal file
20
core/models/device.go
Normal file
|
|
@ -0,0 +1,20 @@
|
|||
package models
|
||||
|
||||
import (
|
||||
"github.com/google/uuid"
|
||||
)
|
||||
|
||||
type Device struct {
|
||||
Model
|
||||
Name string
|
||||
Description string
|
||||
Address string
|
||||
Port uint
|
||||
Platform string
|
||||
GroupID uuid.UUID
|
||||
Group Group
|
||||
CredentialID uuid.UUID
|
||||
Credential Credential
|
||||
ProxyID *uuid.UUID
|
||||
Proxy *Proxy
|
||||
}
|
||||
7
core/models/group.go
Normal file
7
core/models/group.go
Normal file
|
|
@ -0,0 +1,7 @@
|
|||
package models
|
||||
|
||||
type Group struct {
|
||||
Model
|
||||
Name string
|
||||
Devices []Device
|
||||
}
|
||||
18
core/models/proxy.go
Normal file
18
core/models/proxy.go
Normal file
|
|
@ -0,0 +1,18 @@
|
|||
package models
|
||||
|
||||
import (
|
||||
"github.com/google/uuid"
|
||||
)
|
||||
|
||||
type Proxy struct {
|
||||
Model
|
||||
Name string
|
||||
Address string
|
||||
CredentialID uuid.UUID
|
||||
Credential Credential
|
||||
Devices []Device
|
||||
}
|
||||
|
||||
func (Proxy) TableName() string {
|
||||
return "proxies"
|
||||
}
|
||||
44
core/models/settings/logging.go
Normal file
44
core/models/settings/logging.go
Normal file
|
|
@ -0,0 +1,44 @@
|
|||
package settings
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
|
||||
"code.cloudfoundry.org/bytefmt"
|
||||
"github.com/thatmattlove/hyperglass/core/system"
|
||||
"gorm.io/gorm"
|
||||
)
|
||||
|
||||
const (
|
||||
LOG_FORMAT_JSON string = "json"
|
||||
LOG_FORMAT_TEXT string = "text"
|
||||
)
|
||||
|
||||
type Logging struct {
|
||||
gorm.Model
|
||||
Directory string
|
||||
Format string
|
||||
MaxSize uint64
|
||||
EnableSyslog sql.NullBool `gorm:"default:false"`
|
||||
SyslogHost *string
|
||||
SyslogPort *int
|
||||
EnableHTTP sql.NullBool `gorm:"default:false"`
|
||||
HTTPHost *string
|
||||
SettingsID uint
|
||||
}
|
||||
|
||||
func SeedLogging() (logging Logging, err error) {
|
||||
loggingDir, err := system.GetLogDir()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
defaultLogSize, err := bytefmt.ToBytes("50MB")
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
logging = Logging{
|
||||
Directory: loggingDir,
|
||||
Format: LOG_FORMAT_TEXT,
|
||||
MaxSize: defaultLogSize,
|
||||
}
|
||||
return
|
||||
}
|
||||
49
core/models/settings/messages.go
Normal file
49
core/models/settings/messages.go
Normal file
|
|
@ -0,0 +1,49 @@
|
|||
package settings
|
||||
|
||||
import "gorm.io/gorm"
|
||||
|
||||
type Message struct {
|
||||
gorm.Model
|
||||
Name string
|
||||
Value string
|
||||
SettingsID uint
|
||||
}
|
||||
|
||||
const (
|
||||
MESSAGE_MissingField string = "MissingField"
|
||||
MESSAGE_TargetNotAllowed string = "TargetNotAllowed"
|
||||
MESSAGE_FeatureNotEnabled string = "FeatureNotEnabled"
|
||||
MESSAGE_InvalidInput string = "InvalidInput"
|
||||
MESSAGE_InvalidField string = "InvalidField"
|
||||
MESSAGE_UnknownError string = "UnknownError"
|
||||
MESSAGE_RequestTimeout string = "RequestTimeout"
|
||||
MESSAGE_ConnectionError string = "ConnectionError"
|
||||
MESSAGE_AuthenticationError string = "AuthenticationError"
|
||||
MESSAGE_ResponseParsingFailure string = "ResponseParsingFailure"
|
||||
MESSAGE_EmptyResponse string = "EmptyResponse"
|
||||
)
|
||||
|
||||
var DefaultMessages map[string]string = map[string]string{
|
||||
MESSAGE_MissingField: "{.Value} must be specified.",
|
||||
MESSAGE_TargetNotAllowed: "{.Value} is not allowed.",
|
||||
MESSAGE_FeatureNotEnabled: "{.Value} is not enabled.",
|
||||
MESSAGE_InvalidInput: "{.Value} is invalid.",
|
||||
MESSAGE_InvalidField: "{.Value} is an invalid {.Type}",
|
||||
MESSAGE_UnknownError: "Something went wrong.",
|
||||
MESSAGE_RequestTimeout: "Request timed out.",
|
||||
MESSAGE_ConnectionError: "Error connecting to {.Value}: {.Error}",
|
||||
MESSAGE_AuthenticationError: "Error authenticating to {.Value}: {.Error}",
|
||||
MESSAGE_ResponseParsingFailure: "Error reading response.",
|
||||
MESSAGE_EmptyResponse: "The query completed, but no results were found.",
|
||||
}
|
||||
|
||||
func SeedMessages() (messages []Message) {
|
||||
for n, v := range DefaultMessages {
|
||||
msg := Message{
|
||||
Name: n,
|
||||
Value: v,
|
||||
}
|
||||
messages = append(messages, msg)
|
||||
}
|
||||
return
|
||||
}
|
||||
25
core/models/settings/settings.go
Normal file
25
core/models/settings/settings.go
Normal file
|
|
@ -0,0 +1,25 @@
|
|||
package settings
|
||||
|
||||
import "gorm.io/gorm"
|
||||
|
||||
type Settings struct {
|
||||
gorm.Model
|
||||
RequestTimeout int `gorm:"default:90"`
|
||||
OrganizationName string `gorm:"default:Beloved Hyperglass User"`
|
||||
UITitle string `gorm:"default:hyperglass"`
|
||||
UIDescription string `gorm:"Network Looking Glass"`
|
||||
Messages []Message
|
||||
Logging Logging
|
||||
}
|
||||
|
||||
func Seed() (settings *Settings, err error) {
|
||||
logging, err := SeedLogging()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
settings = &Settings{
|
||||
Messages: SeedMessages(),
|
||||
Logging: logging,
|
||||
}
|
||||
return
|
||||
}
|
||||
77
core/system/dirs.go
Normal file
77
core/system/dirs.go
Normal file
|
|
@ -0,0 +1,77 @@
|
|||
package system
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path"
|
||||
"runtime"
|
||||
)
|
||||
|
||||
func GetLogDir() (loggingDir string, err error) {
|
||||
loggingDir = "/var/log/hyperglass"
|
||||
switch runtime.GOOS {
|
||||
case "windows":
|
||||
var userDir string
|
||||
userDir, err = os.UserHomeDir()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
loggingDir = path.Join(userDir, "AppData", "Local", "hyperglass", "logs")
|
||||
case "darwin":
|
||||
var userDir string
|
||||
userDir, err = os.UserHomeDir()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
loggingDir = path.Join(userDir, "Library", "Logs", "hyperglass")
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func GetAppDir() (appDir string, err error) {
|
||||
appDir = "/etc/hyperglass"
|
||||
configDir, err := os.UserConfigDir()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
switch runtime.GOOS {
|
||||
case "windows":
|
||||
appDir = path.Join(configDir, "hyperglass")
|
||||
case "darwin":
|
||||
appDir = path.Join(configDir, "hyperglass")
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func InitializeDirs() (err error) {
|
||||
logDir, err := GetLogDir()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
appDir, err := GetAppDir()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
_, err = os.Stat(logDir)
|
||||
if err != nil {
|
||||
if os.IsNotExist(err) {
|
||||
err = os.Mkdir(logDir, 0755)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
return
|
||||
}
|
||||
return
|
||||
}
|
||||
_, err = os.Stat(appDir)
|
||||
if err != nil {
|
||||
if os.IsNotExist(err) {
|
||||
err = os.Mkdir(appDir, 0755)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
return
|
||||
}
|
||||
return
|
||||
}
|
||||
return
|
||||
}
|
||||
52
go.mod
Normal file
52
go.mod
Normal file
|
|
@ -0,0 +1,52 @@
|
|||
module github.com/thatmattlove/hyperglass
|
||||
|
||||
go 1.20
|
||||
|
||||
require (
|
||||
code.cloudfoundry.org/bytefmt v0.0.0-20230612151507-41ef4d1f67a4
|
||||
github.com/go-playground/validator/v10 v10.14.1
|
||||
github.com/gofiber/fiber/v2 v2.48.0
|
||||
github.com/google/uuid v1.3.0
|
||||
github.com/k0kubun/pp/v3 v3.2.0
|
||||
github.com/spf13/cobra v1.7.0
|
||||
github.com/stretchr/testify v1.8.3
|
||||
github.com/twitchtv/twirp v8.1.3+incompatible
|
||||
go.etcd.io/bbolt v1.3.7
|
||||
google.golang.org/protobuf v1.26.0
|
||||
gorm.io/driver/sqlite v1.5.2
|
||||
gorm.io/gorm v1.25.2
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/andybalholm/brotli v1.0.5 // indirect
|
||||
github.com/davecgh/go-spew v1.1.1 // indirect
|
||||
github.com/gabriel-vasile/mimetype v1.4.2 // indirect
|
||||
github.com/go-playground/locales v0.14.1 // indirect
|
||||
github.com/go-playground/universal-translator v0.18.1 // indirect
|
||||
github.com/inconshreveable/mousetrap v1.1.0 // indirect
|
||||
github.com/jinzhu/inflection v1.0.0 // indirect
|
||||
github.com/jinzhu/now v1.1.5 // indirect
|
||||
github.com/klauspost/compress v1.16.3 // indirect
|
||||
github.com/kr/fs v0.1.0 // indirect
|
||||
github.com/kr/pretty v0.3.1 // indirect
|
||||
github.com/leodido/go-urn v1.2.4 // indirect
|
||||
github.com/mattn/go-colorable v0.1.13 // indirect
|
||||
github.com/mattn/go-isatty v0.0.19 // indirect
|
||||
github.com/mattn/go-runewidth v0.0.14 // indirect
|
||||
github.com/mattn/go-sqlite3 v1.14.17 // indirect
|
||||
github.com/melbahja/goph v1.3.1 // indirect
|
||||
github.com/pkg/errors v0.9.1 // indirect
|
||||
github.com/pkg/sftp v1.13.5 // indirect
|
||||
github.com/pmezard/go-difflib v1.0.0 // indirect
|
||||
github.com/rivo/uniseg v0.2.0 // indirect
|
||||
github.com/spf13/pflag v1.0.5 // indirect
|
||||
github.com/valyala/bytebufferpool v1.0.0 // indirect
|
||||
github.com/valyala/fasthttp v1.48.0 // indirect
|
||||
github.com/valyala/tcplisten v1.0.0 // indirect
|
||||
golang.org/x/crypto v0.7.0 // indirect
|
||||
golang.org/x/net v0.10.0 // indirect
|
||||
golang.org/x/sys v0.10.0 // indirect
|
||||
golang.org/x/text v0.9.0 // indirect
|
||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 // indirect
|
||||
gopkg.in/yaml.v3 v3.0.1 // indirect
|
||||
)
|
||||
152
go.sum
Normal file
152
go.sum
Normal file
|
|
@ -0,0 +1,152 @@
|
|||
code.cloudfoundry.org/bytefmt v0.0.0-20230612151507-41ef4d1f67a4 h1:9G5F8zgma5v0GdDvNz6iZwwJp3RS/z0SY/aHGfVwvTo=
|
||||
code.cloudfoundry.org/bytefmt v0.0.0-20230612151507-41ef4d1f67a4/go.mod h1:wYHCXH/gI19ujoFVuMkY48qPpPCoHLKBKXPkn67h/Yc=
|
||||
github.com/andybalholm/brotli v1.0.5 h1:8uQZIdzKmjc/iuPu7O2ioW48L81FgatrcpfFmiq/cCs=
|
||||
github.com/andybalholm/brotli v1.0.5/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
|
||||
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/gabriel-vasile/mimetype v1.4.2 h1:w5qFW6JKBz9Y393Y4q372O9A7cUSequkh1Q7OhCmWKU=
|
||||
github.com/gabriel-vasile/mimetype v1.4.2/go.mod h1:zApsH/mKG4w07erKIaJPFiX0Tsq9BFQgN3qGY5GnNgA=
|
||||
github.com/go-logr/logr v1.2.3 h1:2DntVwHkVopvECVRSlL5PSo9eG+cAkDCuckLubN+rq0=
|
||||
github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s=
|
||||
github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA=
|
||||
github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
|
||||
github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
|
||||
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
|
||||
github.com/go-playground/validator/v10 v10.14.1 h1:9c50NUPC30zyuKprjL3vNZ0m5oG+jU0zvx4AqHGnv4k=
|
||||
github.com/go-playground/validator/v10 v10.14.1/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU=
|
||||
github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 h1:tfuBGBXKqDEevZMzYi5KSi8KkcZtzBcTgAUUtapy0OI=
|
||||
github.com/gofiber/fiber/v2 v2.48.0 h1:cRVMCb9aUJDsyHxGFLwz/sGzDggdailZZyptU9F9cU0=
|
||||
github.com/gofiber/fiber/v2 v2.48.0/go.mod h1:xqJgfqrc23FJuqGOW6DVgi3HyZEm2Mn9pRqUb2kHSX8=
|
||||
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
|
||||
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
|
||||
github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38 h1:yAJXTCF9TqKcTiHJAE8dj7HMvPfh66eeA2JYW7eFpSE=
|
||||
github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I=
|
||||
github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
|
||||
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
|
||||
github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E=
|
||||
github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc=
|
||||
github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ=
|
||||
github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
|
||||
github.com/k0kubun/pp/v3 v3.2.0 h1:h33hNTZ9nVFNP3u2Fsgz8JXiF5JINoZfFq4SvKJwNcs=
|
||||
github.com/k0kubun/pp/v3 v3.2.0/go.mod h1:ODtJQbQcIRfAD3N+theGCV1m/CBxweERz2dapdz1EwA=
|
||||
github.com/klauspost/compress v1.16.3 h1:XuJt9zzcnaz6a16/OU53ZjWp/v7/42WcR5t2a0PcNQY=
|
||||
github.com/klauspost/compress v1.16.3/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE=
|
||||
github.com/kr/fs v0.1.0 h1:Jskdu9ieNAYnjxsi0LbQp1ulIKZV1LAFgK1tWhpZgl8=
|
||||
github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg=
|
||||
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
|
||||
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
|
||||
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
|
||||
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
|
||||
github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q=
|
||||
github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4=
|
||||
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
|
||||
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
|
||||
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
|
||||
github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA=
|
||||
github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||
github.com/mattn/go-runewidth v0.0.14 h1:+xnbZSEeDbOIg5/mE6JF0w6n9duR1l3/WmbinWVwUuU=
|
||||
github.com/mattn/go-runewidth v0.0.14/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
|
||||
github.com/mattn/go-sqlite3 v1.14.17 h1:mCRHCLDUBXgpKAqIKsaAaAsrAlbkeomtRFKXh2L6YIM=
|
||||
github.com/mattn/go-sqlite3 v1.14.17/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg=
|
||||
github.com/melbahja/goph v1.3.1 h1:FxFevAwCCpLkM4WBmnVVxcJBcBz6lKQpsN5biV2hA6w=
|
||||
github.com/melbahja/goph v1.3.1/go.mod h1:uG+VfK2Dlhk+O32zFrRlc3kYKTlV6+BtvPWd/kK7U68=
|
||||
github.com/onsi/ginkgo/v2 v2.9.2 h1:BA2GMJOtfGAfagzYtrAlufIP0lq6QERkFmHLMLPwFSU=
|
||||
github.com/onsi/gomega v1.27.4 h1:Z2AnStgsdSayCMDiCU42qIz+HLqEPcgiOCXjAU/w+8E=
|
||||
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
|
||||
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
|
||||
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
github.com/pkg/sftp v1.13.5 h1:a3RLUqkyjYRtBTZJZ1VRrKbN3zhuPLlUc3sphVz81go=
|
||||
github.com/pkg/sftp v1.13.5/go.mod h1:wHDZ0IZX6JcBYRK1TH9bcVq8G7TLpVHYIGJRFnmPfxg=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY=
|
||||
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
|
||||
github.com/rogpeppe/go-internal v1.9.0 h1:73kH8U+JUqXU8lRuOHeVHaa/SZPifC7BkcraZVejAe8=
|
||||
github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs=
|
||||
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||
github.com/spf13/cobra v1.7.0 h1:hyqWnYt1ZQShIddO5kBpj3vu05/++x6tJ6dg8EC572I=
|
||||
github.com/spf13/cobra v1.7.0/go.mod h1:uLxZILRyS/50WlhOIKD7W6V5bgeIt+4sICxh6uRMrb0=
|
||||
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
|
||||
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
|
||||
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
|
||||
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
|
||||
github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
|
||||
github.com/stretchr/testify v1.8.3 h1:RP3t2pwF7cMEbC1dqtB6poj3niw/9gnV4Cjg5oW5gtY=
|
||||
github.com/stretchr/testify v1.8.3/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
|
||||
github.com/twitchtv/twirp v8.1.3+incompatible h1:+F4TdErPgSUbMZMwp13Q/KgDVuI7HJXP61mNV3/7iuU=
|
||||
github.com/twitchtv/twirp v8.1.3+incompatible/go.mod h1:RRJoFSAmTEh2weEqWtpPE3vFK5YBhA6bqp2l1kfCC5A=
|
||||
github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=
|
||||
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
|
||||
github.com/valyala/fasthttp v1.48.0 h1:oJWvHb9BIZToTQS3MuQ2R3bJZiNSa2KiNdeI8A+79Tc=
|
||||
github.com/valyala/fasthttp v1.48.0/go.mod h1:k2zXd82h/7UZc3VOdJ2WaUqt1uZ/XpXAfE9i+HBC3lA=
|
||||
github.com/valyala/tcplisten v1.0.0 h1:rBHj/Xf+E1tRGZyWIWwJDiRY0zc1Js+CV5DqwacVSA8=
|
||||
github.com/valyala/tcplisten v1.0.0/go.mod h1:T0xQ8SeCZGxckz9qRXTfG43PvQ/mcWh7FwZEA7Ioqkc=
|
||||
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
||||
go.etcd.io/bbolt v1.3.7 h1:j+zJOnnEjF/kyHlDDgGnVL/AIqIJPq8UoB2GSNfkUfQ=
|
||||
go.etcd.io/bbolt v1.3.7/go.mod h1:N9Mkw9X8x5fupy0IKsmuqVtoGDyxsaDlbk4Rd05IAQw=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||
golang.org/x/crypto v0.0.0-20211215153901-e495a2d5b3d3/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
|
||||
golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58=
|
||||
golang.org/x/crypto v0.7.0 h1:AvwMYaRytfdeVt3u6mLaxYtErKYjxA2OXjJ1HHq6t3A=
|
||||
golang.org/x/crypto v0.7.0/go.mod h1:pYwdfH91IfpZVANVyUOhSIPZaFoJGxTFbZhFTx+dXZU=
|
||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
||||
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
||||
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
||||
golang.org/x/net v0.10.0 h1:X2//UzNDwYmtCLn7To6G58Wr6f5ahEAQgKNzv9Y951M=
|
||||
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
|
||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.10.0 h1:SqMFp9UcQJZa+pmYuAKjd9xq1f0j5rLcDIk0mj4qAsA=
|
||||
golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
||||
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||
golang.org/x/text v0.9.0 h1:2sjJmO8cDvYveuX97RDLsxlyUxLl+GHoLxBiRdHllBE=
|
||||
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
||||
golang.org/x/tools v0.7.0 h1:W4OVu8VVOaIO0yzWMNdepAulS7YfoS3Zabrm8DOXXU4=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
|
||||
google.golang.org/protobuf v1.26.0 h1:bxAC2xTBsZGibn2RTntX0oH50xLsqy1OxA9tTL3p/lk=
|
||||
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY=
|
||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
gorm.io/driver/sqlite v1.5.2 h1:TpQ+/dqCY4uCigCFyrfnrJnrW9zjpelWVoEVNy5qJkc=
|
||||
gorm.io/driver/sqlite v1.5.2/go.mod h1:qxAuCol+2r6PannQDpOP1FP6ag3mKi4esLnB/jHed+4=
|
||||
gorm.io/gorm v1.25.2 h1:gs1o6Vsa+oVKG/a9ElL3XgyGfghFfkKA2SInQaCyMho=
|
||||
gorm.io/gorm v1.25.2/go.mod h1:L4uxeKpfBml98NYqVqwAdmV1a2nBtAec/cf3fpucW/k=
|
||||
24
hooks.sh
24
hooks.sh
|
|
@ -1,24 +0,0 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
function isort_all () {
|
||||
isort -y hyperglass/*.py
|
||||
if [[ ! $? == 0 ]]; then
|
||||
exit 1
|
||||
fi
|
||||
isort -y hyperglass/**/*.py
|
||||
if [[ ! $? == 0 ]]; then
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
function validate_examples () {
|
||||
python3 ./validate_examples.py
|
||||
if [[ ! $? == 0 ]]; then
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
# isort_all
|
||||
validate_examples
|
||||
|
||||
exit 0
|
||||
10
hyperglass/.gitignore
vendored
10
hyperglass/.gitignore
vendored
|
|
@ -1,10 +0,0 @@
|
|||
.DS_Store
|
||||
.sass-cache/
|
||||
.flask_cache
|
||||
.flask_cache/*
|
||||
gunicorn_config.py
|
||||
gunicorn_dev_config.py
|
||||
test.py
|
||||
__pycache__/
|
||||
*_old
|
||||
certs/
|
||||
|
|
@ -1,52 +0,0 @@
|
|||
"""hyperglass is a modern, customizable network looking glass written in Python 3.
|
||||
|
||||
https://github.com/thatmattlove/hyperglass
|
||||
|
||||
The Clear BSD License
|
||||
|
||||
Copyright (c) 2021 Matthew Love
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted (subject to the limitations in the disclaimer
|
||||
below) provided that the following conditions are met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright notice,
|
||||
this list of conditions and the following disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
|
||||
* Neither the name of the copyright holder nor the names of its
|
||||
contributors may be used to endorse or promote products derived from this
|
||||
software without specific prior written permission.
|
||||
|
||||
NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY
|
||||
THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
|
||||
CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
|
||||
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
|
||||
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
|
||||
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
|
||||
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
|
||||
IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
POSSIBILITY OF SUCH DAMAGE.
|
||||
"""
|
||||
|
||||
# Third Party
|
||||
import uvloop
|
||||
|
||||
# Project
|
||||
from hyperglass.util import set_app_path
|
||||
from hyperglass.constants import METADATA
|
||||
|
||||
# Find hyperglass application directory.
|
||||
set_app_path()
|
||||
|
||||
# Use Uvloop for performance.
|
||||
uvloop.install()
|
||||
|
||||
__name__, __version__, __author__, __copyright__, __license__ = METADATA
|
||||
|
|
@ -1,271 +0,0 @@
|
|||
"""hyperglass REST API & Web UI."""
|
||||
|
||||
# Standard Library
|
||||
import sys
|
||||
from typing import List
|
||||
from pathlib import Path
|
||||
|
||||
# Third Party
|
||||
from fastapi import FastAPI
|
||||
from fastapi.exceptions import ValidationError, RequestValidationError
|
||||
from starlette.responses import JSONResponse
|
||||
from starlette.exceptions import HTTPException as StarletteHTTPException
|
||||
from fastapi.openapi.utils import get_openapi
|
||||
from starlette.staticfiles import StaticFiles
|
||||
from starlette.middleware.cors import CORSMiddleware
|
||||
|
||||
# Project
|
||||
from hyperglass.log import log
|
||||
from hyperglass.util import cpu_count
|
||||
from hyperglass.constants import TRANSPORT_REST, __version__
|
||||
from hyperglass.api.events import on_startup, on_shutdown
|
||||
from hyperglass.api.routes import (
|
||||
docs,
|
||||
info,
|
||||
query,
|
||||
queries,
|
||||
routers,
|
||||
communities,
|
||||
import_certificate,
|
||||
)
|
||||
from hyperglass.exceptions import HyperglassError
|
||||
from hyperglass.configuration import URL_DEV, STATIC_PATH, params, devices
|
||||
from hyperglass.api.error_handlers import (
|
||||
app_handler,
|
||||
http_handler,
|
||||
default_handler,
|
||||
validation_handler,
|
||||
)
|
||||
from hyperglass.models.api.response import (
|
||||
QueryError,
|
||||
InfoResponse,
|
||||
QueryResponse,
|
||||
RoutersResponse,
|
||||
CommunityResponse,
|
||||
SupportedQueryResponse,
|
||||
)
|
||||
|
||||
WORKING_DIR = Path(__file__).parent
|
||||
EXAMPLES_DIR = WORKING_DIR / "examples"
|
||||
|
||||
UI_DIR = STATIC_PATH / "ui"
|
||||
CUSTOM_DIR = STATIC_PATH / "custom"
|
||||
IMAGES_DIR = STATIC_PATH / "images"
|
||||
|
||||
EXAMPLE_DEVICES_PY = EXAMPLES_DIR / "devices.py"
|
||||
EXAMPLE_QUERIES_PY = EXAMPLES_DIR / "queries.py"
|
||||
EXAMPLE_QUERY_PY = EXAMPLES_DIR / "query.py"
|
||||
EXAMPLE_DEVICES_CURL = EXAMPLES_DIR / "devices.sh"
|
||||
EXAMPLE_QUERIES_CURL = EXAMPLES_DIR / "queries.sh"
|
||||
EXAMPLE_QUERY_CURL = EXAMPLES_DIR / "query.sh"
|
||||
|
||||
ASGI_PARAMS = {
|
||||
"host": str(params.listen_address),
|
||||
"port": params.listen_port,
|
||||
"debug": params.debug,
|
||||
"workers": cpu_count(2),
|
||||
}
|
||||
DOCS_PARAMS = {}
|
||||
if params.docs.enable:
|
||||
DOCS_PARAMS.update({"openapi_url": params.docs.openapi_uri})
|
||||
if params.docs.mode == "redoc":
|
||||
DOCS_PARAMS.update({"docs_url": None, "redoc_url": params.docs.uri})
|
||||
elif params.docs.mode == "swagger":
|
||||
DOCS_PARAMS.update({"docs_url": params.docs.uri, "redoc_url": None})
|
||||
|
||||
for directory in (UI_DIR, IMAGES_DIR):
|
||||
if not directory.exists():
|
||||
log.warning("Directory '{d}' does not exist, creating...", d=str(directory))
|
||||
directory.mkdir()
|
||||
|
||||
# Main App Definition
|
||||
app = FastAPI(
|
||||
debug=params.debug,
|
||||
title=params.site_title,
|
||||
description=params.site_description,
|
||||
version=__version__,
|
||||
default_response_class=JSONResponse,
|
||||
**DOCS_PARAMS,
|
||||
)
|
||||
|
||||
# Add Event Handlers
|
||||
for startup in on_startup:
|
||||
app.add_event_handler("startup", startup)
|
||||
|
||||
for shutdown in on_shutdown:
|
||||
app.add_event_handler("shutdown", shutdown)
|
||||
|
||||
# HTTP Error Handler
|
||||
app.add_exception_handler(StarletteHTTPException, http_handler)
|
||||
|
||||
# Backend Application Error Handler
|
||||
app.add_exception_handler(HyperglassError, app_handler)
|
||||
|
||||
# Request Validation Error Handler
|
||||
app.add_exception_handler(RequestValidationError, validation_handler)
|
||||
|
||||
# App Validation Error Handler
|
||||
app.add_exception_handler(ValidationError, validation_handler)
|
||||
|
||||
# Uncaught Error Handler
|
||||
app.add_exception_handler(Exception, default_handler)
|
||||
|
||||
|
||||
def _custom_openapi():
|
||||
"""Generate custom OpenAPI config."""
|
||||
openapi_schema = get_openapi(
|
||||
title=params.docs.title.format(site_title=params.site_title),
|
||||
version=__version__,
|
||||
description=params.docs.description,
|
||||
routes=app.routes,
|
||||
)
|
||||
openapi_schema["info"]["x-logo"] = {
|
||||
"url": "/images/light" + params.web.logo.light.suffix
|
||||
}
|
||||
|
||||
query_samples = []
|
||||
queries_samples = []
|
||||
devices_samples = []
|
||||
|
||||
with EXAMPLE_QUERY_CURL.open("r") as e:
|
||||
example = e.read()
|
||||
query_samples.append(
|
||||
{"lang": "cURL", "source": example % str(params.docs.base_url)}
|
||||
)
|
||||
|
||||
with EXAMPLE_QUERY_PY.open("r") as e:
|
||||
example = e.read()
|
||||
query_samples.append(
|
||||
{"lang": "Python", "source": example % str(params.docs.base_url)}
|
||||
)
|
||||
|
||||
with EXAMPLE_DEVICES_CURL.open("r") as e:
|
||||
example = e.read()
|
||||
queries_samples.append(
|
||||
{"lang": "cURL", "source": example % str(params.docs.base_url)}
|
||||
)
|
||||
with EXAMPLE_DEVICES_PY.open("r") as e:
|
||||
example = e.read()
|
||||
queries_samples.append(
|
||||
{"lang": "Python", "source": example % str(params.docs.base_url)}
|
||||
)
|
||||
|
||||
with EXAMPLE_QUERIES_CURL.open("r") as e:
|
||||
example = e.read()
|
||||
devices_samples.append(
|
||||
{"lang": "cURL", "source": example % str(params.docs.base_url)}
|
||||
)
|
||||
|
||||
with EXAMPLE_QUERIES_PY.open("r") as e:
|
||||
example = e.read()
|
||||
devices_samples.append(
|
||||
{"lang": "Python", "source": example % str(params.docs.base_url)}
|
||||
)
|
||||
|
||||
openapi_schema["paths"]["/api/query/"]["post"]["x-code-samples"] = query_samples
|
||||
openapi_schema["paths"]["/api/devices"]["get"]["x-code-samples"] = devices_samples
|
||||
openapi_schema["paths"]["/api/queries"]["get"]["x-code-samples"] = queries_samples
|
||||
|
||||
app.openapi_schema = openapi_schema
|
||||
return app.openapi_schema
|
||||
|
||||
|
||||
CORS_ORIGINS = params.cors_origins.copy()
|
||||
if params.developer_mode:
|
||||
CORS_ORIGINS.append(URL_DEV)
|
||||
|
||||
# CORS Configuration
|
||||
app.add_middleware(
|
||||
CORSMiddleware,
|
||||
allow_origins=CORS_ORIGINS,
|
||||
allow_methods=["GET", "POST", "OPTIONS"],
|
||||
allow_headers=["*"],
|
||||
)
|
||||
|
||||
app.add_api_route(
|
||||
path="/api/info",
|
||||
endpoint=info,
|
||||
methods=["GET"],
|
||||
response_model=InfoResponse,
|
||||
response_class=JSONResponse,
|
||||
summary=params.docs.info.summary,
|
||||
description=params.docs.info.description,
|
||||
tags=[params.docs.info.title],
|
||||
)
|
||||
|
||||
app.add_api_route(
|
||||
path="/api/devices",
|
||||
endpoint=routers,
|
||||
methods=["GET"],
|
||||
response_model=List[RoutersResponse],
|
||||
response_class=JSONResponse,
|
||||
summary=params.docs.devices.summary,
|
||||
description=params.docs.devices.description,
|
||||
tags=[params.docs.devices.title],
|
||||
)
|
||||
|
||||
app.add_api_route(
|
||||
path="/api/communities",
|
||||
endpoint=communities,
|
||||
methods=["GET"],
|
||||
response_model=List[CommunityResponse],
|
||||
summary=params.docs.communities.summary,
|
||||
tags=[params.docs.communities.title],
|
||||
)
|
||||
|
||||
app.add_api_route(
|
||||
path="/api/queries",
|
||||
endpoint=queries,
|
||||
methods=["GET"],
|
||||
response_class=JSONResponse,
|
||||
response_model=List[SupportedQueryResponse],
|
||||
summary=params.docs.queries.summary,
|
||||
description=params.docs.queries.description,
|
||||
tags=[params.docs.queries.title],
|
||||
)
|
||||
|
||||
app.add_api_route(
|
||||
path="/api/query/",
|
||||
endpoint=query,
|
||||
methods=["POST"],
|
||||
summary=params.docs.query.summary,
|
||||
description=params.docs.query.description,
|
||||
responses={
|
||||
400: {"model": QueryError, "description": "Request Content Error"},
|
||||
422: {"model": QueryError, "description": "Request Format Error"},
|
||||
500: {"model": QueryError, "description": "Server Error"},
|
||||
},
|
||||
response_model=QueryResponse,
|
||||
tags=[params.docs.query.title],
|
||||
response_class=JSONResponse,
|
||||
)
|
||||
|
||||
# Enable certificate import route only if a device using
|
||||
# hyperglass-agent is defined.
|
||||
if [n for n in devices.all_nos if n in TRANSPORT_REST]:
|
||||
app.add_api_route(
|
||||
path="/api/import-agent-certificate/",
|
||||
endpoint=import_certificate,
|
||||
methods=["POST"],
|
||||
include_in_schema=False,
|
||||
)
|
||||
|
||||
if params.docs.enable:
|
||||
app.add_api_route(path=params.docs.uri, endpoint=docs, include_in_schema=False)
|
||||
app.openapi = _custom_openapi
|
||||
log.debug("API Docs config: {}", app.openapi())
|
||||
|
||||
app.mount("/images", StaticFiles(directory=IMAGES_DIR), name="images")
|
||||
app.mount("/custom", StaticFiles(directory=CUSTOM_DIR), name="custom")
|
||||
app.mount("/", StaticFiles(directory=UI_DIR, html=True), name="ui")
|
||||
|
||||
|
||||
def start(**kwargs):
    """Start the web server with Uvicorn ASGI.

    Any keyword arguments are passed through to ``uvicorn.run`` in
    addition to the pre-built ``ASGI_PARAMS`` mapping.
    """
    # Third Party
    # NOTE: deferred import — presumably to keep uvicorn out of module
    # import time; confirm before moving to the top of the file.
    import uvicorn

    try:
        uvicorn.run("hyperglass.api:app", **ASGI_PARAMS, **kwargs)
    except KeyboardInterrupt:
        # Exit cleanly on Ctrl-C instead of printing a traceback.
        sys.exit(0)
|
||||
|
|
@ -1,40 +0,0 @@
|
|||
"""API Error Handlers."""
|
||||
|
||||
# Third Party
|
||||
from starlette.responses import JSONResponse
|
||||
|
||||
# Project
|
||||
from hyperglass.configuration import params
|
||||
|
||||
|
||||
async def default_handler(request, exc):
    """Handle uncaught errors.

    Returns a generic danger-level message so internal details are
    never exposed to the client.
    """
    body = {"output": params.messages.general, "level": "danger", "keywords": []}
    return JSONResponse(body, status_code=500)
|
||||
|
||||
|
||||
async def http_handler(request, exc):
    """Handle web server errors.

    Mirrors the HTTPException's own detail and status code in the
    standard hyperglass error payload.
    """
    body = {"output": exc.detail, "level": "danger", "keywords": []}
    return JSONResponse(body, status_code=exc.status_code)
|
||||
|
||||
|
||||
async def app_handler(request, exc):
    """Handle application errors.

    HyperglassError subclasses carry their own message, severity level,
    keywords, and status code; pass them through unchanged.
    """
    body = {"output": exc.message, "level": exc.level, "keywords": exc.keywords}
    return JSONResponse(body, status_code=exc.status_code)
|
||||
|
||||
|
||||
async def validation_handler(request, exc):
    """Handle Pydantic validation errors raised by FastAPI.

    Only the first validation error is surfaced; its message becomes the
    output and its location becomes the keywords.
    """
    first_error = exc.errors()[0]
    body = {
        "output": first_error["msg"],
        "level": "error",
        "keywords": first_error["loc"],
    }
    return JSONResponse(body, status_code=422)
|
||||
|
|
@ -1,16 +0,0 @@
|
|||
"""API Events."""
|
||||
|
||||
# Project
|
||||
from hyperglass.cache import AsyncCache
|
||||
from hyperglass.configuration import REDIS_CONFIG, params
|
||||
|
||||
|
||||
async def check_redis() -> bool:
    """Ensure Redis is running before starting server.

    Raises (via ``AsyncCache.test``) if the Redis server configured in
    ``REDIS_CONFIG`` cannot be reached; returns True otherwise.
    """
    cache = AsyncCache(db=params.cache.database, **REDIS_CONFIG)
    await cache.test()
    return True


# Event hooks consumed by the ASGI app at startup/shutdown.
on_startup = (check_redis,)
on_shutdown = ()
|
||||
|
|
@ -1,6 +0,0 @@
|
|||
# Third Party
|
||||
import httpx
|
||||
|
||||
request = httpx.get("%s/api/devices")
|
||||
|
||||
print(request.json())
|
||||
|
|
@ -1 +0,0 @@
|
|||
curl %s/api/devices
|
||||
|
|
@ -1,6 +0,0 @@
|
|||
# Third Party
|
||||
import httpx
|
||||
|
||||
request = httpx.get("%s/api/queries")
|
||||
|
||||
print(request.json())
|
||||
|
|
@ -1 +0,0 @@
|
|||
curl %s/api/queries
|
||||
|
|
@ -1,13 +0,0 @@
|
|||
# Third Party
|
||||
import httpx
|
||||
|
||||
query = {
|
||||
"query_location": "router01",
|
||||
"query_type": "bgp_route",
|
||||
"query_vrf": "default",
|
||||
"query_target": "1.1.1.0/24",
|
||||
}
|
||||
|
||||
request = httpx.post("%s/api/query/", data=query)
|
||||
|
||||
print(request.json().get("output"))
|
||||
|
|
@ -1,7 +0,0 @@
|
|||
curl -X POST %s/api/query/ -d \
|
||||
'{
|
||||
"query_location": "router01",
|
||||
"query_type": "bgp_route",
|
||||
"query_vrf": "default",
|
||||
"query_target": "1.1.1.0/24"
|
||||
}'
|
||||
|
|
@ -1,174 +0,0 @@
|
|||
"""Return fake, static data for development purposes."""
|
||||
|
||||
# Standard Library
|
||||
from typing import Dict, Union
|
||||
|
||||
PLAIN = r"""
|
||||
BGP routing table entry for 4.0.0.0/9, version 1017877672
|
||||
BGP Bestpath: deterministic-med
|
||||
Paths: (10 available, best #9, table default)
|
||||
Advertised to update-groups:
|
||||
50
|
||||
1299 3356, (aggregated by 3356 4.69.130.24)
|
||||
216.250.230.1 (metric 2000) from 216.250.230.1 (216.250.230.1)
|
||||
Origin IGP, metric 0, localpref 100, weight 100, valid, internal, atomic-aggregate
|
||||
Community: 1299:25000 14525:0 14525:40 14525:601 14525:1021 14525:2840 14525:3003 14525:4002 14525:9003
|
||||
1299 3356, (aggregated by 3356 4.69.130.24), (received-only)
|
||||
216.250.230.1 (metric 2000) from 216.250.230.1 (216.250.230.1)
|
||||
Origin IGP, metric 0, localpref 150, valid, internal, atomic-aggregate
|
||||
Community: 1299:25000 14525:0 14525:40 14525:601 14525:1021 14525:2840 14525:3003 14525:4002 14525:9003
|
||||
1299 3356, (aggregated by 3356 4.69.130.184)
|
||||
199.34.92.9 (metric 1000) from 199.34.92.9 (199.34.92.9)
|
||||
Origin IGP, metric 0, localpref 100, weight 100, valid, internal, atomic-aggregate
|
||||
Community: 1299:25000 14525:0 14525:40 14525:601 14525:1021 14525:2840 14525:3001 14525:4001 14525:9003
|
||||
1299 3356, (aggregated by 3356 4.69.130.184), (received-only)
|
||||
199.34.92.9 (metric 1000) from 199.34.92.9 (199.34.92.9)
|
||||
Origin IGP, metric 0, localpref 150, valid, internal, atomic-aggregate
|
||||
Community: 1299:25000 14525:0 14525:40 14525:601 14525:1021 14525:2840 14525:3001 14525:4001 14525:9003
|
||||
174 3356, (aggregated by 3356 4.69.130.4)
|
||||
199.34.92.10 (metric 1000) from 199.34.92.10 (199.34.92.10)
|
||||
Origin IGP, metric 0, localpref 100, weight 100, valid, internal, atomic-aggregate
|
||||
Community: 174:21000 174:22013 14525:0 14525:40 14525:601 14525:1021 14525:2840 14525:3001 14525:4001 14525:9001
|
||||
174 3356, (aggregated by 3356 4.69.130.4), (received-only)
|
||||
199.34.92.10 (metric 1000) from 199.34.92.10 (199.34.92.10)
|
||||
Origin IGP, metric 0, localpref 150, valid, internal, atomic-aggregate
|
||||
Community: 174:21000 174:22013 14525:0 14525:40 14525:601 14525:1021 14525:2840 14525:3001 14525:4001 14525:9001
|
||||
209 3356, (aggregated by 3356 4.69.130.2)
|
||||
199.34.92.5 (metric 101) from 199.34.92.5 (199.34.92.5)
|
||||
Origin IGP, metric 8006570, localpref 150, weight 200, valid, internal, atomic-aggregate
|
||||
Community: 209:88 209:888 3356:0 3356:3 3356:100 3356:123 3356:575 3356:2011 14525:0 14525:40 14525:1021 14525:2840 14525:3002 14525:4003 14525:9005
|
||||
209 3356, (aggregated by 3356 4.69.130.2), (received-only)
|
||||
199.34.92.5 (metric 101) from 199.34.92.5 (199.34.92.5)
|
||||
Origin IGP, metric 8006570, localpref 150, valid, internal, atomic-aggregate
|
||||
Community: 209:88 209:888 3356:0 3356:3 3356:100 3356:123 3356:575 3356:2011 14525:0 14525:40 14525:1021 14525:2840 14525:3002 14525:4003 14525:9005
|
||||
6939 3356, (aggregated by 3356 4.69.130.4)
|
||||
184.105.247.177 from 184.105.247.177 (216.218.252.234)
|
||||
Origin IGP, localpref 150, weight 200, valid, external, atomic-aggregate, best
|
||||
Community: 6939:7016 6939:8840 6939:9001 14525:0 14525:40 14525:1021 14525:2840 14525:3002 14525:4003 14525:9002
|
||||
6939 3356, (aggregated by 3356 4.69.130.4), (received-only)
|
||||
184.105.247.177 from 184.105.247.177 (216.218.252.234)
|
||||
Origin IGP, localpref 100, valid, external, atomic-aggregate
|
||||
Community: 6939:7016 6939:8840 6939:9001
|
||||
""" # noqa: W291,E501
|
||||
|
||||
ROUTES = [
|
||||
{
|
||||
"prefix": "1.1.1.0/24",
|
||||
"active": True,
|
||||
"age": 1025337,
|
||||
"weight": 170,
|
||||
"med": 0,
|
||||
"local_preference": 175,
|
||||
"as_path": [1299, 13335],
|
||||
"communities": [
|
||||
"1299:35000",
|
||||
"14525:0",
|
||||
"14525:41",
|
||||
"14525:600",
|
||||
"14525:1021",
|
||||
"14525:2840",
|
||||
"14525:3001",
|
||||
"14525:4001",
|
||||
"14525:9003",
|
||||
],
|
||||
"next_hop": "62.115.189.136",
|
||||
"source_as": 13335,
|
||||
"source_rid": "141.101.72.1",
|
||||
"peer_rid": "2.255.254.43",
|
||||
"rpki_state": 1,
|
||||
},
|
||||
{
|
||||
"prefix": "1.1.1.0/24",
|
||||
"active": False,
|
||||
"age": 1584622,
|
||||
"weight": 200,
|
||||
"med": 0,
|
||||
"local_preference": 250,
|
||||
"as_path": [13335],
|
||||
"communities": [
|
||||
"14525:0",
|
||||
"14525:20",
|
||||
"14525:600",
|
||||
"14525:1021",
|
||||
"14525:2840",
|
||||
"14525:3002",
|
||||
"14525:4003",
|
||||
"14525:9009",
|
||||
],
|
||||
"next_hop": "",
|
||||
"source_as": 13335,
|
||||
"source_rid": "172.68.129.1",
|
||||
"peer_rid": "199.34.92.5",
|
||||
"rpki_state": 3,
|
||||
},
|
||||
{
|
||||
"prefix": "1.1.1.0/24",
|
||||
"active": False,
|
||||
"age": 982517,
|
||||
"weight": 200,
|
||||
"med": 0,
|
||||
"local_preference": 250,
|
||||
"as_path": [13335],
|
||||
"communities": [
|
||||
"14525:0",
|
||||
"14525:20",
|
||||
"14525:600",
|
||||
"14525:1021",
|
||||
"14525:2840",
|
||||
"14525:3002",
|
||||
"14525:4003",
|
||||
"14525:9009",
|
||||
],
|
||||
"next_hop": "",
|
||||
"source_as": 13335,
|
||||
"source_rid": "172.68.129.1",
|
||||
"peer_rid": "199.34.92.6",
|
||||
"rpki_state": 3,
|
||||
},
|
||||
{
|
||||
"prefix": "1.1.1.0/24",
|
||||
"active": False,
|
||||
"age": 1000101,
|
||||
"weight": 200,
|
||||
"med": 0,
|
||||
"local_preference": 250,
|
||||
"as_path": [13335],
|
||||
"communities": [
|
||||
"13335:10014",
|
||||
"13335:19000",
|
||||
"13335:20050",
|
||||
"13335:20500",
|
||||
"13335:20530",
|
||||
"14525:0",
|
||||
"14525:20",
|
||||
"14525:600",
|
||||
"14525:1021",
|
||||
"14525:2840",
|
||||
"14525:3003",
|
||||
"14525:4002",
|
||||
"14525:9009",
|
||||
],
|
||||
"next_hop": "",
|
||||
"source_as": 13335,
|
||||
"source_rid": "141.101.73.1",
|
||||
"peer_rid": "216.250.230.2",
|
||||
"rpki_state": 3,
|
||||
},
|
||||
]
|
||||
|
||||
STRUCTURED = {
|
||||
"vrf": "default",
|
||||
"count": len(ROUTES),
|
||||
"routes": ROUTES,
|
||||
"winning_weight": "high",
|
||||
}
|
||||
|
||||
|
||||
async def fake_output(structured: bool) -> Union[str, Dict]:
    """Bypass the standard execution process and return static, fake output.

    Returns the canned structured (dict) response when ``structured`` is
    truthy, otherwise the canned plain-text response.
    """
    return STRUCTURED if structured else PLAIN
|
||||
|
|
@ -1,266 +0,0 @@
|
|||
"""API Routes."""
|
||||
|
||||
# Standard Library
|
||||
import os
|
||||
import json
|
||||
import time
|
||||
from datetime import datetime
|
||||
|
||||
# Third Party
|
||||
from fastapi import HTTPException, BackgroundTasks
|
||||
from starlette.requests import Request
|
||||
from fastapi.openapi.docs import get_redoc_html, get_swagger_ui_html
|
||||
|
||||
# Project
|
||||
from hyperglass.log import log
|
||||
from hyperglass.cache import AsyncCache
|
||||
from hyperglass.encode import jwt_decode
|
||||
from hyperglass.external import Webhook, bgptools
|
||||
from hyperglass.api.tasks import process_headers, import_public_key
|
||||
from hyperglass.constants import __version__
|
||||
from hyperglass.exceptions import HyperglassError
|
||||
from hyperglass.models.api import Query, EncodedRequest
|
||||
from hyperglass.configuration import REDIS_CONFIG, params, devices
|
||||
from hyperglass.execution.main import execute
|
||||
|
||||
# Local
|
||||
from .fake_output import fake_output
|
||||
|
||||
APP_PATH = os.environ["hyperglass_directory"]
|
||||
|
||||
|
||||
async def send_webhook(query_data: Query, request: Request, timestamp: datetime):
    """If webhooks are enabled, get request info and send a webhook.

    Args:
        query_data (Query): Valid query
        request (Request): Starlette/FastAPI request
        timestamp (datetime): Time the query was received

    Returns:
        None. Delivery is best-effort; failures are logged, never raised.
    """
    try:
        if params.logging.http is not None:
            headers = await process_headers(headers=request.headers)

            # Prefer proxy-supplied client addresses over the socket peer.
            if headers.get("x-real-ip") is not None:
                host = headers["x-real-ip"]
            elif headers.get("x-forwarded-for") is not None:
                host = headers["x-forwarded-for"]
            else:
                host = request.client.host

            network_info = await bgptools.network_info(host)

            async with Webhook(params.logging.http) as hook:

                await hook.send(
                    query={
                        **query_data.export_dict(pretty=True),
                        "headers": headers,
                        "source": host,
                        "network": network_info.get(host, {}),
                        "timestamp": timestamp,
                    }
                )
    except Exception as err:
        # Broad catch is deliberate: this runs as a background task and
        # a webhook failure must never affect the user's query.
        log.error(
            "Error sending webhook to {}: {}", params.logging.http.provider, str(err)
        )
|
||||
|
||||
|
||||
async def query(query_data: Query, request: Request, background_tasks: BackgroundTasks):
    """Ingest request data pass it to the backend application to perform the query.

    Checks the Redis cache for a prior identical query; on a miss, runs
    the query (or returns fake output in dev mode), caches the result,
    and returns the standard response payload either way.
    """

    timestamp = datetime.utcnow()
    # Webhook delivery happens after the response is sent.
    background_tasks.add_task(send_webhook, query_data, request, timestamp)

    # Initialize cache
    cache = AsyncCache(db=params.cache.database, **REDIS_CONFIG)
    log.debug("Initialized cache {}", repr(cache))

    # Use hashed query_data string as key for the k/v cache store so
    # each command output value is unique.
    cache_key = query_data.digest()

    # Define cache entry expiry time
    cache_timeout = params.cache.timeout

    log.debug("Cache Timeout: {}", cache_timeout)
    log.info("Starting query execution for query {}", query_data.summary)

    cache_response = await cache.get_dict(cache_key, "output")

    # Structured (JSON) output applies only to devices that support it
    # and only to the BGP query types.
    json_output = False

    if query_data.device.structured_output and query_data.query_type in (
        "bgp_route",
        "bgp_community",
        "bgp_aspath",
    ):
        json_output = True

    cached = False
    runtime = 65535  # sentinel; overwritten on both branches below
    if cache_response:
        log.debug("Query {} exists in cache", cache_key)

        # If a cached response exists, reset the expiration time.
        await cache.expire(cache_key, seconds=cache_timeout)

        cached = True
        runtime = 0
        timestamp = await cache.get_dict(cache_key, "timestamp")

    elif not cache_response:
        log.debug("No existing cache entry for query {}", cache_key)
        log.debug(
            "Created new cache key {} entry for query {}", cache_key, query_data.summary
        )

        timestamp = query_data.timestamp

        starttime = time.time()

        if params.fake_output:
            # Return fake, static data for development purposes, if enabled.
            cache_output = await fake_output(json_output)
        else:
            # Pass request to execution module
            cache_output = await execute(query_data)

        endtime = time.time()
        elapsedtime = round(endtime - starttime, 4)
        log.debug("Query {} took {} seconds to run.", cache_key, elapsedtime)

        if cache_output is None:
            raise HyperglassError(message=params.messages.general, alert="danger")

        # Create a cache entry
        if json_output:
            raw_output = json.dumps(cache_output)
        else:
            raw_output = str(cache_output)
        await cache.set_dict(cache_key, "output", raw_output)
        await cache.set_dict(cache_key, "timestamp", timestamp)
        await cache.expire(cache_key, seconds=cache_timeout)

        log.debug("Added cache entry for query: {}", cache_key)

        runtime = int(round(elapsedtime, 0))

    # Re-read from the cache so both branches return the same shape
    # (parse_types coerces the stored string back to Python types).
    cache_response = await cache.get_dict(cache_key, "output")
    response_format = "text/plain"

    if json_output:
        response_format = "application/json"

    log.debug("Cache match for {}:\n{}", cache_key, cache_response)
    log.success("Completed query execution for query {}", query_data.summary)

    return {
        "output": cache_response,
        "id": cache_key,
        "cached": cached,
        "runtime": runtime,
        "timestamp": timestamp,
        "format": response_format,
        "random": query_data.random(),
        "level": "success",
        "keywords": [],
    }
|
||||
|
||||
|
||||
async def import_certificate(encoded_request: EncodedRequest):
    """Import a certificate from hyperglass-agent.

    The agent posts a JWT signed with the device's credential password;
    the decoded payload is the public key, written to the app's certs
    directory as ``<device_id>.pem``.

    Raises:
        HTTPException: 404 if the device is unknown, 400 if the JWT
            cannot be decoded.
        HyperglassError: If the key cannot be written or verified.
    """
    # Try to match the requested device name with configured devices.
    # BUGFIX: log only the requested name here — the original logged
    # `devices[encoded_request.device]`, performing the lookup *before*
    # the guarded try block, so an unknown device raised an unhandled
    # error instead of the intended 404.
    log.debug(
        "Attempting certificate import for device '{}'", encoded_request.device
    )
    try:
        matched_device = devices[encoded_request.device]
    except AttributeError:
        raise HTTPException(
            detail=f"Device {str(encoded_request.device)} not found", status_code=404
        )

    try:
        # Decode JSON Web Token
        decoded_request = await jwt_decode(
            payload=encoded_request.encoded,
            secret=matched_device.credential.password.get_secret_value(),
        )
    except HyperglassError as decode_error:
        raise HTTPException(detail=str(decode_error), status_code=400)

    try:
        # Write certificate to file
        import_public_key(
            app_path=APP_PATH,
            device_name=matched_device._id,
            keystring=decoded_request,
        )
    except RuntimeError as err:
        raise HyperglassError(str(err), level="danger")

    log.info("Added public key for {}", encoded_request.device)
    return {
        "output": f"Added public key for {encoded_request.device}",
        "level": "success",
        "keywords": [encoded_request.device],
    }
|
||||
|
||||
|
||||
async def docs():
    """Serve custom docs.

    Renders either the Swagger UI or Redoc page depending on
    ``params.docs.mode``; 404 when docs are disabled.
    """
    if not params.docs.enable:
        raise HTTPException(detail="Not found", status_code=404)

    renderers = {"swagger": get_swagger_ui_html, "redoc": get_redoc_html}
    render = renderers[params.docs.mode]
    return render(
        openapi_url=params.docs.openapi_url, title=params.site_title + " - API Docs"
    )
|
||||
|
||||
|
||||
async def routers():
    """Serve list of configured routers and attributes.

    Each device is reduced to its public fields; for VRFs, only the
    name and display name of each entry are exposed.
    """
    included_fields = {
        "name": ...,
        "network": ...,
        "display_name": ...,
        "vrfs": {-1: {"name", "display_name"}},
    }
    return [device.dict(include=included_fields) for device in devices.objects]
|
||||
|
||||
|
||||
async def communities():
    """Serve list of configured communities if mode is select.

    404 when the BGP community query mode is anything other than
    'select' (i.e. there is no fixed list to serve).
    """
    if params.queries.bgp_community.mode != "select":
        raise HTTPException(detail="BGP community mode is not select", status_code=404)

    exported = []
    for community in params.queries.bgp_community.communities:
        exported.append(community.export_dict())
    return exported
|
||||
|
||||
|
||||
async def queries():
    """Serve list of enabled query types."""
    # The list is precomputed on the params object; nothing to filter here.
    return params.queries.list
|
||||
|
||||
|
||||
async def info():
    """Serve general information about this instance of hyperglass.

    Returns the site title, organization name, primary ASN (as int),
    and the running hyperglass version string.
    """
    return {
        "name": params.site_title,
        "organization": params.org_name,
        "primary_asn": int(params.primary_asn),
        "version": f"hyperglass {__version__}",
    }
|
||||
|
||||
|
||||
endpoints = [query, docs, routers, info]
|
||||
|
|
@ -1,51 +0,0 @@
|
|||
"""Tasks to be executed from web API."""
|
||||
|
||||
# Standard Library
|
||||
from typing import Dict, Union
|
||||
from pathlib import Path
|
||||
|
||||
# Third Party
|
||||
from httpx import Headers
|
||||
|
||||
|
||||
def import_public_key(
    app_path: Union[Path, str], device_name: str, keystring: str
) -> bool:
    """Import a public key for hyperglass-agent.

    Writes ``keystring`` to ``<app_path>/certs/<device_name>.pem`` and
    reads it back to verify the write.

    Args:
        app_path: hyperglass application directory (str or Path).
        device_name: Device identifier used as the filename stem.
        keystring: Key material to write.

    Returns:
        True if the key was written and verified.

    Raises:
        RuntimeError: If the certs directory cannot be created, or the
            written file does not match the input key.
    """
    cert_dir = Path(app_path) / "certs"

    # exist_ok avoids the race between an existence check and mkdir.
    try:
        cert_dir.mkdir(parents=True, exist_ok=True)
    except OSError as err:
        raise RuntimeError(
            f"Failed to create certs directory at {str(cert_dir)}"
        ) from err

    cert_file = cert_dir / f"{device_name}.pem"
    cert_file.write_text(str(keystring))

    # Verify the write by reading the file back. BUGFIX: compare the
    # stripped forms of *both* sides — the original stripped only the
    # read-back copy, so any key with leading/trailing whitespace
    # always failed verification.
    if cert_file.read_text().strip() != str(keystring).strip():
        raise RuntimeError("Wrote key, but written file did not match input key")

    return True
|
||||
|
||||
|
||||
async def process_headers(headers: Headers) -> Dict:
    """Filter out unwanted headers and return as a dictionary.

    Only a fixed allow-list of request headers is kept; missing headers
    map to None.
    """
    wanted = (
        "user-agent",
        "referer",
        "accept-encoding",
        "accept-language",
        "x-real-ip",
        "x-forwarded-for",
    )
    all_headers = dict(headers)
    return {name: all_headers.get(name) for name in wanted}
|
||||
7
hyperglass/cache/__init__.py
vendored
7
hyperglass/cache/__init__.py
vendored
|
|
@ -1,7 +0,0 @@
|
|||
"""Redis cache handlers."""
|
||||
|
||||
# Project
|
||||
from hyperglass.cache.aio import AsyncCache
|
||||
from hyperglass.cache.sync import SyncCache
|
||||
|
||||
__all__ = ("AsyncCache", "SyncCache")
|
||||
148
hyperglass/cache/aio.py
vendored
148
hyperglass/cache/aio.py
vendored
|
|
@ -1,148 +0,0 @@
|
|||
"""Asyncio Redis cache handler."""
|
||||
|
||||
# Standard Library
|
||||
import json
|
||||
import time
|
||||
import pickle
|
||||
import asyncio
|
||||
from typing import Any, Dict
|
||||
|
||||
# Third Party
|
||||
from aredis import StrictRedis as AsyncRedis
|
||||
from aredis.pubsub import PubSub as AsyncPubSub
|
||||
from aredis.exceptions import RedisError
|
||||
|
||||
# Project
|
||||
from hyperglass.cache.base import BaseCache
|
||||
from hyperglass.exceptions import HyperglassError
|
||||
|
||||
|
||||
class AsyncCache(BaseCache):
    """Asynchronous Redis cache handler.

    Async wrapper around ``aredis.StrictRedis`` using the connection
    parameters stored by ``BaseCache``; values read back from Redis are
    coerced to Python types via ``BaseCache.parse_types``.
    """

    def __init__(self, *args, **kwargs):
        """Initialize Redis connection."""
        super().__init__(*args, **kwargs)

        # Unwrap the pydantic SecretStr if a password was configured.
        password = self.password
        if password is not None:
            password = password.get_secret_value()

        self.instance: AsyncRedis = AsyncRedis(
            db=self.db,
            host=self.host,
            port=self.port,
            password=password,
            decode_responses=self.decode_responses,
            **self.redis_args,
        )

    async def test(self):
        """Send an echo to Redis to ensure it can be reached.

        Raises:
            HyperglassError: With a danger-level message distinguishing
                authentication failures from connectivity failures.
        """
        try:
            await self.instance.echo("hyperglass test")
        except RedisError as err:
            err_msg = str(err)
            if not err_msg and hasattr(err, "__context__"):
                # Some Redis exceptions are raised without a message
                # even if they are raised from another exception that
                # does have a message.
                err_msg = str(err.__context__)

            if "auth" in err_msg.lower():
                raise HyperglassError(
                    "Authentication to Redis server {server} failed.".format(
                        server=repr(self)
                    ),
                    level="danger",
                ) from None
            else:
                raise HyperglassError(
                    "Unable to connect to Redis server {server}".format(
                        server=repr(self)
                    ),
                    level="danger",
                ) from None

    async def get(self, *args: str) -> Any:
        """Get item(s) from cache.

        A single key uses GET; multiple keys use MGET.
        """
        if len(args) == 1:
            raw = await self.instance.get(args[0])
        else:
            raw = await self.instance.mget(args)
        return self.parse_types(raw)

    async def get_dict(self, key: str, field: str = "") -> Any:
        """Get hash map (dict) item(s).

        With no field, returns the whole hash (HGETALL); otherwise
        returns the single field's value (HGET).
        """
        if not field:
            raw = await self.instance.hgetall(key)
        else:
            raw = await self.instance.hget(key, field)

        return self.parse_types(raw)

    async def set(self, key: str, value: str) -> bool:
        """Set cache values."""
        return await self.instance.set(key, value)

    async def set_dict(self, key: str, field: str, value: str) -> bool:
        """Set hash map (dict) values.

        Dicts are JSON-encoded; everything else is stringified. Returns
        True on success (HSET returns 1 for a new field, 0 for update).
        """
        success = False

        if isinstance(value, Dict):
            value = json.dumps(value)
        else:
            value = str(value)

        response = await self.instance.hset(key, field, value)

        if response in (0, 1):
            success = True

        return success

    async def wait(self, pubsub: AsyncPubSub, timeout: int = 30, **kwargs) -> Any:
        """Wait for pub/sub messages & return posted message.

        Polls every 10 ms up to ``timeout`` seconds; returns None if no
        message arrives in time.
        """
        now = time.time()
        timeout = now + timeout

        while now < timeout:

            message = await pubsub.get_message(ignore_subscribe_messages=True, **kwargs)

            if message is not None and message["type"] == "message":
                data = message["data"]
                return self.parse_types(data)

            await asyncio.sleep(0.01)
            now = time.time()

        return None

    async def pubsub(self) -> AsyncPubSub:
        """Provide an aredis.pubsub.Pubsub instance."""
        return self.instance.pubsub()

    async def pub(self, key: str, value: str) -> None:
        """Publish a value."""
        # NOTE(review): 1 s delay before publishing — presumably to give
        # a subscriber time to attach; confirm before removing.
        await asyncio.sleep(1)
        await self.instance.publish(key, value)

    async def clear(self) -> None:
        """Clear the cache."""
        await self.instance.flushdb()

    async def delete(self, *keys: str) -> None:
        """Delete a cache key."""
        await self.instance.delete(*keys)

    async def expire(self, *keys: str, seconds: int) -> None:
        """Set timeout of key in seconds."""
        for key in keys:
            await self.instance.expire(key, seconds)

    async def get_config(self) -> Dict:
        """Get pickled config object from cache.

        SECURITY NOTE: unpickles data from Redis; safe only as long as
        the Redis instance is trusted and not externally writable.
        """

        pickled = await self.instance.get("HYPERGLASS_CONFIG")
        return pickle.loads(pickled)
|
||||
66
hyperglass/cache/base.py
vendored
66
hyperglass/cache/base.py
vendored
|
|
@ -1,66 +0,0 @@
|
|||
"""Base Redis cache handler."""
|
||||
|
||||
# Standard Library
|
||||
import re
|
||||
import json
|
||||
from typing import Any, Optional
|
||||
|
||||
# Third Party
|
||||
from pydantic import SecretStr
|
||||
|
||||
|
||||
class BaseCache:
    """Redis cache handler.

    Holds connection parameters shared by the sync and async handlers
    and provides ``parse_types`` to coerce Redis string values back to
    Python types.
    """

    def __init__(
        self,
        db: int,
        host: str = "localhost",
        port: int = 6379,
        password: "Optional[SecretStr]" = None,
        decode_responses: bool = True,
        **kwargs: "Any",
    ) -> None:
        """Initialize Redis connection.

        Args:
            db: Redis database number.
            host: Redis server hostname or address.
            port: Redis server TCP port.
            password: Optional Redis password (pydantic SecretStr).
            decode_responses: Whether the client decodes byte responses.
            **kwargs: Extra arguments passed through to the Redis client.
        """
        # Annotations above/below are string-form so this module has no
        # hard runtime dependency on pydantic for annotation evaluation.
        self.db: int = db
        self.host: str = str(host)
        self.port: int = port
        self.password: "Optional[SecretStr]" = password
        self.decode_responses: bool = decode_responses
        self.redis_args: dict = kwargs

    def __repr__(self) -> str:
        """Represent class state."""
        return "HyperglassCache(db={}, host={}, port={}, password={})".format(
            self.db, self.host, self.port, self.password
        )

    def parse_types(self, value: str) -> "Any":
        """Parse a string to standard python types.

        Strings matching numeric, boolean, none-like, or JSON patterns
        are converted; lists/tuples/dicts are converted element-wise.
        Anything else is returned unchanged.
        """

        def parse_string(str_value: str):

            is_float = (re.compile(r"^(\d+\.\d+)$"), float)
            is_int = (re.compile(r"^(\d+)$"), int)
            # BUGFIX: the factory was `bool`, but bool("False") is True
            # (any non-empty string is truthy), so cached "False"/"false"
            # values parsed to True. Map the literal text instead.
            is_bool = (
                re.compile(r"^(True|true|False|false)$"),
                lambda v: v.lower() == "true",
            )
            is_none = (re.compile(r"^(None|none|null|nil|\(nil\))$"), lambda v: None)
            is_jsonable = (re.compile(r"^[\{\[].*[\}\]]$"), json.loads)

            for pattern, factory in (is_float, is_int, is_bool, is_none, is_jsonable):
                if isinstance(str_value, str) and bool(re.match(pattern, str_value)):
                    str_value = factory(str_value)
                    break
            return str_value

        if isinstance(value, str):
            value = parse_string(value)
        elif isinstance(value, bytes):
            value = parse_string(value.decode("utf-8"))
        elif isinstance(value, list):
            value = [parse_string(i) for i in value]
        elif isinstance(value, tuple):
            value = tuple(parse_string(i) for i in value)
        elif isinstance(value, dict):
            value = {k: self.parse_types(v) for k, v in value.items()}

        return value
|
||||
147
hyperglass/cache/sync.py
vendored
147
hyperglass/cache/sync.py
vendored
|
|
@ -1,147 +0,0 @@
|
|||
"""Non-asyncio Redis cache handler."""
|
||||
|
||||
# Standard Library
|
||||
import json
|
||||
import time
|
||||
import pickle
|
||||
from typing import Any, Dict
|
||||
|
||||
# Third Party
|
||||
from redis import Redis as SyncRedis
|
||||
from redis.client import PubSub as SyncPubsSub
|
||||
from redis.exceptions import RedisError
|
||||
|
||||
# Project
|
||||
from hyperglass.cache.base import BaseCache
|
||||
from hyperglass.exceptions import HyperglassError
|
||||
|
||||
|
||||
class SyncCache(BaseCache):
    """Synchronous Redis cache handler.

    Blocking counterpart of ``AsyncCache`` built on ``redis.Redis``;
    values read back from Redis are coerced to Python types via
    ``BaseCache.parse_types``.
    """

    def __init__(self, *args, **kwargs):
        """Initialize Redis connection."""
        super().__init__(*args, **kwargs)

        # Unwrap the pydantic SecretStr if a password was configured.
        password = self.password
        if password is not None:
            password = password.get_secret_value()

        self.instance: SyncRedis = SyncRedis(
            db=self.db,
            host=self.host,
            port=self.port,
            password=password,
            decode_responses=self.decode_responses,
            **self.redis_args,
        )

    def test(self):
        """Send an echo to Redis to ensure it can be reached.

        Raises:
            HyperglassError: With a danger-level message distinguishing
                authentication failures from connectivity failures.
        """
        try:
            self.instance.echo("hyperglass test")
        except RedisError as err:
            err_msg = str(err)
            if not err_msg and hasattr(err, "__context__"):
                # Some Redis exceptions are raised without a message
                # even if they are raised from another exception that
                # does have a message.
                err_msg = str(err.__context__)

            if "auth" in err_msg.lower():
                raise HyperglassError(
                    "Authentication to Redis server {server} failed.".format(
                        server=repr(self)
                    ),
                    level="danger",
                ) from None
            else:
                raise HyperglassError(
                    "Unable to connect to Redis server {server}".format(
                        server=repr(self)
                    ),
                    level="danger",
                ) from None

    def get(self, *args: str) -> Any:
        """Get item(s) from cache.

        A single key uses GET; multiple keys use MGET.
        """
        if len(args) == 1:
            raw = self.instance.get(args[0])
        else:
            raw = self.instance.mget(args)
        return self.parse_types(raw)

    def get_dict(self, key: str, field: str = "") -> Any:
        """Get hash map (dict) item(s).

        With no field, returns the whole hash (HGETALL); otherwise
        returns the single field's value (HGET).
        """
        if not field:
            raw = self.instance.hgetall(key)
        else:
            # NOTE: field is coerced to str here, unlike AsyncCache.
            raw = self.instance.hget(key, str(field))

        return self.parse_types(raw)

    def set(self, key: str, value: str) -> bool:
        """Set cache values."""
        return self.instance.set(key, str(value))

    def set_dict(self, key: str, field: str, value: str) -> bool:
        """Set hash map (dict) values.

        Dicts are JSON-encoded; everything else is stringified. Returns
        True on success (HSET returns 1 for a new field, 0 for update).
        """
        success = False

        if isinstance(value, Dict):
            value = json.dumps(value)
        else:
            value = str(value)

        response = self.instance.hset(key, str(field), value)

        if response in (0, 1):
            success = True

        return success

    def wait(self, pubsub: SyncPubsSub, timeout: int = 30, **kwargs) -> Any:
        """Wait for pub/sub messages & return posted message.

        Polls every 10 ms up to ``timeout`` seconds; returns None if no
        message arrives in time.
        """
        now = time.time()
        timeout = now + timeout

        while now < timeout:

            message = pubsub.get_message(ignore_subscribe_messages=True, **kwargs)

            if message is not None and message["type"] == "message":
                data = message["data"]
                return self.parse_types(data)

            time.sleep(0.01)
            now = time.time()

        return None

    def pubsub(self) -> SyncPubsSub:
        """Provide a redis.client.Pubsub instance."""
        return self.instance.pubsub()

    def pub(self, key: str, value: str) -> None:
        """Publish a value."""
        # NOTE(review): 1 s delay before publishing — presumably to give
        # a subscriber time to attach; confirm before removing.
        time.sleep(1)
        self.instance.publish(key, value)

    def clear(self) -> None:
        """Clear the cache."""
        self.instance.flushdb()

    def delete(self, *keys: str) -> None:
        """Delete a cache key."""
        self.instance.delete(*keys)

    def expire(self, *keys: str, seconds: int) -> None:
        """Set timeout of key in seconds."""
        for key in keys:
            self.instance.expire(key, seconds)

    def get_config(self) -> Dict:
        """Get pickled config object from cache.

        SECURITY NOTE: unpickles data from Redis; safe only as long as
        the Redis instance is trusted and not externally writable.
        """

        pickled = self.instance.get("HYPERGLASS_CONFIG")
        return pickle.loads(pickled)
|
||||
|
|
@ -1,6 +0,0 @@
|
|||
"""hyperglass cli module."""
|
||||
|
||||
# Project
|
||||
from hyperglass.cli.commands import hg
|
||||
|
||||
CLI = hg
|
||||
|
|
@ -1,205 +0,0 @@
|
|||
"""CLI Command definitions."""
|
||||
|
||||
# Standard Library
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
# Third Party
|
||||
from click import group, option, help_option
|
||||
|
||||
# Project
|
||||
from hyperglass.util import cpu_count
|
||||
|
||||
# Local
|
||||
from .echo import error, label, success, warning, cmd_help
|
||||
from .util import build_ui
|
||||
from .static import LABEL, CLI_HELP, E
|
||||
from .installer import Installer
|
||||
from .formatting import HelpColorsGroup, HelpColorsCommand, random_colors
|
||||
|
||||
# Define working directory
WORKING_DIR = Path(__file__).parent

# Heuristic: UTF-capable filesystem encoding is used as a proxy for a
# terminal that can render color/emoji output.
supports_color = "utf" in sys.getfilesystemencoding().lower()
|
||||
|
||||
|
||||
def _print_version(ctx, param, value):
    """Click eager callback: print the installed hyperglass version and exit."""
    # Project
    from hyperglass import __version__

    if value and not ctx.resilient_parsing:
        label("hyperglass version: {v}", v=__version__)
        ctx.exit()
|
||||
|
||||
|
||||
# Top-level Click command group; every hyperglass CLI command attaches here.
@group(
    cls=HelpColorsGroup,
    help=CLI_HELP,
    context_settings={"help_option_names": ["-h", "--help"], "color": supports_color},
    help_headers_color=LABEL,
    help_options_custom_colors=random_colors(
        "build-ui", "start", "secret", "setup", "system-info", "clear-cache"
    ),
)
@option(
    "-v",
    "--version",
    is_flag=True,
    callback=_print_version,
    expose_value=False,
    # Eager so --version short-circuits before other option processing.
    is_eager=True,
    help=cmd_help(E.NUMBERS, "hyperglass version", supports_color),
)
@help_option(
    "-h",
    "--help",
    help=cmd_help(E.FOLDED_HANDS, "Show this help message", supports_color),
)
def hg():
    """Initialize Click Command Group."""
    pass
|
||||
|
||||
|
||||
@hg.command(
    "build-ui", help=cmd_help(E.BUTTERFLY, "Create a new UI build", supports_color)
)
@option("-t", "--timeout", required=False, default=180, help="Timeout in seconds")
def build_frontend(timeout):
    """Create a new UI build.

    Arguments:
        timeout {int} -- Seconds to wait for the frontend build to finish.
    """
    return build_ui(timeout)
|
||||
|
||||
|
||||
@hg.command(  # noqa: C901
    "start",
    help=cmd_help(E.ROCKET, "Start web server", supports_color),
    cls=HelpColorsCommand,
    help_options_custom_colors=random_colors("-b", "-d", "-w"),
)
@option("-b", "--build", is_flag=True, help="Render theme & build frontend assets")
@option(
    "-d",
    "--direct",
    is_flag=True,
    default=False,
    help="Start hyperglass directly instead of through process manager",
)
@option(
    "-w",
    "--workers",
    type=int,
    required=False,
    default=0,
    help=f"Number of workers. By default, calculated from CPU cores [{cpu_count(2)}]",
)
def start(build, direct, workers):  # noqa: C901
    """Start web server and optionally build frontend assets.

    Arguments:
        build {bool} -- Build the UI before starting the server.
        direct {bool} -- Run uvicorn directly instead of the process manager.
        workers {int} -- Worker count; 0 means "use the computed default".
    """
    # Project
    from hyperglass.api import start as uvicorn_start

    # Alias the import — the original bare `import start` shadowed this
    # command function's own name.
    from hyperglass.main import start as process_start

    kwargs = {}
    if workers != 0:
        kwargs["workers"] = workers

    try:
        # When building, only continue to start the server if the build
        # completed successfully.
        if build and not build_ui(timeout=180):
            return

        if direct:
            uvicorn_start(**kwargs)
        else:
            process_start(**kwargs)

    except (KeyboardInterrupt, SystemExit) as err:
        error_message = str(err)
        # Only show the interrupt's message when there is one worth showing.
        if len(error_message) > 1:
            warning(error_message)
        error("Stopping hyperglass due to keyboard interrupt.")
|
||||
|
||||
|
||||
@hg.command(
    "secret",
    help=cmd_help(E.LOCK, "Generate agent secret", supports_color),
    cls=HelpColorsCommand,
    help_options_custom_colors=random_colors("-l"),
)
@option(
    "-l", "--length", "length", default=32, help="Number of characters [default: 32]"
)
def generate_secret(length):
    """Generate a URL-safe secret for hyperglass-agent.

    Arguments:
        length {int} -- Length of secret
    """
    # Standard Library
    import secrets

    label("Secret: {s}", s=secrets.token_urlsafe(length))
|
||||
|
||||
|
||||
@hg.command(
    "setup",
    help=cmd_help(E.TOOLBOX, "Run the setup wizard", supports_color),
    cls=HelpColorsCommand,
    help_options_custom_colors=random_colors("-d"),
)
@option(
    "-d",
    "--use-defaults",
    "unattended",
    default=False,
    is_flag=True,
    help="Use hyperglass defaults (requires no input)",
)
def setup(unattended):
    """Define application directory, move example files, generate systemd service.

    Arguments:
        unattended {bool} -- Skip all prompts and use defaults.
    """
    installer = Installer(unattended=unattended)
    installer.install()

    success(
        """Completed hyperglass installation.
After adding your hyperglass.yaml file, you should run the `hyperglass build-ui` command."""  # noqa: E501
    )
|
||||
|
||||
|
||||
@hg.command(
    "system-info",
    help=cmd_help(
        E.THERMOMETER, " Get system information for a bug report", supports_color
    ),
    cls=HelpColorsCommand,
)
def get_system_info():
    """Get CPU, Memory, Disk, Python, & hyperglass version.

    Prints a markdown table suitable for pasting into a bug report.
    """
    # Project
    # Deferred import keeps CLI startup light.
    from hyperglass.cli.util import system_info

    system_info()
|
||||
|
||||
|
||||
@hg.command(
    "clear-cache",
    help=cmd_help(E.SOAP, "Clear the Redis cache", supports_color),
    cls=HelpColorsCommand,
)
def clear_cache():
    """Clear the Redis Cache."""
    # Project
    # Deferred import keeps CLI startup light.
    from hyperglass.util import sync_clear_redis_cache

    try:
        sync_clear_redis_cache()
        success("Cleared Redis Cache")
    except RuntimeError as err:
        # error() raises a CliError with the formatted message.
        error(str(err))
|
||||
|
|
@ -1,142 +0,0 @@
|
|||
"""Helper functions for CLI message printing."""
|
||||
# Standard Library
|
||||
import re
|
||||
|
||||
# Third Party
|
||||
from click import echo, style
|
||||
|
||||
# Project
|
||||
from hyperglass.cli.static import CMD_HELP, Message
|
||||
from hyperglass.cli.exceptions import CliError
|
||||
|
||||
|
||||
def cmd_help(emoji="", help_text="", supports_color=False):
    """Return help text for a command, emoji-prefixed and styled when color is supported."""
    if not supports_color:
        return help_text
    return emoji + style(help_text, **CMD_HELP)
|
||||
|
||||
|
||||
def _base_formatter(_text, _state, _callback, *args, **kwargs):
    """Format a text block, replacing ``{name}`` template fields with styled values.

    Arguments:
        _text {str} -- Text to format; may contain ``{name}`` template fields
        _state {str} -- Message state key (e.g. "info", "error", "success")
        _callback {callable} -- Applied to the final string (default: click.style)

    Returns:
        Whatever ``_callback`` returns for the formatted text
        (a string, or an exception instance for error states).
    """
    # Style attributes (colors, emoji) for this message state.
    fmt = Message(_state)

    if _callback is None:
        _callback = style

    # Coerce all positional format args to strings.
    nargs = ()
    for i in args:
        if not isinstance(i, str):
            nargs += (str(i),)
        else:
            nargs += (i,)

    # Style keyword values with the state's keyword (label) attributes.
    for k, v in kwargs.items():
        if not isinstance(v, str):
            v = str(v)
        kwargs[k] = style(v, **fmt.kw)

    # Split on template fields so surrounding text and fields can be styled
    # independently, then substitute the args/kwargs into each piece.
    text_all = re.split(r"(\{\w+\})", _text)
    text_all = [style(i, **fmt.msg) for i in text_all]
    text_all = [i.format(*nargs, **kwargs) for i in text_all]

    if fmt.emoji:
        text_all.insert(0, fmt.emoji)

    text_fmt = "".join(text_all)

    return _callback(text_fmt)
|
||||
|
||||
|
||||
def info(text, *args, **kwargs):
    """Echo formatted informational text.

    Arguments:
        text {str} -- Text to format; args/kwargs fill template fields

    Returns:
        {None} -- Return value of click.echo
    """
    # Pass positionally (as error() does): the previous keyword-before-*args
    # call made any extra positional format arg collide with _text (TypeError).
    return _base_formatter(text, "info", echo, *args, **kwargs)
|
||||
|
||||
|
||||
def error(text, *args, **kwargs):
    """Raise a formatted CLI exception.

    Arguments:
        text {str} -- Text to format; args/kwargs fill template fields

    Raises:
        CliError: Always raised with the formatted message
    """
    formatted_exc = _base_formatter(text, "error", CliError, *args, **kwargs)
    raise formatted_exc
|
||||
|
||||
|
||||
def success(text, *args, **kwargs):
    """Echo formatted success text.

    Arguments:
        text {str} -- Text to format; args/kwargs fill template fields

    Returns:
        {None} -- Return value of click.echo
    """
    # Pass positionally (as error() does): the previous keyword-before-*args
    # call made any extra positional format arg collide with _text (TypeError).
    return _base_formatter(text, "success", echo, *args, **kwargs)
|
||||
|
||||
|
||||
def warning(text, *args, **kwargs):
    """Echo formatted warning text.

    Arguments:
        text {str} -- Text to format; args/kwargs fill template fields

    Returns:
        {None} -- Return value of click.echo
    """
    # Pass positionally (as error() does): the previous keyword-before-*args
    # call made any extra positional format arg collide with _text (TypeError).
    return _base_formatter(text, "warning", echo, *args, **kwargs)
|
||||
|
||||
|
||||
def label(text, *args, **kwargs):
    """Echo formatted info text with accented labels.

    Arguments:
        text {str} -- Text to format; args/kwargs fill template fields

    Returns:
        {None} -- Return value of click.echo
    """
    # Pass positionally (as error() does): the previous keyword-before-*args
    # call made any extra positional format arg collide with _text (TypeError).
    return _base_formatter(text, "label", echo, *args, **kwargs)
|
||||
|
||||
|
||||
def status(text, *args, **kwargs):
    """Echo formatted status text.

    Arguments:
        text {str} -- Text to format; args/kwargs fill template fields

    Returns:
        {None} -- Return value of click.echo
    """
    # Pass positionally (as error() does): the previous keyword-before-*args
    # call made any extra positional format arg collide with _text (TypeError).
    return _base_formatter(text, "status", echo, *args, **kwargs)
|
||||
|
|
@ -1,15 +0,0 @@
|
|||
"""hyperglass CLI custom exceptions."""
|
||||
|
||||
# Third Party
|
||||
from click import ClickException, echo
|
||||
from click._compat import get_text_stderr
|
||||
|
||||
|
||||
class CliError(ClickException):
    """Click exception that omits the default 'Error:' prefix when shown."""

    def show(self, file=None):
        """Print the formatted message without the 'Error:' prefix.

        Arguments:
            file -- Stream to write to; defaults to stderr, matching click.
        """
        if file is None:
            file = get_text_stderr()
        # Bug fix: the resolved stream was previously ignored, sending error
        # output to stdout; write to the requested (or stderr) stream.
        echo(self.format_message(), file=file)
|
||||
|
|
@ -1,178 +0,0 @@
|
|||
"""Help formatting.
|
||||
|
||||
https://github.com/click-contrib/click-help-colors
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2016 Roman Tonkonozhko
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
"""
|
||||
|
||||
# Standard Library
|
||||
import random
|
||||
|
||||
# Third Party
|
||||
import click
|
||||
|
||||
|
||||
def random_colors(*commands):
    """Assign a random bold color to each command/option name.

    Arguments:
        *commands {str} -- Command or option names to colorize.

    Returns:
        {dict} -- ``{name: click.style() kwargs}`` plus a fixed white ``--help`` entry.
    """
    palette = ["blue", "green", "red", "yellow", "magenta", "cyan", "white"]

    # Grow the palette until it can be sampled for the requested count.
    # The original doubled it exactly once, which raised ValueError for
    # more than 14 commands.
    while len(palette) < len(commands):
        palette += palette

    unique_colors = random.sample(palette, len(commands))

    commands_fmt = {
        cmd: {"fg": color, "bold": True}
        for cmd, color in zip(commands, unique_colors)
    }
    commands_fmt["--help"] = {"fg": "white"}
    return commands_fmt
|
||||
|
||||
|
||||
class HelpColorsFormatter(click.HelpFormatter):
    """Click help formatter that colorizes headings and options.

    Adapted from click-help-colors (MIT; see module docstring for license).
    Modified from the original to accept click.style() keyword dicts instead
    of raw ANSI color names.
    """

    def __init__(
        self,
        headers_color=None,
        options_color=None,
        options_custom_colors=None,
        *args,
        **kwargs
    ):
        """Initialize help formatter.

        Keyword Arguments:
            headers_color {dict} -- click.style() parameters for headers
            options_color {dict} -- click.style() parameters for options
            options_custom_colors {dict} -- click.style() parameters keyed by option name
        """
        self.headers_color = headers_color or {}
        self.options_color = options_color or {}
        self.options_custom_colors = options_custom_colors or {}

        super().__init__(indent_increment=3, *args, **kwargs)

    def _pick_color(self, option_name):
        """Return click.style() kwargs for a given option row.

        The first token of e.g. "-b, --build" identifies the option.
        """
        opt = option_name.split()[0].strip(",")
        color = {}
        if self.options_custom_colors and opt in self.options_custom_colors.keys():
            color = self.options_custom_colors[opt]
        else:
            color = self.options_color
        return color

    def write_usage(self, prog, args="", prefix="Usage: "):
        """Write the Usage: section with a styled prefix."""
        prefix_fmt = click.style(prefix, **self.headers_color)
        super().write_usage(prog, args, prefix=prefix_fmt)

    def write_heading(self, heading):
        """Write a styled section heading."""
        heading_fmt = click.style(heading, **self.headers_color)
        super().write_heading(heading_fmt)

    def write_dl(self, rows, **kwargs):
        """Write the options definition list with per-option colors."""
        colorized_rows = [
            (click.style(row[0], **self._pick_color(row[0])), row[1]) for row in rows
        ]
        super().write_dl(colorized_rows, **kwargs)
|
||||
|
||||
|
||||
class HelpColorsMixin:
    """Mixin that renders help via HelpColorsFormatter.

    Adapted from click-help-colors (MIT; see module docstring for license).
    Modified from the original to accept click.style() keyword dicts instead
    of raw ANSI color names.
    """

    def __init__(
        self,
        help_headers_color=None,
        help_options_color=None,
        help_options_custom_colors=None,
        *args,
        **kwargs
    ):
        """Initialize help mixin.

        Keyword Arguments:
            help_headers_color {dict} -- click.style() parameters for headers
            help_options_color {dict} -- click.style() parameters for options
            help_options_custom_colors {dict} -- per-option click.style() parameters
        """
        self.help_headers_color = help_headers_color or {}
        self.help_options_color = help_options_color or {}
        self.help_options_custom_colors = help_options_custom_colors or {}
        super().__init__(*args, **kwargs)

    def get_help(self, ctx):
        """Format help using the colorizing formatter."""
        formatter = HelpColorsFormatter(
            width=ctx.terminal_width,
            max_width=ctx.max_content_width,
            headers_color=self.help_headers_color,
            options_color=self.help_options_color,
            options_custom_colors=self.help_options_custom_colors,
        )
        self.format_help(ctx, formatter)
        return formatter.getvalue().rstrip("\n")
|
||||
|
||||
|
||||
class HelpColorsGroup(HelpColorsMixin, click.Group):
    """Click Group with colorized help output.

    Adapted from click-help-colors (MIT; see module docstring for license).
    Subcommands and subgroups inherit this group's color configuration.
    """

    def __init__(self, *args, **kwargs):
        """Initialize group formatter."""
        super().__init__(*args, **kwargs)

    def command(self, *args, **kwargs):
        """Register a command, propagating this group's color settings."""
        kwargs.setdefault("cls", HelpColorsCommand)
        kwargs.setdefault("help_headers_color", self.help_headers_color)
        kwargs.setdefault("help_options_color", self.help_options_color)
        kwargs.setdefault("help_options_custom_colors", self.help_options_custom_colors)
        return super().command(*args, **kwargs)

    def group(self, *args, **kwargs):
        """Register a subgroup, propagating this group's color settings."""
        kwargs.setdefault("cls", HelpColorsGroup)
        kwargs.setdefault("help_headers_color", self.help_headers_color)
        kwargs.setdefault("help_options_color", self.help_options_color)
        kwargs.setdefault("help_options_custom_colors", self.help_options_custom_colors)
        return super().group(*args, **kwargs)
|
||||
|
||||
|
||||
class HelpColorsCommand(HelpColorsMixin, click.Command):
    """Click Command with colorized help output.

    Adapted from click-help-colors (MIT; see module docstring for license).
    """

    def __init__(self, *args, **kwargs):
        """Initialize command formatter."""
        super().__init__(*args, **kwargs)
|
||||
|
|
@ -1,114 +0,0 @@
|
|||
"""Install hyperglass."""
|
||||
|
||||
# Standard Library
|
||||
import os
|
||||
import shutil
|
||||
from filecmp import dircmp
|
||||
from pathlib import Path
|
||||
|
||||
# Third Party
|
||||
import inquirer
|
||||
|
||||
# Local
|
||||
from .echo import error, success, warning
|
||||
from .util import create_dir
|
||||
|
||||
# Candidate installation directories: per-user or system-wide.
USER_PATH = Path.home() / "hyperglass"
ROOT_PATH = Path("/etc/hyperglass/")

# Static assets bundled with the package.
ASSET_DIR = Path(__file__).parent.parent / "images"

# Files excluded from asset-directory comparisons.
IGNORED_FILES = [".DS_Store"]

# Interactive prompt definition for choosing the install directory.
INSTALL_PATHS = [
    inquirer.List(
        "install_path",
        message="Choose a directory for hyperglass",
        choices=[USER_PATH, ROOT_PATH],
    )
]
|
||||
|
||||
|
||||
def prompt_for_path() -> Path:
    """Recursively prompt the user for an app path until one is provided.

    Returns:
        {Path} -- Selected installation directory (one of the INSTALL_PATHS
        choices, which are Path objects).
    """
    answer = inquirer.prompt(INSTALL_PATHS)

    # inquirer returns None when the prompt is cancelled; warn and re-prompt.
    if answer is None:
        warning("A directory for hyperglass is required")
        answer = prompt_for_path()

    return answer["install_path"]
|
||||
|
||||
|
||||
class Installer:
    """Create hyperglass's on-disk file structure and migrate bundled assets."""

    def __init__(self, unattended: bool):
        """Initialize installer.

        Arguments:
            unattended {bool} -- If True, skip all prompts and use defaults.
        """
        self.unattended = unattended

    def install(self) -> None:
        """Complete the installation."""
        self.app_path = self._get_app_path()
        self._scaffold()
        self._migrate_static_assets()

    def _get_app_path(self) -> Path:
        """Find the app path from env variables or a prompt."""
        # Unattended installs always use the per-user directory.
        if self.unattended:
            return USER_PATH

        app_path = os.environ.get("HYPERGLASS_PATH", None)

        if app_path is None:
            app_path = prompt_for_path()

        return app_path

    def _scaffold(self) -> None:
        """Create the file structure necessary for hyperglass to run."""
        ui_dir = self.app_path / "static" / "ui"
        images_dir = self.app_path / "static" / "images"
        favicon_dir = images_dir / "favicons"
        custom_dir = self.app_path / "static" / "custom"

        create_dir(self.app_path)

        for path in (ui_dir, images_dir, favicon_dir, custom_dir):
            create_dir(path, parents=True)

    def _migrate_static_assets(self) -> bool:
        """Synchronize the project assets with the installation assets."""
        target_dir = self.app_path / "static" / "images"

        if not target_dir.exists():
            shutil.copytree(ASSET_DIR, target_dir)

        # Compare the contents of the project's asset directory (considered
        # the source of truth) with the installation directory. If they do
        # not match, delete the installation directory's asset directory and
        # re-copy it.
        compare_initial = dircmp(ASSET_DIR, target_dir, ignore=IGNORED_FILES)

        if not compare_initial.left_list == compare_initial.right_list:
            shutil.rmtree(target_dir)
            shutil.copytree(ASSET_DIR, target_dir)

            # Re-compare the source and destination directory contents to
            # ensure they match after the re-copy.
            compare_post = dircmp(ASSET_DIR, target_dir, ignore=IGNORED_FILES)

            if not compare_post.left_list == compare_post.right_list:
                error(
                    "Files in {a} do not match files in {b}",
                    a=str(ASSET_DIR),
                    b=str(target_dir),
                )
                return False

        success("Migrated assets from {a} to {b}", a=str(ASSET_DIR), b=str(target_dir))
        return True
|
||||
|
|
@ -1,127 +0,0 @@
|
|||
"""Static string definitions."""
|
||||
# Third Party
|
||||
import click
|
||||
|
||||
|
||||
class Char:
    """Wrap a single character, with repetition and concatenation helpers."""

    def __init__(self, char):
        """Store the wrapped character."""
        self.char = char

    def __getitem__(self, i):
        """Return the character repeated ``i`` times (e.g. ``WS[4]`` -> 4 spaces)."""
        return self.char * i

    def __str__(self):
        """Return the character itself as a string."""
        return str(self.char)

    # repr and str are intentionally identical for this helper.
    __repr__ = __str__

    def __add__(self, other):
        """Concatenate the character with the string form of ``other``."""
        return f"{self.char}{other}"
|
||||
|
||||
|
||||
class Emoji:
    """Namespace of unicode emoji used in CLI output (trailing space included)."""

    BUTTERFLY = "\U0001F98B "
    CHECK = "\U00002705 "
    INFO = "\U00002755 "
    ERROR = "\U0000274C "
    WARNING = "\U000026A0\U0000FE0F "
    TOOLBOX = "\U0001F9F0 "
    NUMBERS = "\U0001F522 "
    FOLDED_HANDS = "\U0001F64F "
    ROCKET = "\U0001F680 "
    SPARKLES = "\U00002728 "
    PAPERCLIP = "\U0001F4CE "
    KEY = "\U0001F511 "
    LOCK = "\U0001F512 "
    CLAMP = "\U0001F5DC "
    BOOKS = "\U0001F4DA "
    THERMOMETER = "\U0001F321 "
    SOAP = "\U0001F9FC "
|
||||
|
||||
|
||||
WS = Char(" ")
|
||||
NL = Char("\n")
|
||||
CL = Char(":")
|
||||
E = Emoji()
|
||||
|
||||
CLI_HELP = (
|
||||
click.style("hyperglass", fg="magenta", bold=True)
|
||||
+ WS[1]
|
||||
+ click.style("Command Line Interface", fg="white")
|
||||
)
|
||||
|
||||
# Click Style Helpers
|
||||
SUCCESS = {"fg": "green", "bold": True}
|
||||
WARNING = {"fg": "yellow"}
|
||||
ERROR = {"fg": "red", "bold": True}
|
||||
LABEL = {"fg": "white"}
|
||||
INFO = {"fg": "blue", "bold": True}
|
||||
STATUS = {"fg": "black"}
|
||||
VALUE = {"fg": "magenta", "bold": True}
|
||||
CMD_HELP = {"fg": "white"}
|
||||
|
||||
|
||||
class Message:
    """Lookup table of colors and emoji for a CLI message state."""

    # Body-text color for each message state.
    colors = {
        "warning": "yellow",
        "success": "green",
        "error": "red",
        "info": "blue",
        "status": "black",
        "label": "white",
    }
    # Keyword/label color for each message state.
    label_colors = {
        "warning": "yellow",
        "success": "green",
        "error": "red",
        "info": "blue",
        "status": "black",
        "label": "magenta",
    }
    # Emoji prefix for each message state (empty for status/label).
    emojis = {
        "warning": E.WARNING,
        "success": E.CHECK,
        "error": E.ERROR,
        "info": E.INFO,
        "status": "",
        "label": "",
    }

    def __init__(self, state):
        """Bind this instance to a message state and resolve its colors."""
        self.state = state
        self.color = self.colors[state]
        self.label_color = self.label_colors[state]

    @property
    def msg(self):
        """click.style() kwargs for the message body."""
        return {"fg": self.color}

    @property
    def kw(self):
        """click.style() kwargs for keyword/label values."""
        return {"fg": self.label_color, "bold": True, "underline": True}

    @property
    def emoji(self):
        """Emoji prefix matching this state."""
        return self.emojis[self.state]

    def __repr__(self):
        """Readable representation showing the derived style attributes."""
        return f"Message(msg={self.msg}, kw={self.kw}, emoji={self.emoji})"
|
||||
|
|
@ -1,181 +0,0 @@
|
|||
"""CLI utility functions."""
|
||||
|
||||
# Standard Library
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
# Third Party
|
||||
from click import echo, style
|
||||
|
||||
# Project
|
||||
from hyperglass.cli.echo import info, error, status, success
|
||||
from hyperglass.cli.static import CL, NL, WS, E
|
||||
|
||||
PROJECT_ROOT = Path(__file__).parent.parent
|
||||
|
||||
|
||||
def async_command(func):
    """Decorator that makes an async function callable from synchronous code.

    The wrapped callable runs a returned coroutine to completion on an event
    loop and returns its result; a non-coroutine return value passes through.
    """
    # Standard Library
    import asyncio
    from functools import update_wrapper

    def wrapper(*args, **kwargs):
        result = func(*args, **kwargs)
        # Bug fix: `asyncio.coroutine` (used previously) was removed in
        # Python 3.11; detect and run coroutines directly instead.
        if not asyncio.iscoroutine(result):
            return result
        try:
            loop = asyncio.get_event_loop()
        except RuntimeError:
            # No current event loop (e.g. Python 3.12+ off the main thread).
            loop = asyncio.new_event_loop()
            asyncio.set_event_loop(loop)
        return loop.run_until_complete(result)

    return update_wrapper(wrapper, func)
|
||||
|
||||
|
||||
def start_web_server(start, params):
    """Print a startup banner, then start the web server.

    Arguments:
        start {callable} -- Function that actually starts the server
        params {dict} -- Must contain "host" and "port" keys
    """
    msg_start = "Starting hyperglass web server on"
    msg_uri = "http://"
    msg_host = str(params["host"])
    msg_port = str(params["port"])
    # Full banner length, used to right-offset the rocket emoji line.
    msg_len = len("".join([msg_start, WS[1], msg_uri, msg_host, CL[1], msg_port]))
    try:
        echo(
            NL[1]
            + WS[msg_len + 8]
            + E.ROCKET
            + NL[1]
            + E.CHECK
            + style(msg_start, fg="green", bold=True)
            + WS[1]
            + style(msg_uri, fg="white")
            + style(msg_host, fg="blue", bold=True)
            + style(CL[1], fg="white")
            + style(msg_port, fg="magenta", bold=True)
            + WS[1]
            + E.ROCKET
            + NL[1]
            + WS[1]
            + NL[1]
        )
        start()

    except Exception as e:
        error("Failed to start web server: {e}", e=e)
|
||||
|
||||
|
||||
def build_ui(timeout: int) -> bool:
    """Create a new UI build.

    Arguments:
        timeout {int} -- Seconds to wait for the frontend build.

    Returns:
        {bool} -- True on completion (error() raises on failure).
    """
    try:
        # Project
        # Deferred so importing this module doesn't require a full config load.
        from hyperglass.configuration import CONFIG_PATH, params, frontend_params
        from hyperglass.util.frontend import build_frontend
        from hyperglass.compat._asyncio import aiorun
    except ImportError as e:
        error("Error importing UI builder: {e}", e=e)

    status("Starting new UI build with a {t} second timeout...", t=timeout)

    if params.developer_mode:
        dev_mode = "development"
    else:
        dev_mode = "production"

    try:
        build_success = aiorun(
            build_frontend(
                dev_mode=params.developer_mode,
                dev_url=f"http://localhost:{str(params.listen_port)}/",
                prod_url="/api/",
                params=frontend_params,
                force=True,
                app_path=CONFIG_PATH,
            )
        )
        if build_success:
            success("Completed UI build in {m} mode", m=dev_mode)

    except Exception as e:
        error("Error building UI: {e}", e=e)

    return True
|
||||
|
||||
|
||||
def create_dir(path, **kwargs) -> bool:
    """Create a directory if it does not already exist.

    Arguments:
        path {Path|str} -- Directory to create
        **kwargs -- Passed through to Path.mkdir() (e.g. parents=True)

    Returns:
        {bool} -- True (error() raises a CliError on failure)
    """
    # If input path is not a path object, try to make it one.
    if not isinstance(path, Path):
        try:
            path = Path(path)
        except TypeError:
            error("{p} is not a valid path", p=path)

    # Bug fix: the original checked `path.exists()` in both an `if` and a
    # subsequent `elif`, making the "already exists" branch unreachable.
    # Report pre-existing directories before attempting creation.
    if path.exists():
        info("{p} already exists", p=path)
        return True

    try:
        path.mkdir(**kwargs)
    except PermissionError:
        error(
            "{u} does not have permission to create {p}. Try running with sudo?",
            u=os.getlogin(),
            p=path,
        )

    # Verify the path was actually created.
    if path.exists():
        success("Created {p}", p=path)

    return True
|
||||
|
||||
|
||||
def write_to_file(file, data) -> bool:
    """Write stripped string data to ``file`` and report the outcome."""
    try:
        with file.open("w+") as f:
            f.write(data.strip())
    except PermissionError:
        error(
            "{u} does not have permission to write to {f}. Try running with sudo?",
            u=os.getlogin(),
            f=file,
        )

    if file.exists():
        success("Wrote systemd file {f}", f=file)
    else:
        error("Error writing file {f}", f=file)

    return True
|
||||
|
||||
|
||||
def system_info() -> None:
    """Print a markdown table of system information for bug reports."""
    # Project
    from hyperglass.util.system_info import get_system_info

    data = get_system_info()

    def _code(val):
        # Wrap a value in markdown inline-code formatting.
        return f"`{str(val)}`"

    def _bold(val):
        # Wrap a value in markdown bold formatting.
        return f"**{str(val)}**"

    md_table_lines = ("| Metric | Value |", "| :----- | :---- |")

    # Each metric is a (value, modifier) pair; "code" requests code styling.
    for title, metric in data.items():
        value, mod = metric

        title = _bold(title)

        if mod == "code":
            value = _code(value)

        md_table_lines += (f"| {title} | {value} |",)

    md_table = "\n".join(md_table_lines)

    info("Please copy & paste this table in your bug report:\n")
    echo(md_table + "\n")

    return None
|
||||
|
|
@ -1 +0,0 @@
|
|||
"""Functions for maintaining compatability with older Python versions or libraries."""
|
||||
|
|
@ -1,118 +0,0 @@
|
|||
"""Functions for maintaining asyncio compatability with other versions of Python."""
|
||||
|
||||
# Standard Library
|
||||
import sys
|
||||
import asyncio
|
||||
import weakref
|
||||
|
||||
try:
|
||||
# Standard Library
|
||||
from asyncio import get_running_loop
|
||||
except ImportError:
|
||||
# Standard Library
|
||||
from asyncio.events import _get_running_loop as get_running_loop
|
||||
|
||||
# Interpreter version, used to select the asyncio runner at the bottom
# of this module.
RUNNING_PYTHON_VERSION = sys.version_info

# _patch_loop, _patched_run, and _cancel_all_tasks are taken directly
# from github.com/nickdavies:
# https://gist.github.com/nickdavies/4a37c6cd9dcc7041fddd2d2a81cee383

# These functions are a backport of the functionality added in
# Python 3.7 to support asyncio.run(), which is used in several areas
# of hyperglass. Because the LTS version of Ubuntu at this time (18.04)
# still ships with Python 3.6, compatibility with Python 3.6 is the
# goal.
|
||||
|
||||
|
||||
def _patch_loop(loop):
    """Patch an event loop so every task it creates is tracked.

    Returns a WeakSet of created tasks, allowing _patched_run to cancel
    leftovers during shutdown (backport of Python 3.7 asyncio.run behavior).
    """
    tasks = weakref.WeakSet()

    # Single-element list so the nested closures can rebind the factory.
    task_factory = [None]

    def _set_task_factory(factory):
        task_factory[0] = factory

    def _get_task_factory():
        return task_factory[0]

    def _safe_task_factory(loop, coro):
        # Wrap the configured (or default) factory so each task is recorded.
        if task_factory[0] is None:
            task = asyncio.Task(coro, loop=loop)
            if task._source_traceback:
                # Drop this factory's frame from task tracebacks, mirroring
                # what asyncio's own default factory does.
                del task._source_traceback[-1]
        else:
            task = task_factory[0](loop, coro)
        tasks.add(task)
        return task

    loop.set_task_factory(_safe_task_factory)
    loop.set_task_factory = _set_task_factory
    loop.get_task_factory = _get_task_factory

    return tasks
|
||||
|
||||
|
||||
def _cancel_all_tasks(loop, tasks):
    """Cancel all still-pending tasks and report unhandled exceptions.

    NOTE(review): asyncio.gather(loop=...) was removed in Python 3.10;
    this backport targets Python 3.6 where the keyword still exists.
    """
    to_cancel = [task for task in tasks if not task.done()]

    if not to_cancel:
        return

    for task in to_cancel:
        task.cancel()

    # Wait for cancellations to settle; exceptions are collected, not raised.
    loop.run_until_complete(
        asyncio.gather(*to_cancel, loop=loop, return_exceptions=True)
    )

    for task in to_cancel:
        if task.cancelled():
            continue
        if task.exception() is not None:
            # Surface any exception that was not consumed before shutdown.
            loop.call_exception_handler(
                {
                    "message": "unhandled exception during asyncio.run() shutdown",
                    "exception": task.exception(),
                    "task": task,
                }
            )
|
||||
|
||||
|
||||
def _patched_run(main, *, debug=False):
    """Run coroutine *main* to completion on a fresh event loop.

    Backport of asyncio.run() (added in Python 3.7) for Python 3.6: creates
    a new loop, runs the coroutine, then cancels leftover tasks, shuts down
    async generators, and closes the loop.

    Raises:
        RuntimeError: If called while an event loop is already running.
        ValueError: If *main* is not a coroutine.
    """
    try:
        running = get_running_loop()
    except RuntimeError:
        running = None

    if running is not None:
        raise RuntimeError("asyncio.run() cannot be called from a running event loop")

    if not asyncio.iscoroutine(main):
        raise ValueError("a coroutine was expected, got {!r}".format(main))

    loop = asyncio.new_event_loop()
    tracked = _patch_loop(loop)

    try:
        asyncio.set_event_loop(loop)
        loop.set_debug(debug)
        return loop.run_until_complete(main)
    finally:
        # Mirror asyncio.run()'s teardown even if the coroutine raised.
        try:
            _cancel_all_tasks(loop, tracked)
            loop.run_until_complete(loop.shutdown_asyncgens())
        finally:
            asyncio.set_event_loop(None)
            loop.close()
|
||||
|
||||
|
||||
# If the local system's Python is 3.7 or later, use the standard library's
# asyncio.run(); otherwise (Python 3.6) fall back to the backported runner.
# BUG FIX: the original checked `>= (3, 6)` first, which always matched on
# any supported interpreter and made the 3.7+ stdlib branch unreachable.
if RUNNING_PYTHON_VERSION >= (3, 7):
    aiorun = asyncio.run
elif RUNNING_PYTHON_VERSION >= (3, 6):
    aiorun = _patched_run
|
||||
File diff suppressed because it is too large
Load diff
5
hyperglass/configuration/.gitignore
vendored
5
hyperglass/configuration/.gitignore
vendored
|
|
@ -1,5 +0,0 @@
|
|||
.DS_Store
|
||||
*.toml
|
||||
*.yaml
|
||||
*.test
|
||||
configuration_old
|
||||
|
|
@ -1,14 +0,0 @@
|
|||
"""hyperglass Configuration."""
|
||||
|
||||
# Local
|
||||
from .main import (
|
||||
URL_DEV,
|
||||
URL_PROD,
|
||||
CONFIG_PATH,
|
||||
STATIC_PATH,
|
||||
REDIS_CONFIG,
|
||||
params,
|
||||
devices,
|
||||
commands,
|
||||
frontend_params,
|
||||
)
|
||||
|
|
@ -1,376 +0,0 @@
|
|||
"""Import configuration files and returns default values if undefined."""
|
||||
|
||||
# Standard Library
|
||||
import os
|
||||
import json
|
||||
from typing import Dict, List
|
||||
from pathlib import Path
|
||||
|
||||
# Third Party
|
||||
import yaml
|
||||
|
||||
# Project
|
||||
from hyperglass.log import (
|
||||
log,
|
||||
set_log_level,
|
||||
enable_file_logging,
|
||||
enable_syslog_logging,
|
||||
)
|
||||
from hyperglass.util import set_app_path, set_cache_env, current_log_level
|
||||
from hyperglass.defaults import CREDIT, DEFAULT_DETAILS
|
||||
from hyperglass.constants import (
|
||||
SUPPORTED_QUERY_TYPES,
|
||||
PARSED_RESPONSE_FIELDS,
|
||||
__version__,
|
||||
)
|
||||
from hyperglass.exceptions import ConfigError, ConfigMissing
|
||||
from hyperglass.util.files import check_path
|
||||
from hyperglass.models.commands import Commands
|
||||
from hyperglass.models.config.params import Params
|
||||
from hyperglass.models.config.devices import Devices
|
||||
|
||||
# Local
|
||||
from .markdown import get_markdown
|
||||
from .validation import validate_config, validate_nos_commands
|
||||
|
||||
set_app_path(required=True)
|
||||
|
||||
CONFIG_PATH = Path(os.environ["hyperglass_directory"])
|
||||
log.info("Configuration directory: {d}", d=str(CONFIG_PATH))
|
||||
|
||||
# Project Directories
|
||||
WORKING_DIR = Path(__file__).resolve().parent
|
||||
CONFIG_FILES = (
|
||||
("hyperglass.yaml", False),
|
||||
("devices.yaml", True),
|
||||
("commands.yaml", False),
|
||||
)
|
||||
|
||||
|
||||
def _check_config_files(directory: Path):
    """Verify each expected config file in *directory* exists and is readable.

    Returns a tuple of resolved paths (or None for optional files that are
    absent), in the same order as CONFIG_FILES.

    Raises:
        ConfigMissing: If a required file is absent or unreadable.
    """
    results = ()

    for file_name, required in CONFIG_FILES:
        file_path = directory / file_name
        resolved = check_path(file_path)

        if resolved is None:
            if required:
                raise ConfigMissing(missing_item=str(file_path))
            # Optional file is absent: warn and continue with defaults.
            log.warning(
                "'{f}' was not found, but is not required to run hyperglass. "
                + "Defaults will be used.",
                f=str(file_path),
            )

        results += (resolved,)

    return results
|
||||
|
||||
|
||||
STATIC_PATH = CONFIG_PATH / "static"
|
||||
|
||||
CONFIG_MAIN, CONFIG_DEVICES, CONFIG_COMMANDS = _check_config_files(CONFIG_PATH)
|
||||
|
||||
|
||||
def _config_required(config_path: Path) -> Dict:
    """Load a required YAML config file and return its parsed contents.

    Raises:
        ConfigError: If the file is not valid YAML.
        ConfigMissing: If the file parses to nothing (empty file).
    """
    try:
        with config_path.open("r") as cf:
            loaded = yaml.safe_load(cf)
    except (yaml.YAMLError, yaml.MarkedYAMLError) as yaml_error:
        raise ConfigError(str(yaml_error))

    if loaded is None:
        log.critical("{} appears to be empty", str(config_path))
        raise ConfigMissing(missing_item=config_path.name)

    return loaded
|
||||
|
||||
|
||||
def _config_optional(config_path: Path) -> Dict:
    """Load an optional YAML config file, returning {} when absent or empty.

    Raises:
        ConfigError: If the file exists but is not valid YAML.
    """
    if config_path is None:
        return {}

    try:
        with config_path.open("r") as cf:
            # An empty file parses to None; normalize to an empty dict.
            return yaml.safe_load(cf) or {}
    except (yaml.YAMLError, yaml.MarkedYAMLError) as yaml_error:
        # BUG FIX: was `ConfigError(error_msg=...)`, which produced an empty
        # exception message because ConfigError's message template is its
        # first positional argument — `error_msg` fell into format kwargs.
        raise ConfigError(str(yaml_error))
|
||||
|
||||
|
||||
user_config = _config_optional(CONFIG_MAIN)
|
||||
|
||||
# Read raw debug value from config to enable debugging quickly.
|
||||
set_log_level(logger=log, debug=user_config.get("debug", True))
|
||||
|
||||
# Map imported user configuration to expected schema.
|
||||
log.debug("Unvalidated configuration from {}: {}", CONFIG_MAIN, user_config)
|
||||
params = validate_config(config=user_config, importer=Params)
|
||||
|
||||
# Re-evaluate debug state after config is validated
|
||||
log_level = current_log_level(log)
|
||||
|
||||
if params.debug and log_level != "debug":
|
||||
set_log_level(logger=log, debug=True)
|
||||
elif not params.debug and log_level == "debug":
|
||||
set_log_level(logger=log, debug=False)
|
||||
|
||||
# Map imported user commands to expected schema.
|
||||
_user_commands = _config_optional(CONFIG_COMMANDS)
|
||||
log.debug("Unvalidated commands from {}: {}", CONFIG_COMMANDS, _user_commands)
|
||||
commands = validate_config(config=_user_commands, importer=Commands.import_params)
|
||||
|
||||
# Map imported user devices to expected schema.
|
||||
_user_devices = _config_required(CONFIG_DEVICES)
|
||||
log.debug("Unvalidated devices from {}: {}", CONFIG_DEVICES, _user_devices)
|
||||
devices = validate_config(config=_user_devices.get("routers", []), importer=Devices)
|
||||
|
||||
# Validate commands are both supported and properly mapped.
|
||||
validate_nos_commands(devices.all_nos, commands)
|
||||
|
||||
# Set cache configurations to environment variables, so they can be
|
||||
# used without importing this module (Gunicorn, etc).
|
||||
set_cache_env(db=params.cache.database, host=params.cache.host, port=params.cache.port)
|
||||
|
||||
# Set up file logging once configuration parameters are initialized.
|
||||
enable_file_logging(
|
||||
logger=log,
|
||||
log_directory=params.logging.directory,
|
||||
log_format=params.logging.format,
|
||||
log_max_size=params.logging.max_size,
|
||||
)
|
||||
|
||||
# Set up syslog logging if enabled.
|
||||
if params.logging.syslog is not None and params.logging.syslog.enable:
|
||||
enable_syslog_logging(
|
||||
logger=log,
|
||||
syslog_host=params.logging.syslog.host,
|
||||
syslog_port=params.logging.syslog.port,
|
||||
)
|
||||
|
||||
if params.logging.http is not None and params.logging.http.enable:
|
||||
log.debug("HTTP logging is enabled")
|
||||
|
||||
# Perform post-config initialization string formatting or other
|
||||
# functions that require access to other config levels. E.g.,
|
||||
# something in 'params.web.text' needs to be formatted with a value
|
||||
# from params.
|
||||
try:
|
||||
params.web.text.subtitle = params.web.text.subtitle.format(
|
||||
**params.dict(exclude={"web", "queries", "messages"})
|
||||
)
|
||||
|
||||
# If keywords are unmodified (default), add the org name &
|
||||
# site_title.
|
||||
if Params().site_keywords == params.site_keywords:
|
||||
params.site_keywords = sorted(
|
||||
{*params.site_keywords, params.org_name, params.site_title}
|
||||
)
|
||||
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
|
||||
def _build_frontend_devices():
    """Build filtered JSON structure of devices for frontend.

    Schema:
        {
            "device.name": {
                "network": "device.network.display_name",
                "display_name": "device.display_name",
                "vrfs": [{"id", "display_name", "default", "ipv4", "ipv6"}, ...]
            }
        }

    Raises:
        ConfigError: Raised if parsing/building error occurs.

    Returns:
        {dict} -- Frontend devices
    """
    frontend_dict = {}
    for device in devices.objects:
        # The same payload applies whether the device name is new or
        # repeated; build it once instead of duplicating the literal in
        # both branches (as the original did).
        entry = {
            "network": device.network.display_name,
            "display_name": device.display_name,
            "vrfs": [
                {
                    "id": vrf.name,
                    "display_name": vrf.display_name,
                    "default": vrf.default,
                    "ipv4": bool(vrf.ipv4),
                    "ipv6": bool(vrf.ipv6),
                }
                for vrf in device.vrfs
            ],
        }
        if device.name in frontend_dict:
            frontend_dict[device.name].update(entry)
        else:
            frontend_dict[device.name] = entry

    if not frontend_dict:
        # BUG FIX: the message template must be positional; the former
        # `error_msg=` keyword form yielded an empty exception message.
        raise ConfigError("Unable to build network to device mapping")
    return frontend_dict
|
||||
|
||||
|
||||
def _build_networks() -> List[Dict]:
    """Build filtered JSON Structure of networks & devices for Jinja templates.

    Raises:
        ConfigError: If no networks could be built from the device config.

    Returns:
        List of {"display_name", "locations": [...]} dicts, one per network.
    """
    networks = []
    # A set comprehension already deduplicates; the original wrapped it in
    # a redundant extra set() call.
    network_names = list({device.network.display_name for device in devices.objects})

    for network_name in network_names:
        network_def = {"display_name": network_name, "locations": []}
        for device in devices.objects:
            if device.network.display_name != network_name:
                continue
            network_def["locations"].append(
                {
                    "_id": device._id,
                    "name": device.name,
                    "network": device.network.display_name,
                    "vrfs": [
                        {
                            "_id": vrf._id,
                            "display_name": vrf.display_name,
                            "default": vrf.default,
                            "ipv4": bool(vrf.ipv4),
                            "ipv6": bool(vrf.ipv6),
                        }
                        for vrf in device.vrfs
                    ],
                }
            )
        networks.append(network_def)

    if not networks:
        # BUG FIX: the message template must be positional; the former
        # `error_msg=` keyword form yielded an empty exception message.
        raise ConfigError("Unable to build network to device mapping")
    return networks
|
||||
|
||||
|
||||
content_params = json.loads(
|
||||
params.json(include={"primary_asn", "org_name", "site_title", "site_description"})
|
||||
)
|
||||
|
||||
|
||||
def _build_vrf_help() -> Dict:
    """Build a dict keyed by VRF id, mapping each enabled query type to its
    rendered help content, enable flag, and formatting parameters."""
    all_help = {}

    for vrf in devices.vrf_objects:
        vrf_help = {}

        for query_type in SUPPORTED_QUERY_TYPES:
            cmd = getattr(vrf.info, query_type)
            if not cmd.enable:
                continue

            help_params = {**content_params, **cmd.params.dict()}

            # Fall back to "<VRF>: <query display name>" when no title is set.
            if help_params["title"] is None:
                query_params = getattr(params.queries, query_type)
                help_params[
                    "title"
                ] = f"{vrf.display_name}: {query_params.display_name}"

            rendered = get_markdown(
                config_path=cmd,
                default=DEFAULT_DETAILS[query_type],
                params=help_params,
            )

            vrf_help[query_type] = {
                "content": rendered,
                "enable": cmd.enable,
                "params": help_params,
            }

        all_help[vrf._id] = vrf_help

    return all_help
|
||||
|
||||
|
||||
content_greeting = get_markdown(
|
||||
config_path=params.web.greeting,
|
||||
default="",
|
||||
params={"title": params.web.greeting.title},
|
||||
)
|
||||
|
||||
content_vrf = _build_vrf_help()
|
||||
|
||||
content_credit = CREDIT.format(version=__version__)
|
||||
|
||||
networks = _build_networks()
|
||||
frontend_devices = _build_frontend_devices()
|
||||
_include_fields = {
|
||||
"cache": {"show_text", "timeout"},
|
||||
"debug": ...,
|
||||
"developer_mode": ...,
|
||||
"primary_asn": ...,
|
||||
"request_timeout": ...,
|
||||
"org_name": ...,
|
||||
"google_analytics": ...,
|
||||
"site_title": ...,
|
||||
"site_description": ...,
|
||||
"site_keywords": ...,
|
||||
"web": ...,
|
||||
"messages": ...,
|
||||
}
|
||||
_frontend_params = params.dict(include=_include_fields)
|
||||
|
||||
|
||||
_frontend_params["web"]["logo"]["light_format"] = params.web.logo.light.suffix
|
||||
_frontend_params["web"]["logo"]["dark_format"] = params.web.logo.dark.suffix
|
||||
|
||||
_frontend_params.update(
|
||||
{
|
||||
"hyperglass_version": __version__,
|
||||
"queries": {**params.queries.map, "list": params.queries.list},
|
||||
"networks": networks,
|
||||
"parsed_data_fields": PARSED_RESPONSE_FIELDS,
|
||||
"content": {
|
||||
"credit": content_credit,
|
||||
"vrf": content_vrf,
|
||||
"greeting": content_greeting,
|
||||
},
|
||||
}
|
||||
)
|
||||
frontend_params = _frontend_params
|
||||
|
||||
URL_DEV = f"http://localhost:{str(params.listen_port)}/"
|
||||
URL_PROD = "/api/"
|
||||
|
||||
REDIS_CONFIG = {
|
||||
"host": str(params.cache.host),
|
||||
"port": params.cache.port,
|
||||
"decode_responses": True,
|
||||
"password": params.cache.password,
|
||||
}
|
||||
|
|
@ -1,61 +0,0 @@
|
|||
"""Markdown processing utility functions."""
|
||||
|
||||
# Project
|
||||
from hyperglass.log import log
|
||||
|
||||
|
||||
def _get_file(path_obj):
|
||||
"""Read a file.
|
||||
|
||||
Arguments:
|
||||
path_obj {Path} -- Path to file.
|
||||
|
||||
Returns:
|
||||
{str} -- File contents
|
||||
"""
|
||||
with path_obj.open("r") as raw_file:
|
||||
return raw_file.read()
|
||||
|
||||
|
||||
def format_markdown(content, params):
    """Interpolate config parameters into *content* via str.format.

    Arguments:
        content {str} -- Unformatted content
        params {dict} -- Values for the content's {placeholders}

    Returns:
        {str} -- Formatted content; the original content unchanged if it
        references a placeholder missing from *params*.
    """
    try:
        return content.format(**params)
    except KeyError:
        # Content references a placeholder we don't have; leave it as-is.
        return content
|
||||
|
||||
|
||||
def get_markdown(config_path, default, params):
    """Return formatted help content, preferring a user-supplied file.

    Uses the user's Markdown file when the content config enables one;
    otherwise falls back to *default*. Either way the text is run through
    format_markdown() with *params*.

    Arguments:
        config_path {object} -- content config
        default {str} -- default content
        params {dict} -- format values; must contain a "title" key

    Returns:
        {str} -- Formatted content
    """
    log.trace(f"Getting Markdown content for '{params['title']}'")

    if config_path.enable and config_path.file is not None:
        raw = _get_file(config_path.file)
    else:
        raw = default

    log.trace(f"Unformatted Content for '{params['title']}':\n{raw}")

    formatted = format_markdown(raw, params)

    log.trace(f"Formatted Content for '{params['title']}':\n{formatted}")

    return formatted
|
||||
|
|
@ -1,50 +0,0 @@
|
|||
"""Post-Validation Validation.
|
||||
|
||||
Some validations need to occur across multiple config files.
|
||||
"""
|
||||
# Standard Library
|
||||
from typing import Dict, List, Union, Callable
|
||||
|
||||
# Third Party
|
||||
from pydantic import ValidationError
|
||||
|
||||
# Project
|
||||
from hyperglass.log import log
|
||||
from hyperglass.models import HyperglassModel
|
||||
from hyperglass.constants import TRANSPORT_REST, SUPPORTED_STRUCTURED_OUTPUT
|
||||
from hyperglass.exceptions import ConfigError, ConfigInvalid
|
||||
from hyperglass.models.commands import Commands
|
||||
|
||||
|
||||
def validate_nos_commands(all_nos: List[str], commands: Commands) -> bool:
    """Ensure every NOS in use has an associated command profile.

    A NOS is acceptable when it has built-in structured-output support,
    uses the REST transport, or matches a user-defined command profile.

    Raises:
        ConfigError: If a device references a NOS with no command profile.
    """
    custom_commands = commands.dict().keys()
    known = (*SUPPORTED_STRUCTURED_OUTPUT, *TRANSPORT_REST, *custom_commands)

    for nos in all_nos:
        if nos not in known:
            raise ConfigError(
                '"{nos}" is used on a device, '
                + 'but no command profile for "{nos}" is defined.',
                nos=nos,
            )

    return True
|
||||
|
||||
|
||||
def validate_config(config: Union[Dict, List], importer: Callable) -> HyperglassModel:
    """Validate a config dict or list against a model.

    Arguments:
        config -- Raw deserialized configuration (mapping or sequence).
        importer -- Model class or callable performing the validation.

    Raises:
        ConfigInvalid: If pydantic validation fails.

    Returns:
        The validated model instance (None if *config* is neither a dict
        nor a list, preserving the original behavior).
    """
    validated = None
    try:
        # IDIOM FIX: isinstance checks against typing.Dict / typing.List
        # are deprecated; use the builtin types.
        if isinstance(config, dict):
            validated = importer(**config)
        elif isinstance(config, list):
            validated = importer(config)
    except ValidationError as err:
        log.error(str(err))
        raise ConfigInvalid(err.errors()) from None

    return validated
|
||||
|
|
@ -1,8 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
"""hyperglass CLI management tool."""
|
||||
|
||||
# Project
|
||||
from hyperglass.cli import CLI
|
||||
|
||||
if __name__ == "__main__":
|
||||
CLI()
|
||||
|
|
@ -1,89 +0,0 @@
|
|||
"""Constant definitions used throughout the application."""
|
||||
|
||||
# Standard Library
|
||||
from datetime import datetime
|
||||
|
||||
__name__ = "hyperglass"
|
||||
__version__ = "1.0.4"
|
||||
__author__ = "Matt Love"
|
||||
__copyright__ = f"Copyright {datetime.now().year} Matthew Love"
|
||||
__license__ = "BSD 3-Clause Clear License"
|
||||
|
||||
METADATA = (__name__, __version__, __author__, __copyright__, __license__)
|
||||
|
||||
MIN_PYTHON_VERSION = (3, 6)
|
||||
|
||||
MIN_NODE_VERSION = 14
|
||||
|
||||
TARGET_FORMAT_SPACE = ("huawei", "huawei_vrpv8")
|
||||
|
||||
TARGET_JUNIPER_ASPATH = ("juniper", "juniper_junos")
|
||||
|
||||
SUPPORTED_STRUCTURED_OUTPUT = ("juniper", "arista_eos")
|
||||
|
||||
STATUS_CODE_MAP = {"warning": 400, "error": 400, "danger": 500}
|
||||
|
||||
DNS_OVER_HTTPS = {
|
||||
"google": "https://dns.google/resolve",
|
||||
"cloudflare": "https://cloudflare-dns.com/dns-query",
|
||||
}
|
||||
|
||||
PARSED_RESPONSE_FIELDS = (
|
||||
("Prefix", "prefix", "left"),
|
||||
("Active", "active", None),
|
||||
("RPKI State", "rpki_state", "center"),
|
||||
("AS Path", "as_path", "left"),
|
||||
("Next Hop", "next_hop", "left"),
|
||||
("Origin", "source_as", None),
|
||||
("Weight", "weight", "center"),
|
||||
("Local Preference", "local_preference", "center"),
|
||||
("MED", "med", "center"),
|
||||
("Communities", "communities", "center"),
|
||||
("Originator", "source_rid", "right"),
|
||||
("Peer", "peer_rid", "right"),
|
||||
("Age", "age", "right"),
|
||||
)
|
||||
|
||||
SUPPORTED_QUERY_FIELDS = ("query_location", "query_type", "query_target", "query_vrf")
|
||||
SUPPORTED_QUERY_TYPES = (
|
||||
"bgp_route",
|
||||
"bgp_community",
|
||||
"bgp_aspath",
|
||||
"ping",
|
||||
"traceroute",
|
||||
)
|
||||
|
||||
FUNC_COLOR_MAP = {
|
||||
"primary": "cyan",
|
||||
"secondary": "blue",
|
||||
"success": "green",
|
||||
"warning": "yellow",
|
||||
"error": "orange",
|
||||
"danger": "red",
|
||||
}
|
||||
|
||||
TRANSPORT_REST = ("frr_legacy", "bird_legacy")
|
||||
|
||||
SCRAPE_HELPERS = {
|
||||
"arista": "arista_eos",
|
||||
"ios": "cisco_ios",
|
||||
"juniper_junos": "juniper",
|
||||
"junos": "juniper",
|
||||
"mikrotik": "mikrotik_routeros",
|
||||
"tsnr": "tnsr",
|
||||
}
|
||||
|
||||
DRIVER_MAP = {
|
||||
# TODO: Troubleshoot Arista with Scrapli, broken after upgrading to 2021.1.30.
|
||||
# "arista_eos": "scrapli", # noqa: E800
|
||||
"bird": "scrapli",
|
||||
"cisco_ios": "scrapli",
|
||||
"cisco_xe": "scrapli",
|
||||
"cisco_xr": "scrapli",
|
||||
"cisco_nxos": "scrapli",
|
||||
"juniper": "scrapli",
|
||||
"tnsr": "scrapli",
|
||||
"frr": "scrapli",
|
||||
"frr_legacy": "hyperglass_agent",
|
||||
"bird_legacy": "hyperglass_agent",
|
||||
}
|
||||
|
|
@ -1,83 +0,0 @@
|
|||
"""Constant store for large default values."""
|
||||
|
||||
CREDIT = """
|
||||
Powered by [**hyperglass**](https://hyperglass.dev) version {version}. \
|
||||
Source code licensed [_BSD 3-Clause Clear_](https://hyperglass.dev/docs/license/).
|
||||
"""
|
||||
|
||||
DEFAULT_TERMS = """
|
||||
By using {site_title}, you agree to be bound by the following terms of use:
|
||||
|
||||
All queries executed on this page are logged for analysis and troubleshooting. \
|
||||
Users are prohibited from automating queries, or attempting to process queries in \
|
||||
bulk. This service is provided on a best effort basis, and {org_name} \
|
||||
makes no availability or performance warranties or guarantees whatsoever.
|
||||
"""
|
||||
|
||||
DEFAULT_DETAILS = {
|
||||
"bgp_aspath": """
|
||||
{site_title} accepts the following `AS_PATH` regular expression patterns:
|
||||
|
||||
| Expression | Match |
|
||||
| :------------------- | :-------------------------------------------- |
|
||||
| `_65000$` | Originated by 65000 |
|
||||
| `^65000_` | Received from 65000 |
|
||||
| `_65000_` | Via 65000 |
|
||||
| `_65000_65001_` | Via 65000 and 65001 |
|
||||
| `_65000(_.+_)65001$` | Anything from 65001 that passed through 65000 |
|
||||
""",
|
||||
"bgp_community": """
|
||||
{site_title} makes use of the following BGP communities:
|
||||
|
||||
| Community | Description |
|
||||
| :-------- | :---------- |
|
||||
| `65000:1` | Example 1 |
|
||||
| `65000:2` | Example 2 |
|
||||
| `65000:3` | Example 3 |
|
||||
""",
|
||||
"bgp_route": """
|
||||
Performs BGP table lookup based on IPv4/IPv6 prefix.
|
||||
""",
|
||||
"ping": """
|
||||
Sends 5 ICMP echo requests to the target.
|
||||
""",
|
||||
"traceroute": """
|
||||
Performs UDP Based traceroute to the target. \
|
||||
For information about how to interpret traceroute results, [click here]\
|
||||
(https://hyperglass.dev/traceroute_nanog.pdf).
|
||||
""",
|
||||
}
|
||||
|
||||
DEFAULT_HELP = """
|
||||
##### BGP Route
|
||||
|
||||
Performs BGP table lookup based on IPv4/IPv6 prefix.
|
||||
|
||||
---
|
||||
|
||||
##### BGP Community
|
||||
|
||||
Performs BGP table lookup based on [Extended](https://tools.ietf.org/html/rfc4360) \
|
||||
or [Large](https://tools.ietf.org/html/rfc8195) community value.
|
||||
|
||||
---
|
||||
|
||||
##### BGP AS Path
|
||||
|
||||
Performs BGP table lookup based on `AS_PATH` regular expression.
|
||||
|
||||
---
|
||||
|
||||
##### Ping
|
||||
|
||||
Sends 5 ICMP echo requests to the target.
|
||||
|
||||
---
|
||||
|
||||
##### Traceroute
|
||||
|
||||
Performs UDP Based traceroute to the target.
|
||||
|
||||
For information about how to interpret traceroute results, [click here]\
|
||||
(https://hyperglass.dev/traceroute_nanog.pdf).
|
||||
"""
|
||||
|
|
@ -1,32 +0,0 @@
|
|||
"""Handle JSON Web Token Encoding & Decoding."""
|
||||
|
||||
# Standard Library
|
||||
import datetime
|
||||
|
||||
# Third Party
|
||||
import jwt
|
||||
|
||||
# Project
|
||||
from hyperglass.exceptions import RestError
|
||||
|
||||
|
||||
async def jwt_decode(payload: str, secret: str) -> str:
    """Decode & validate an encoded JSON Web Token (JWT).

    Raises:
        RestError: If the token is invalid/expired or missing the
            'payload' claim.

    Returns:
        The decoded 'payload' claim.
    """
    try:
        # BUG FIX: PyJWT's decode() takes `algorithms` (a list of allowed
        # algorithms); the former `algorithm="HS256"` kwarg was silently
        # ignored, leaving the accepted algorithm unpinned.
        decoded = jwt.decode(payload, secret, algorithms=["HS256"])
        return decoded["payload"]
    except (KeyError, jwt.PyJWTError) as exp:
        raise RestError(str(exp)) from None
|
||||
|
||||
|
||||
async def jwt_encode(payload: str, secret: str, duration: int) -> str:
    """Encode a query as a JSON Web Token (JWT) valid for *duration* seconds."""
    # CONSISTENCY FIX: compute the timestamp once so nbf/iat/exp are derived
    # from the same instant (the original called utcnow() three times).
    now = datetime.datetime.utcnow()
    token = {
        "payload": payload,
        "nbf": now,
        "iat": now,
        "exp": now + datetime.timedelta(seconds=duration),
    }
    # NOTE(review): .decode() assumes PyJWT 1.x, where encode() returns
    # bytes; PyJWT >= 2.0 returns str — confirm the pinned version.
    encoded = jwt.encode(token, secret, algorithm="HS256").decode("utf-8")
    return encoded
|
||||
|
|
@ -1,89 +0,0 @@
|
|||
---
|
||||
routers:
|
||||
- name: sfo_router01
|
||||
address: 10.0.0.1
|
||||
network:
|
||||
name: primary
|
||||
display_name: Main Network
|
||||
credential:
|
||||
username: user1
|
||||
password: secret1
|
||||
display_name: San Francisco, CA
|
||||
port: 22
|
||||
nos: cisco_ios
|
||||
vrfs:
|
||||
- name: default
|
||||
display_name: Global
|
||||
ipv4:
|
||||
source_address: 192.0.2.1
|
||||
access_list:
|
||||
- network: 10.0.0.0/8
|
||||
action: deny
|
||||
- network: 192.168.0.0/16
|
||||
action: deny
|
||||
- network: 172.16.0.0/12
|
||||
action: deny
|
||||
- network: 0.0.0.0/0
|
||||
action: permit
|
||||
ge: 8
|
||||
le: 24
|
||||
ipv6:
|
||||
source_address: 2001:db8::1
|
||||
access_list:
|
||||
- network: ::/0
|
||||
action: permit
|
||||
ge: 32
|
||||
le: 64
|
||||
- name: customer_a
|
||||
display_name: Customer A
|
||||
ipv4:
|
||||
source_address: 192.168.1.1
|
||||
access_list:
|
||||
- network: 192.0.2.0/24
|
||||
action: deny
|
||||
- network: 10.0.0.0/8
|
||||
action: permit
|
||||
ipv6: null
|
||||
proxy: null
|
||||
- name: atl_router01
|
||||
address: 10.0.0.2
|
||||
network:
|
||||
name: secondary
|
||||
display_name: That Other Network
|
||||
credential:
|
||||
username: user2
|
||||
password: secret2
|
||||
display_name: Atlanta, GA
|
||||
port: 22
|
||||
nos: juniper
|
||||
vrfs:
|
||||
- name: default
|
||||
display_name: Global
|
||||
ipv4:
|
||||
source_address: 192.0.2.2
|
||||
access_list:
|
||||
- network: 10.0.0.0/8
|
||||
action: deny
|
||||
- network: 192.168.0.0/16
|
||||
action: deny
|
||||
- network: 172.16.0.0/12
|
||||
action: deny
|
||||
- network: 0.0.0.0/0
|
||||
action: permit
|
||||
ge: 8
|
||||
le: 24
|
||||
ipv6:
|
||||
source_address: 2001:db8::2
|
||||
access_list:
|
||||
- network: ::/0
|
||||
action: permit
|
||||
ge: 32
|
||||
le: 64
|
||||
proxy:
|
||||
name: server01
|
||||
address: 10.11.6.204
|
||||
port: 22
|
||||
credential:
|
||||
username: user1
|
||||
password: secret1
|
||||
nos: linux_ssh
|
||||
|
|
@ -1,190 +0,0 @@
|
|||
"""Custom exceptions for hyperglass."""
|
||||
|
||||
# Standard Library
|
||||
import json as _json
|
||||
from typing import Dict, List, Union, Optional
|
||||
|
||||
# Project
|
||||
from hyperglass.log import log
|
||||
from hyperglass.constants import STATUS_CODE_MAP
|
||||
|
||||
|
||||
def validation_error_message(*errors: Dict) -> str:
    """Parse errors returned from pydantic.ValidationError.errors().

    Each error dict's 'loc' tuple is joined into a breadcrumb path and
    paired with its message; entries are joined into one display string.
    """
    entries = [
        'Field: {}\n Error: {}\n'.format(
            " → ".join(str(piece) for piece in err["loc"]), err["msg"]
        )
        for err in errors
    ]

    return "\n".join(["\n", *entries])
|
||||
|
||||
|
||||
class HyperglassError(Exception):
    """hyperglass base exception.

    Carries a display message, a severity level ('info', 'warning', or
    'danger'), and optional keywords for front-end highlighting. Logs
    itself at a matching severity when instantiated.
    """

    def __init__(
        self,
        message: str = "",
        level: str = "warning",
        keywords: Optional[List[str]] = None,
    ) -> None:
        """Initialize the hyperglass base exception class."""
        self._message = message
        self._level = level
        self._keywords = keywords or []
        # Log at a severity matching the error level.
        if self._level == "warning":
            log.error(repr(self))
        elif self._level == "danger":
            log.critical(repr(self))
        else:
            log.info(repr(self))

    def __str__(self) -> str:
        """Return the instance's error message."""
        return self._message

    def __repr__(self) -> str:
        """Return the instance's severity & error message in a string."""
        return f"[{self.level.upper()}] {self._message}"

    def dict(self) -> Dict:
        """Return the instance's attributes as a dictionary."""
        return {
            "message": self._message,
            "level": self._level,
            "keywords": self._keywords,
        }

    def json(self) -> str:
        """Return the instance's attributes as a JSON object.

        BUG FIX: previously called `self.__dict__()`, which raises
        TypeError because an instance's __dict__ is not callable; the
        intended call is the dict() method above.
        """
        return _json.dumps(self.dict())

    @property
    def message(self) -> str:
        """Return the instance's `message` attribute."""
        return self._message

    @property
    def level(self) -> str:
        """Return the instance's `level` attribute."""
        return self._level

    @property
    def keywords(self) -> List[str]:
        """Return the instance's `keywords` attribute."""
        return self._keywords

    @property
    def status_code(self) -> int:
        """Return the HTTP status code corresponding to the error level."""
        return STATUS_CODE_MAP.get(self._level, 500)
|
||||
|
||||
|
||||
class _UnformattedHyperglassError(HyperglassError):
    """Base exception class for freeform error messages.

    Subclasses set `_level`; the constructor treats the first argument as a
    str.format template, fills it from keyword arguments, and records those
    argument values as highlight keywords.
    """

    _level = "warning"

    def __init__(
        self, unformatted_msg: str = "", level: Optional[str] = None, **kwargs
    ) -> None:
        """Format the message template with keyword arguments."""
        self._message = unformatted_msg.format(**kwargs)
        # Fall back to the class-level severity when none is given.
        self._level = level or self._level
        self._keywords = list(kwargs.values())
        super().__init__(
            message=self._message, level=self._level, keywords=self._keywords
        )
|
||||
|
||||
|
||||
class _PredefinedHyperglassError(HyperglassError):
    """Base exception class for predefined-template error messages.

    Subclasses override `_message` (a str.format template) and optionally
    `_level`; the constructor fills the template from keyword arguments.
    """

    # Message template; subclasses must override.
    _message = "undefined"
    # Default severity; subclasses may override.
    _level = "warning"

    def __init__(self, level: Optional[str] = None, **kwargs) -> None:
        """Format the class message template with keyword arguments."""
        self._fmt_msg = self._message.format(**kwargs)
        # Fall back to the class-level severity when none is given.
        self._level = level or self._level
        self._keywords = list(kwargs.values())
        super().__init__(
            message=self._fmt_msg, level=self._level, keywords=self._keywords
        )
|
||||
|
||||
|
||||
class ConfigInvalid(HyperglassError):
    """Raised when a config item fails type or option validation."""

    def __init__(self, errors: List[str]) -> None:
        """Parse Pydantic ValidationError."""

        # Flatten pydantic's error dicts into one display message.
        super().__init__(message=validation_error_message(*errors))
|
||||
|
||||
|
||||
class ConfigError(_UnformattedHyperglassError):
    """Raised for generic user-config issues."""


class ConfigMissing(_PredefinedHyperglassError):
    """Raised when a required config file or item is missing or undefined."""

    # Template filled with the `missing_item` keyword argument.
    _message = (
        "{missing_item} is missing or undefined and is required to start "
        "hyperglass. Please consult the installation documentation."
    )


class ScrapeError(_UnformattedHyperglassError):
    """Raised when a scrape/netmiko error occurs."""

    # Scrape failures are fatal to the request (HTTP 500 via STATUS_CODE_MAP).
    _level = "danger"


class AuthError(_UnformattedHyperglassError):
    """Raised when authentication to a device fails."""

    _level = "danger"


class RestError(_UnformattedHyperglassError):
    """Raised upon a rest API client error."""

    _level = "danger"


class DeviceTimeout(_UnformattedHyperglassError):
    """Raised when the connection to a device times out."""

    _level = "danger"


class InputInvalid(_UnformattedHyperglassError):
    """Raised when input validation fails."""


class InputNotAllowed(_UnformattedHyperglassError):
    """Raised when input validation fails due to a configured check."""


class ResponseEmpty(_UnformattedHyperglassError):
    """Raised when hyperglass can connect to the device but the response is empty."""


class UnsupportedDevice(_UnformattedHyperglassError):
    """Raised when an input NOS is not in the supported NOS list."""
|
||||
|
||||
|
||||
class ParsingError(_UnformattedHyperglassError):
    """Raised when there is a problem parsing a structured response."""

    def __init__(
        self, unformatted_msg: Union[List[Dict], str], level: str = "danger", **kwargs,
    ) -> None:
        """Format error message with keyword arguments.

        Accepts either a str.format template or a list of pydantic error
        dicts (flattened via validation_error_message).
        """
        if isinstance(unformatted_msg, list):
            self._message = validation_error_message(*unformatted_msg)
        else:
            self._message = unformatted_msg.format(**kwargs)
        self._level = level or self._level
        self._keywords = list(kwargs.values())
        super().__init__(self._message, level=self._level, keywords=self._keywords)
|
||||
|
|
@ -1,5 +0,0 @@
|
|||
"""Validate, construct, execute queries.
|
||||
|
||||
Constructs SSH commands or API call parameters based on front end
|
||||
input, executes the commands/calls, returns the output to front end.
|
||||
"""
|
||||
|
|
@ -1,7 +0,0 @@
|
|||
"""Individual transport driver classes & subclasses."""
|
||||
|
||||
# Local
|
||||
from .agent import AgentConnection
|
||||
from ._common import Connection
|
||||
from .ssh_netmiko import NetmikoConnection
|
||||
from .ssh_scrapli import ScrapliConnection
|
||||
|
|
@ -1,81 +0,0 @@
|
|||
"""Base Connection Class."""
|
||||
|
||||
# Standard Library
|
||||
from typing import Dict, Union, Sequence
|
||||
|
||||
# Project
|
||||
from hyperglass.log import log
|
||||
from hyperglass.models.api import Query
|
||||
from hyperglass.parsing.nos import scrape_parsers, structured_parsers
|
||||
from hyperglass.parsing.common import parsers
|
||||
from hyperglass.models.config.devices import Device
|
||||
|
||||
# Local
|
||||
from ._construct import Construct
|
||||
|
||||
|
||||
class Connection:
    """Base transport driver class."""

    def __init__(self, device: Device, query_data: Query) -> None:
        """Store the device & query, then construct the per-AFI queries."""
        self.device = device
        self.query_data = query_data
        self.query_type = query_data.query_type
        self.query_target = query_data.query_target
        self._query = Construct(device=device, query_data=query_data)
        self.query = self._query.queries()

    async def parsed_response(  # noqa: C901 ("too complex")
        self, output: Sequence[str]
    ) -> Union[str, Sequence[Dict]]:
        """Send output through common parsers.

        Returns either a joined string (plain-text output) or whatever the
        NOS/query-type structured parser produces.
        """

        log.debug("Pre-parsed responses:\n{}", output)

        nos = self.device.nos
        query_type = self.query_type
        parsed = ()
        response = None

        # Per-NOS parser tables; empty dict when the NOS has no entry.
        structured_for_nos = structured_parsers.get(nos, {})
        scrape_for_nos = scrape_parsers.get(nos, {})

        if not self.device.structured_output:
            # Plain-text output: run each common parser over every response.
            common = ()
            for parser in parsers:
                for item in output:
                    common += (parser(commands=self.query, output=item),)

            if nos in scrape_parsers and query_type in scrape_for_nos:
                # A NOS/query-specific scrape parser exists; apply it too.
                scrape_parser = scrape_for_nos[query_type]
                for item in common:
                    parsed += (scrape_parser(item),)
            else:
                parsed += common

            response = "\n\n".join(parsed)
        elif nos in structured_parsers and query_type not in structured_for_nos:
            # Structured output is enabled but no structured parser exists
            # for this query type: fall back to the common text parsers.
            for parser in parsers:
                for item in output:
                    parsed += (parser(commands=self.query, output=item),)
            response = "\n\n".join(parsed)
        elif nos in structured_parsers and query_type in structured_for_nos:
            # Structured parser available: it consumes the full response set.
            response = structured_for_nos[query_type](output)

        if response is None:
            # No branch matched (e.g. structured output on a NOS without
            # structured parsers): return the raw responses joined together.
            response = "\n\n".join(output)

        log.debug("Post-parsed responses:\n{}", response)
        return response
|
||||
|
|
@ -1,201 +0,0 @@
|
|||
"""Construct SSH command/API parameters from validated query data.
|
||||
|
||||
Accepts filtered & validated input from execute.py, constructs SSH
|
||||
command for Netmiko library or API call parameters for supported
|
||||
hyperglass API modules.
|
||||
"""
|
||||
|
||||
# Standard Library
|
||||
import re
|
||||
import json as _json
|
||||
from operator import attrgetter
|
||||
|
||||
# Project
|
||||
from hyperglass.log import log
|
||||
from hyperglass.constants import TRANSPORT_REST, TARGET_FORMAT_SPACE
|
||||
from hyperglass.configuration import commands
|
||||
|
||||
|
||||
class Construct:
    """Construct SSH commands/REST API parameters from validated query data."""

    def __init__(self, device, query_data):
        """Derive transport, target formatting & AFIs from the query."""
        log.debug(
            "Constructing {} query for '{}'",
            query_data.query_type,
            str(query_data.query_target),
        )
        self.device = device
        self.query_data = query_data
        self.target = query_data.query_target

        # REST-capable platforms use the REST transport; everything else
        # is reached over SSH ("scrape").
        self.transport = "rest" if device.nos in TRANSPORT_REST else "scrape"

        # Some platforms expect "192.0.2.0 24" rather than "192.0.2.0/24".
        if device.nos in TARGET_FORMAT_SPACE:
            self.target = re.sub(r"\/", r" ", str(query_data.query_target))

        query_type = query_data.query_type
        vrf_afis = (query_data.query_vrf.ipv4, query_data.query_vrf.ipv6)

        if query_type in ("bgp_route", "ping", "traceroute"):
            # IP-based queries: keep only enabled (non-None) AFIs whose IP
            # version matches the target's IP version.
            self.afis = [
                afi
                for afi in vrf_afis
                if afi is not None
                and query_data.query_target.version == afi.version
            ]
        elif query_type in ("bgp_aspath", "bgp_community"):
            # Non-IP queries: keep every enabled AFI, no version check.
            self.afis = [afi for afi in vrf_afis if afi is not None]

        # Apply any NOS/query-type specific target formatting.
        with Formatter(device.nos, query_type) as formatter:
            self.target = formatter(query_data.query_target)

    def json(self, afi):
        """Return JSON version of validated query for REST devices."""
        log.debug("Building JSON query for {q}", q=repr(self.query_data))
        payload = {
            "query_type": self.query_data.query_type,
            "vrf": self.query_data.query_vrf.name,
            "afi": afi.protocol,
            "source": str(afi.source_address),
            "target": str(self.target),
        }
        return _json.dumps(payload)

    def scrape(self, afi):
        """Return formatted command for 'Scrape' endpoints (SSH)."""
        if self.device.structured_output:
            path = (
                self.device.nos,
                "structured",
                afi.protocol,
                self.query_data.query_type,
            )
        else:
            path = (self.device.commands, afi.protocol, self.query_data.query_type)

        # Resolve the dotted attribute path into the commands object.
        command = attrgetter(".".join(path))(commands)
        return command.format(
            target=self.target,
            source=str(afi.source_address),
            vrf=self.query_data.query_vrf.name,
        )

    def queries(self):
        """Return one constructed query per enabled AFI."""
        builder = self.json if self.transport == "rest" else self.scrape
        query = [builder(afi=afi) for afi in self.afis]
        log.debug("Constructed query: {}", query)
        return query
|
||||
|
||||
|
||||
class Formatter:
    """Modify query target based on the device's NOS requirements and the query type."""

    def __init__(self, nos: str, query_type: str) -> None:
        """Store the NOS & query type used to select a formatter."""
        self.nos = nos
        self.query_type = query_type

    def __enter__(self):
        """Provide the matching formatter callable."""
        return self._get_formatter()

    def __exit__(self, exc_type, exc_value, exc_traceback):
        """Log any error raised inside the context; never suppress it."""
        if exc_type is not None:
            log.error(exc_traceback)

    def _get_formatter(self):
        """Select the formatter for the NOS/query-type pair (default: no-op)."""
        if self.nos in ("juniper", "juniper_junos"):
            if self.query_type == "bgp_aspath":
                return self._juniper_bgp_aspath
        if self.nos in ("bird", "bird_ssh"):
            if self.query_type == "bgp_aspath":
                return self._bird_bgp_aspath
            if self.query_type == "bgp_community":
                return self._bird_bgp_community
        return self._default

    def _default(self, target: str) -> str:
        """Pass the target through unmodified."""
        return target

    def _juniper_bgp_aspath(self, target: str) -> str:
        """Convert from Cisco AS_PATH format to Juniper format."""
        query = str(target)
        asns = re.findall(r"\d+", query)
        changed = False

        # A leading underscore (`_65000`) becomes a leading wildcard.
        if re.match(r"^\_", query):
            asns.insert(0, r".*")
            changed = True

        # A trailing underscore (`65000_`) becomes a trailing wildcard.
        if re.match(r".*(\_)$", query):
            asns.append(r".*")
            changed = True

        if not changed:
            return query

        result = " ".join(asns)
        log.debug("Modified target '{}' to '{}'", target, result)
        return result

    def _bird_bgp_aspath(self, target: str) -> str:
        """Convert from Cisco AS_PATH format to BIRD format."""
        asns = re.findall(r"\d+", target)
        changed = False

        # A leading underscore (`_65000`) becomes a leading wildcard.
        if re.match(r"^\_", target):
            asns.insert(0, "*")
            changed = True

        # A trailing underscore (`65000_`) becomes a trailing wildcard.
        if re.match(r".*(\_)$", target):
            asns.append("*")
            changed = True

        # BIRD AS-path expressions are wrapped in `[= ... =]`.
        result = " ".join(["[=", *asns, "=]"])

        if changed:
            log.debug("Modified target '{}' to '{}'", target, result)

        return result

    def _bird_bgp_community(self, target: str) -> str:
        """Convert from standard community format to BIRD format."""
        return "({})".format(",".join(target.split(":")))
|
||||
|
|
@ -1,139 +0,0 @@
|
|||
"""Execute validated & constructed query on device.
|
||||
|
||||
Accepts input from front end application, validates the input and
|
||||
returns errors if input is invalid. Passes validated parameters to
|
||||
construct.py, which is used to build & run the Netmiko connections or
|
||||
hyperglass-frr API calls, returns the output back to the front end.
|
||||
"""
|
||||
|
||||
# Standard Library
|
||||
from ssl import CertificateError
|
||||
from typing import Iterable
|
||||
|
||||
# Third Party
|
||||
import httpx
|
||||
|
||||
# Project
|
||||
from hyperglass.log import log
|
||||
from hyperglass.util import parse_exception
|
||||
from hyperglass.encode import jwt_decode, jwt_encode
|
||||
from hyperglass.exceptions import RestError, ResponseEmpty
|
||||
from hyperglass.configuration import params
|
||||
|
||||
# Local
|
||||
from ._common import Connection
|
||||
|
||||
|
||||
class AgentConnection(Connection):
    """Connect to target device via hyperglass-agent."""

    async def collect(self) -> Iterable:  # noqa: C901
        """Connect to a device running hyperglass-agent via HTTP.

        Each constructed query is JWT-encoded, POSTed to the agent, and the
        JWT-encoded response is decoded and collected.

        Returns:
            Tuple of decoded responses, one per query.

        Raises:
            RestError: On connection, certificate, system, or HTTP errors.
            ResponseEmpty: When the agent returns HTTP 204 (no output).
        """
        log.debug("Query parameters: {}", self.query)

        client_params = {
            "headers": {"Content-Type": "application/json"},
            "timeout": params.request_timeout,
        }
        if self.device.ssl is not None and self.device.ssl.enable:
            # Verify the agent's certificate against the imported local copy.
            with self.device.ssl.cert.open("r") as file:
                cert = file.read()
                if not cert:
                    raise RestError(
                        "SSL Certificate for device {d} has not been imported",
                        level="danger",
                        d=self.device.name,
                    )
            http_protocol = "https"
            client_params.update({"verify": str(self.device.ssl.cert)})
            log.debug(
                (
                    f"Using {str(self.device.ssl.cert)} to validate connection "
                    f"to {self.device.name}"
                )
            )
        else:
            http_protocol = "http"
        endpoint = "{protocol}://{address}:{port}/query/".format(
            protocol=http_protocol, address=self.device._target, port=self.device.port
        )

        log.debug("URL endpoint: {}", endpoint)

        # Initialized here so the post-loop status check cannot hit an
        # unbound name when `self.query` is empty (was a latent NameError).
        raw_response = None

        try:
            async with httpx.AsyncClient(**client_params) as http_client:
                responses = ()

                for query in self.query:
                    encoded_query = await jwt_encode(
                        payload=query,
                        secret=self.device.credential.password.get_secret_value(),
                        duration=params.request_timeout,
                    )
                    log.debug("Encoded JWT: {}", encoded_query)

                    raw_response = await http_client.post(
                        endpoint, json={"encoded": encoded_query}
                    )
                    log.debug("HTTP status code: {}", raw_response.status_code)

                    raw = raw_response.text
                    log.debug("Raw Response:\n{}", raw)

                    if raw_response.status_code == 200:
                        # Responses are JWT-encoded with the same shared secret.
                        decoded = await jwt_decode(
                            payload=raw_response.json()["encoded"],
                            secret=self.device.credential.password.get_secret_value(),
                        )
                        log.debug("Decoded Response:\n{}", decoded)
                        responses += (decoded,)

                    elif raw_response.status_code == 204:
                        # 204 means the agent ran the query but got no output.
                        raise ResponseEmpty(
                            params.messages.no_output, device_name=self.device.name,
                        )

                    else:
                        log.error(raw_response.text)

        except httpx.exceptions.HTTPError as rest_error:
            msg = parse_exception(rest_error)
            log.error("Error connecting to device {}: {}", self.device.name, msg)
            raise RestError(
                params.messages.connection_error,
                device_name=self.device.name,
                error=msg,
            ) from rest_error
        except OSError as ose:
            log.critical(str(ose))
            raise RestError(
                params.messages.connection_error,
                device_name=self.device.name,
                error="System error",
            ) from ose
        except CertificateError as cert_error:
            log.critical(str(cert_error))
            msg = parse_exception(cert_error)
            raise RestError(
                params.messages.connection_error,
                device_name=self.device.name,
                error=f"{msg}: {cert_error}",
            ) from cert_error

        if raw_response is not None and raw_response.status_code != 200:
            log.error("Response code is {}", raw_response.status_code)
            raise RestError(
                params.messages.connection_error,
                device_name=self.device.name,
                error=params.messages.general,
            )

        if not responses:
            log.error("No response from device {}", self.device.name)
            raise RestError(
                params.messages.connection_error,
                device_name=self.device.name,
                error=params.messages.no_response,
            )

        return responses
|
||||
|
|
@ -1,62 +0,0 @@
|
|||
"""Common Classes or Utilities for SSH Drivers."""
|
||||
|
||||
# Standard Library
|
||||
from typing import Callable
|
||||
|
||||
# Project
|
||||
from hyperglass.log import log
|
||||
from hyperglass.exceptions import ScrapeError
|
||||
from hyperglass.configuration import params
|
||||
from hyperglass.compat._sshtunnel import BaseSSHTunnelForwarderError, open_tunnel
|
||||
|
||||
# Local
|
||||
from ._common import Connection
|
||||
|
||||
|
||||
class SSHConnection(Connection):
    """Base class for SSH drivers."""

    def setup_proxy(self) -> Callable:
        """Return a preconfigured sshtunnel.SSHTunnelForwarder factory.

        The returned callable opens an SSH tunnel from localhost (random
        local port) through the device's configured proxy to the device.

        Raises:
            ScrapeError: (from the returned callable) if the tunnel to the
                proxy cannot be established.
        """
        proxy = self.device.proxy

        def opener():
            """Set up an SSH tunnel according to a device's configuration."""
            tunnel_kwargs = {
                "ssh_username": proxy.credential.username,
                "remote_bind_address": (self.device._target, self.device.port),
                # Port 0 lets the OS choose a free local port.
                "local_bind_address": ("localhost", 0),
                "skip_tunnel_checkup": False,
                # Leave headroom so the tunnel times out before the request.
                "gateway_timeout": params.request_timeout - 2,
            }
            if proxy.credential._method == "password":
                # Use password auth if no key is defined.
                tunnel_kwargs[
                    "ssh_password"
                ] = proxy.credential.password.get_secret_value()
            else:
                # Otherwise, use key auth.
                tunnel_kwargs["ssh_pkey"] = proxy.credential.key.as_posix()
                if proxy.credential._method == "encrypted_key":
                    # If the key is encrypted, use the password field as the
                    # private key passphrase.
                    tunnel_kwargs[
                        "ssh_private_key_password"
                    ] = proxy.credential.password.get_secret_value()
            try:
                return open_tunnel(proxy._target, proxy.port, **tunnel_kwargs)

            except BaseSSHTunnelForwarderError as scrape_proxy_error:
                log.error(
                    f"Error connecting to device {self.device.name} via "
                    f"proxy {proxy.name}"
                )
                # Chain the original error for debuggability.
                raise ScrapeError(
                    params.messages.connection_error,
                    device_name=self.device.name,
                    proxy=proxy.name,
                    error=str(scrape_proxy_error),
                ) from scrape_proxy_error

        return opener
|
||||
|
|
@ -1,136 +0,0 @@
|
|||
"""Netmiko-Specific Classes & Utilities.
|
||||
|
||||
https://github.com/ktbyers/netmiko
|
||||
"""
|
||||
|
||||
# Standard Library
|
||||
import math
|
||||
from typing import Iterable
|
||||
|
||||
# Third Party
|
||||
from netmiko import (
|
||||
ConnectHandler,
|
||||
NetMikoTimeoutException,
|
||||
NetMikoAuthenticationException,
|
||||
)
|
||||
|
||||
# Project
|
||||
from hyperglass.log import log
|
||||
from hyperglass.exceptions import AuthError, ScrapeError, DeviceTimeout
|
||||
from hyperglass.configuration import params
|
||||
|
||||
# Local
|
||||
from .ssh import SSHConnection
|
||||
|
||||
# Netmiko doesn't currently handle Mikrotik echo verification well,
# see ktbyers/netmiko#1600
netmiko_nos_globals = {
    "mikrotik_routeros": {"global_cmd_verify": False},
    "mikrotik_switchos": {"global_cmd_verify": False},
}

# Netmiko doesn't currently handle the Mikrotik prompt properly, see
# ktbyers/netmiko#1956
netmiko_nos_send_args = {
    "mikrotik_routeros": {"expect_string": r"\S+\s\>\s$"},
    "mikrotik_switchos": {"expect_string": r"\S+\s\>\s$"},
}
|
||||
|
||||
|
||||
class NetmikoConnection(SSHConnection):
    """Handle a device connection via Netmiko."""

    async def collect(self, host: str = None, port: int = None) -> Iterable:
        """Connect directly to a device and return raw command output.

        When `host`/`port` are given, the connection goes through an
        already-opened proxy tunnel; otherwise it connects straight to
        the device.

        Raises:
            DeviceTimeout: When the connection or session times out.
            AuthError: When authentication to the device fails.
            ScrapeError: When the device returns no output.
        """
        if host is not None:
            log.debug(
                "Connecting to {} via proxy {} [{}]",
                self.device.name,
                self.device.proxy.name,
                f"{host}:{port}",
            )
        else:
            log.debug("Connecting directly to {}", self.device.name)

        global_args = netmiko_nos_globals.get(self.device.nos, {})
        send_args = netmiko_nos_send_args.get(self.device.nos, {})

        driver_kwargs = {
            "host": host or self.device._target,
            "port": port or self.device.port,
            "device_type": self.device.nos,
            "username": self.device.credential.username,
            "global_delay_factor": params.netmiko_delay_factor,
            # Allow slightly more than the request timeout for connecting,
            # but cut the session off just before the request times out.
            "timeout": math.floor(params.request_timeout * 1.25),
            "session_timeout": math.ceil(params.request_timeout - 1),
            **global_args,
        }

        if "_telnet" in self.device.nos:
            # Telnet devices with a low delay factor (default) tend to
            # throw login errors.
            driver_kwargs["global_delay_factor"] = 2

        if self.device.credential._method == "password":
            # Use password auth if no key is defined.
            driver_kwargs[
                "password"
            ] = self.device.credential.password.get_secret_value()
        else:
            # Otherwise, use key auth.
            driver_kwargs["use_keys"] = True
            driver_kwargs["key_file"] = self.device.credential.key
            if self.device.credential._method == "encrypted_key":
                # If the key is encrypted, use the password field as the
                # private key passphrase.
                driver_kwargs[
                    "passphrase"
                ] = self.device.credential.password.get_secret_value()

        try:
            nm_connect_direct = ConnectHandler(**driver_kwargs)

            responses = ()
            for query in self.query:
                raw = nm_connect_direct.send_command(query, **send_args)
                responses += (raw,)
                log.debug(f'Raw response for command "{query}":\n{raw}')

            nm_connect_direct.disconnect()

        except NetMikoTimeoutException as scrape_error:
            log.error(str(scrape_error))
            # Chain the original error for debuggability.
            raise DeviceTimeout(
                params.messages.connection_error,
                device_name=self.device.name,
                proxy=None,
                error=params.messages.request_timeout,
            ) from scrape_error
        except NetMikoAuthenticationException as auth_error:
            log.error(
                "Error authenticating to device {loc}: {e}",
                loc=self.device.name,
                e=str(auth_error),
            )
            raise AuthError(
                params.messages.connection_error,
                device_name=self.device.name,
                proxy=None,
                error=params.messages.authentication_error,
            ) from auth_error

        if not responses:
            raise ScrapeError(
                params.messages.connection_error,
                device_name=self.device.name,
                proxy=None,
                error=params.messages.no_response,
            )

        return responses
|
||||
|
|
@ -1,160 +0,0 @@
|
|||
"""Scrapli-Specific Classes & Utilities.
|
||||
|
||||
https://github.com/carlmontanari/scrapli
|
||||
"""
|
||||
|
||||
# Standard Library
|
||||
import math
|
||||
from typing import Sequence
|
||||
|
||||
# Third Party
|
||||
from scrapli.driver import AsyncGenericDriver
|
||||
from scrapli.exceptions import (
|
||||
ScrapliTimeout,
|
||||
ScrapliException,
|
||||
ScrapliAuthenticationFailed,
|
||||
)
|
||||
from scrapli.driver.core import (
|
||||
AsyncEOSDriver,
|
||||
AsyncNXOSDriver,
|
||||
AsyncIOSXEDriver,
|
||||
AsyncIOSXRDriver,
|
||||
AsyncJunosDriver,
|
||||
)
|
||||
|
||||
# Project
|
||||
from hyperglass.log import log
|
||||
from hyperglass.exceptions import (
|
||||
AuthError,
|
||||
ScrapeError,
|
||||
DeviceTimeout,
|
||||
UnsupportedDevice,
|
||||
)
|
||||
from hyperglass.configuration import params
|
||||
|
||||
# Local
|
||||
from .ssh import SSHConnection
|
||||
|
||||
# Map of supported NOS name -> scrapli async driver class. Platforms
# without a platform-specific scrapli driver use the generic driver.
SCRAPLI_DRIVER_MAP = {
    "arista_eos": AsyncEOSDriver,
    "bird": AsyncGenericDriver,
    "cisco_ios": AsyncIOSXEDriver,
    "cisco_nxos": AsyncNXOSDriver,
    "cisco_xr": AsyncIOSXRDriver,
    "frr": AsyncGenericDriver,
    "juniper": AsyncJunosDriver,
    "tnsr": AsyncGenericDriver,
}
|
||||
|
||||
# Per-NOS driver keyword arguments passed through to the scrapli driver.
driver_global_args = {
    "tnsr": {"comms_prompt_pattern": r"\S+\s\S+[\#\>]"},
    "frr": {"comms_ansi": True},
    "bird": {"comms_ansi": True},
}
|
||||
|
||||
|
||||
def _map_driver(nos: str) -> AsyncGenericDriver:
    """Return the scrapli driver class for a NOS, raising if unsupported."""
    if nos not in SCRAPLI_DRIVER_MAP:
        raise UnsupportedDevice("{nos} is not supported by scrapli.", nos=nos)
    return SCRAPLI_DRIVER_MAP[nos]
|
||||
|
||||
|
||||
class ScrapliConnection(SSHConnection):
    """Handle a device connection via Scrapli."""

    async def collect(self, host: str = None, port: int = None) -> Sequence:
        """Connect directly to a device and return raw command output.

        When `host`/`port` are given, the connection goes through an
        already-opened proxy tunnel; otherwise it connects straight to
        the device.

        Raises:
            DeviceTimeout: When the connection or session times out.
            AuthError: When authentication to the device fails.
            ScrapeError: On other scrapli errors or an empty response.
        """
        # Renamed from `driver` to avoid shadowing the class with the
        # instance created below.
        driver_class = _map_driver(self.device.nos)

        if host is not None:
            log.debug(
                "Connecting to {} via proxy {} [{}]",
                self.device.name,
                self.device.proxy.name,
                f"{host}:{port}",
            )
        else:
            log.debug("Connecting directly to {}", self.device.name)

        global_args = driver_global_args.get(self.device.nos, {})

        driver_kwargs = {
            "host": host or self.device._target,
            "port": port or self.device.port,
            "auth_username": self.device.credential.username,
            "timeout_ops": math.floor(params.request_timeout * 1.25),
            "transport": "asyncssh",
            # Host key verification is not configured here.
            "auth_strict_key": False,
            "ssh_known_hosts_file": False,
            **global_args,
        }

        if self.device.credential._method == "password":
            # Use password auth if no key is defined.
            driver_kwargs[
                "auth_password"
            ] = self.device.credential.password.get_secret_value()
        else:
            # Otherwise, use key auth.
            driver_kwargs["auth_private_key"] = self.device.credential.key.as_posix()
            if self.device.credential._method == "encrypted_key":
                # If the key is encrypted, use the password field as the
                # private key passphrase.
                driver_kwargs[
                    "auth_private_key_passphrase"
                ] = self.device.credential.password.get_secret_value()

        driver = driver_class(**driver_kwargs)
        driver.logger = log.bind(
            logger_name=f"scrapli.{driver.host}:{driver.port}-driver"
        )
        try:
            responses = ()
            async with driver as connection:
                await connection.get_prompt()
                for query in self.query:
                    raw = await connection.send_command(query)
                    responses += (raw.result,)
                    log.debug(f'Raw response for command "{query}":\n{raw.result}')

        except ScrapliTimeout as err:
            log.error(err)
            # Chain the original error for debuggability.
            raise DeviceTimeout(
                params.messages.connection_error,
                device_name=self.device.name,
                error=params.messages.request_timeout,
            ) from err
        except ScrapliAuthenticationFailed as err:
            log.error(
                "Error authenticating to device {loc}: {e}",
                loc=self.device.name,
                e=str(err),
            )
            raise AuthError(
                params.messages.connection_error,
                device_name=self.device.name,
                error=params.messages.authentication_error,
            ) from err
        except ScrapliException as err:
            log.error(err)
            raise ScrapeError(
                params.messages.connection_error,
                device_name=self.device.name,
                error=params.messages.no_response,
            ) from err

        if not responses:
            raise ScrapeError(
                params.messages.connection_error,
                device_name=self.device.name,
                error=params.messages.no_response,
            )

        return responses
|
||||
|
|
@ -1,96 +0,0 @@
|
|||
"""Execute validated & constructed query on device.
|
||||
|
||||
Accepts input from front end application, validates the input and
|
||||
returns errors if input is invalid. Passes validated parameters to
|
||||
construct.py, which is used to build & run the Netmiko connections or
|
||||
hyperglass-frr API calls, returns the output back to the front end.
|
||||
"""
|
||||
|
||||
# Standard Library
|
||||
import signal
|
||||
from typing import Any, Dict, Union, Callable, Sequence
|
||||
|
||||
# Project
|
||||
from hyperglass.log import log
|
||||
from hyperglass.exceptions import DeviceTimeout, ResponseEmpty
|
||||
from hyperglass.models.api import Query
|
||||
from hyperglass.configuration import params
|
||||
|
||||
# Local
|
||||
from .drivers import Connection, AgentConnection, NetmikoConnection, ScrapliConnection
|
||||
|
||||
|
||||
def map_driver(driver_name: str) -> Connection:
    """Get the correct driver class based on the driver name."""
    # Netmiko is the fallback for any unrecognized driver name.
    special_drivers = {
        "scrapli": ScrapliConnection,
        "hyperglass_agent": AgentConnection,
    }
    return special_drivers.get(driver_name, NetmikoConnection)
|
||||
|
||||
|
||||
def handle_timeout(**exc_args: Any) -> Callable:
    """Return a function signal can use to raise a timeout exception."""

    def _raise_timeout(*args: Any, **kwargs: Any) -> None:
        """Raise DeviceTimeout with the pre-bound arguments."""
        raise DeviceTimeout(**exc_args)

    return _raise_timeout
|
||||
|
||||
|
||||
async def execute(query: Query) -> Union[str, Sequence[Dict]]:
    """Initiate query validation and execution."""

    output = params.messages.general

    log.debug("Received query for {}", query.json())
    log.debug("Matched device config: {}", query.device)

    driver_class = map_driver(query.device.driver)
    driver = driver_class(query.device, query)

    timeout_args = {
        "unformatted_msg": params.messages.connection_error,
        "device_name": query.device.name,
        "error": params.messages.request_timeout,
    }

    use_proxy = bool(query.device.proxy)
    if use_proxy:
        timeout_args["proxy"] = query.device.proxy.name

    # Hard wall-clock limit: SIGALRM fires just before the request timeout
    # and raises DeviceTimeout via the bound handler.
    signal.signal(signal.SIGALRM, handle_timeout(**timeout_args))
    signal.alarm(params.request_timeout - 1)

    if use_proxy:
        proxy = driver.setup_proxy()
        with proxy() as tunnel:
            response = await driver.collect(
                tunnel.local_bind_host, tunnel.local_bind_port
            )
    else:
        response = await driver.collect()

    output = await driver.parsed_response(response)

    if isinstance(output, str) and output in ("", "\n"):
        # Plain-text output that is empty is treated as no response.
        raise ResponseEmpty(
            params.messages.no_output, device_name=query.device.name
        )
    if isinstance(output, dict) and not output:
        # Structured output that is an empty dict is treated the same way.
        raise ResponseEmpty(
            params.messages.no_output, device_name=query.device.name
        )

    log.debug("Output for query: {}:\n{}", query.json(), repr(output))
    # Query completed in time: cancel the pending alarm.
    signal.alarm(0)

    return output
|
||||
5
hyperglass/external/__init__.py
vendored
5
hyperglass/external/__init__.py
vendored
|
|
@ -1,5 +0,0 @@
|
|||
"""Functions & handlers for external data."""
|
||||
|
||||
# Local
|
||||
from .ripestat import RIPEStat # noqa: F401
|
||||
from .webhooks import Webhook # noqa: F401
|
||||
330
hyperglass/external/_base.py
vendored
330
hyperglass/external/_base.py
vendored
|
|
@ -1,330 +0,0 @@
|
|||
"""Session handler for RIPEStat Data API."""
|
||||
|
||||
# Standard Library
|
||||
import re
|
||||
import json as _json
|
||||
import socket
|
||||
from json import JSONDecodeError
|
||||
from socket import gaierror
|
||||
|
||||
# Third Party
|
||||
import httpx
|
||||
from httpx import StatusCode
|
||||
|
||||
# Project
|
||||
from hyperglass.log import log
|
||||
from hyperglass.util import make_repr, parse_exception
|
||||
from hyperglass.constants import __version__
|
||||
from hyperglass.exceptions import HyperglassError
|
||||
|
||||
|
||||
def _prepare_dict(_dict):
|
||||
return _json.loads(_json.dumps(_dict, default=str))
|
||||
|
||||
|
||||
class BaseExternal:
    """Base session handler for external HTTP services.

    Provides synchronous & asynchronous context-manager interfaces plus thin
    wrappers around httpx for each supported HTTP method. Subclasses set a
    friendly display name via the ``name`` class keyword argument.
    """

    def __init__(
        self,
        base_url,
        config=None,
        uri_prefix="",
        uri_suffix="",
        verify_ssl=True,
        timeout=10,
        parse=True,
    ):
        """Initialize connection instance.

        Arguments:
            base_url: Base URL of the remote service, e.g. ``https://example.com``.
            config: Optional provider-specific configuration object.
            uri_prefix: Path segment prepended to every endpoint.
            uri_suffix: Path segment appended to every endpoint.
            verify_ssl: Whether to verify TLS certificates.
            timeout: Default request timeout in seconds.
            parse: If ``True``, attempt to parse responses as JSON.
        """
        self.__name__ = getattr(self, "name", "BaseExternal")
        self.config = config
        # Strip trailing/leading slashes so endpoint joining is predictable.
        self.base_url = base_url.strip("/")
        self.uri_prefix = uri_prefix.strip("/")
        self.uri_suffix = uri_suffix.strip("/")
        self.verify_ssl = verify_ssl
        self.timeout = timeout
        self.parse = parse

        session_args = {
            "verify": self.verify_ssl,
            "base_url": self.base_url,
            "timeout": self.timeout,
        }
        # Separate clients for synchronous & asynchronous usage.
        self._session = httpx.Client(**session_args)
        self._asession = httpx.AsyncClient(**session_args)

    @classmethod
    def __init_subclass__(cls, name=None, **kwargs):
        """Set correct subclass name."""
        super().__init_subclass__(**kwargs)
        cls.name = name or cls.__name__

    async def __aenter__(self):
        """Test connection on entry."""
        available = await self._atest()

        if available:
            log.debug("Initialized session with {}", self.base_url)
            return self
        else:
            raise self._exception(f"Unable to create session to {self.name}")

    async def __aexit__(self, exc_type=None, exc_value=None, traceback=None):
        """Close connection on exit."""
        log.debug("Closing session with {}", self.base_url)

        await self._asession.aclose()
        return True

    def __enter__(self):
        """Test connection on entry."""
        available = self._test()

        if available:
            log.debug("Initialized session with {}", self.base_url)
            return self
        else:
            raise self._exception(f"Unable to create session to {self.name}")

    def __exit__(self, exc_type=None, exc_value=None, traceback=None):
        """Close connection on exit."""
        if exc_type is not None:
            log.error(traceback)
        self._session.close()

    def __repr__(self):
        """Return user friendly representation of instance."""
        return make_repr(self)

    def _exception(self, message, exc=None, level="warning", **kwargs):
        """Build a HyperglassError, appending the stringified cause if passed."""
        if exc is not None:
            message = f"{str(message)}: {str(exc)}"

        return HyperglassError(message, str(level), **kwargs)

    def _parse_response(self, response):
        """Parse a response as JSON if parsing is enabled, else return it as-is.

        Falls back to ``{"data": response.text}`` if the body is not valid JSON.
        """
        if self.parse:
            parsed = {}
            try:
                parsed = response.json()
            except JSONDecodeError:
                try:
                    parsed = _json.loads(response)
                except (JSONDecodeError, TypeError):
                    log.error("Error parsing JSON for response {}", repr(response))
                    parsed = {"data": response.text}
        else:
            parsed = response
        return parsed

    def _test(self):
        """Open a low-level connection to the base URL to ensure its port is open."""
        log.debug("Testing connection to {}", self.base_url)

        try:
            # Parse out just the hostname from a URL string.
            # E.g. `https://www.example.com` becomes `www.example.com`
            test_host = re.sub(r"http(s)?\:\/\/", "", self.base_url)

            # Create a generic socket object
            test_socket = socket.socket()

            # Try opening a low-level socket to make sure it's even
            # listening on the port prior to trying to use it.
            test_socket.connect((test_host, 443))

            # Properly shutdown & close the socket.
            test_socket.shutdown(1)
            test_socket.close()

        except gaierror as err:
            # Raised if the target isn't listening on the port
            raise self._exception(
                f"{self.name} appears to be unreachable", err
            ) from None

        return True

    async def _atest(self):
        """Open a low-level connection to the base URL to ensure its port is open."""
        return self._test()

    def _build_request(
        self,
        *,
        method,
        endpoint,
        item=None,
        headers=None,
        params=None,
        data=None,
        timeout=None,
        response_required=False,
    ):
        """Process request parameters into a structure usable by httpx.

        All parameters are keyword-only so values can never be silently
        mis-assigned by argument order (the previous implementation unpacked
        ``**kwargs`` positionally via ``itemgetter``, which depended on the
        caller's keyword ordering).

        Arguments:
            method: HTTP method; must be one of the supported methods.
            endpoint: URL path, joined with the configured prefix/suffix.
            item: Optional final path segment (e.g. a resource ID).
            headers: Optional dict of request headers; merged over the default
                hyperglass user-agent header.
            params: Optional dict of query parameters; ``None`` values dropped.
            data: Optional dict request body, sent as JSON.
            timeout: Optional per-request timeout in seconds.
            response_required: Accepted for interface compatibility; not used
                when building the request.

        Returns:
            Dict of keyword arguments for ``httpx.Client.request``.

        Raises:
            HyperglassError: If the method is unsupported, data is not a dict,
                or the timeout cannot be coerced to an int.
        """
        supported_methods = ("GET", "POST", "PUT", "DELETE", "HEAD", "PATCH")

        if method.upper() not in supported_methods:
            raise self._exception(
                f'Method must be one of {", ".join(supported_methods)}. '
                f"Got: {str(method)}"
            )

        # Join non-empty path segments into a single absolute endpoint path.
        endpoint = "/".join(
            i
            for i in (
                "",
                self.uri_prefix.strip("/"),
                endpoint.strip("/"),
                self.uri_suffix.strip("/"),
                item,
            )
            if i
        )

        request = {
            "method": method,
            "url": endpoint,
            "headers": {"user-agent": f"hyperglass/{__version__}"},
        }

        if headers is not None:
            request.update({"headers": headers})

        if params is not None:
            # Stringify keys/values & drop None values for the query string.
            params = {str(k): str(v) for k, v in params.items() if v is not None}
            request["params"] = params

        if data is not None:
            if not isinstance(data, dict):
                raise self._exception(f"Data must be a dict, got: {str(data)}")
            request["json"] = _prepare_dict(data)

        if timeout is not None:
            if not isinstance(timeout, int):
                try:
                    timeout = int(timeout)
                except (TypeError, ValueError):
                    # int() raises ValueError for non-numeric strings and
                    # TypeError for non-coercible types; handle both.
                    raise self._exception(
                        f"Timeout must be an int, got: {str(timeout)}"
                    ) from None
            request["timeout"] = timeout

        log.debug("Constructed request parameters {}", request)
        return request

    async def _arequest(  # noqa: C901
        self,
        method,
        endpoint,
        item=None,
        headers=None,
        params=None,
        data=None,
        timeout=None,
        response_required=False,
    ):
        """Run an HTTP request operation asynchronously.

        Raises:
            HyperglassError: On non-2xx responses or transport errors.
        """
        request = self._build_request(
            method=method,
            endpoint=endpoint,
            item=item,
            # Forward the caller's headers (previously hard-coded to None,
            # which silently discarded caller-supplied headers).
            headers=headers,
            params=params,
            data=data,
            timeout=timeout,
            response_required=response_required,
        )

        try:
            response = await self._asession.request(**request)

            if response.status_code not in range(200, 300):
                status = StatusCode(response.status_code)
                error = self._parse_response(response)
                raise self._exception(
                    f'{status.name.replace("_", " ")}: {error}', level="danger"
                ) from None

        except httpx.HTTPError as http_err:
            raise self._exception(parse_exception(http_err), level="danger") from None

        return self._parse_response(response)

    async def _aget(self, endpoint, **kwargs):
        """Run an async HTTP GET request."""
        return await self._arequest(method="GET", endpoint=endpoint, **kwargs)

    async def _apost(self, endpoint, **kwargs):
        """Run an async HTTP POST request."""
        return await self._arequest(method="POST", endpoint=endpoint, **kwargs)

    async def _aput(self, endpoint, **kwargs):
        """Run an async HTTP PUT request."""
        return await self._arequest(method="PUT", endpoint=endpoint, **kwargs)

    async def _adelete(self, endpoint, **kwargs):
        """Run an async HTTP DELETE request."""
        return await self._arequest(method="DELETE", endpoint=endpoint, **kwargs)

    async def _apatch(self, endpoint, **kwargs):
        """Run an async HTTP PATCH request."""
        return await self._arequest(method="PATCH", endpoint=endpoint, **kwargs)

    async def _ahead(self, endpoint, **kwargs):
        """Run an async HTTP HEAD request."""
        return await self._arequest(method="HEAD", endpoint=endpoint, **kwargs)

    def _request(  # noqa: C901
        self,
        method,
        endpoint,
        item=None,
        headers=None,
        params=None,
        data=None,
        timeout=None,
        response_required=False,
    ):
        """Run an HTTP request operation synchronously.

        Raises:
            HyperglassError: On non-2xx responses or transport errors.
        """
        request = self._build_request(
            method=method,
            endpoint=endpoint,
            item=item,
            # Forward the caller's headers (previously hard-coded to None).
            headers=headers,
            params=params,
            data=data,
            timeout=timeout,
            response_required=response_required,
        )

        try:
            response = self._session.request(**request)

            if response.status_code not in range(200, 300):
                status = StatusCode(response.status_code)
                error = self._parse_response(response)
                raise self._exception(
                    f'{status.name.replace("_", " ")}: {error}', level="danger"
                ) from None

        except httpx.HTTPError as http_err:
            raise self._exception(parse_exception(http_err), level="danger") from None

        return self._parse_response(response)

    def _get(self, endpoint, **kwargs):
        """Run a synchronous HTTP GET request."""
        return self._request(method="GET", endpoint=endpoint, **kwargs)

    def _post(self, endpoint, **kwargs):
        """Run a synchronous HTTP POST request."""
        return self._request(method="POST", endpoint=endpoint, **kwargs)

    def _put(self, endpoint, **kwargs):
        """Run a synchronous HTTP PUT request."""
        return self._request(method="PUT", endpoint=endpoint, **kwargs)

    def _delete(self, endpoint, **kwargs):
        """Run a synchronous HTTP DELETE request."""
        return self._request(method="DELETE", endpoint=endpoint, **kwargs)

    def _patch(self, endpoint, **kwargs):
        """Run a synchronous HTTP PATCH request."""
        return self._request(method="PATCH", endpoint=endpoint, **kwargs)

    def _head(self, endpoint, **kwargs):
        """Run a synchronous HTTP HEAD request."""
        return self._request(method="HEAD", endpoint=endpoint, **kwargs)
|
||||
204
hyperglass/external/bgptools.py
vendored
204
hyperglass/external/bgptools.py
vendored
|
|
@ -1,204 +0,0 @@
|
|||
"""Query & parse data from bgp.tools.
|
||||
|
||||
- See https://bgp.tools/credits for acknowledgements and licensing.
|
||||
- See https://bgp.tools/kb/api for query documentation.
|
||||
"""
|
||||
|
||||
# Standard Library
|
||||
import re
|
||||
import socket
|
||||
import asyncio
|
||||
from typing import Dict, List
|
||||
|
||||
# Project
|
||||
from hyperglass.log import log
|
||||
from hyperglass.cache import SyncCache, AsyncCache
|
||||
from hyperglass.configuration import REDIS_CONFIG, params
|
||||
|
||||
DEFAULT_KEYS = ("asn", "ip", "prefix", "country", "rir", "allocated", "org")
|
||||
|
||||
CACHE_KEY = "hyperglass.external.bgptools"
|
||||
|
||||
|
||||
def parse_whois(output: str, targets: List[str]) -> Dict[str, str]:
    """Parse raw whois output from bgp.tools.

    Sample output:
    AS | IP | BGP Prefix | CC | Registry | Allocated | AS Name
    13335 | 1.1.1.1 | 1.1.1.0/24 | US | ARIN | 2010-07-14 | Cloudflare, Inc.

    Arguments:
        output: Raw multi-line whois response from bgp.tools.
        targets: Resources that were queried; only matching rows are kept.

    Returns:
        Mapping of each matched target IP to its parsed record.
    """
    parsed = {}

    for row in output.split("\n"):
        # Skip blank rows.
        if not row:
            continue

        # Split each pipe-delimited column, dropping line breaks and
        # surrounding spaces from every field.
        columns = [
            re.sub(r"(\n|\r)", "", column).strip(" ") for column in row.split("|")
        ]
        asn, ip, prefix, country, rir, allocated, org = columns

        # Only keep rows that correspond to a requested target.
        if ip in targets:
            parsed[ip] = {
                "asn": asn,
                "ip": ip,
                "prefix": prefix,
                "country": country,
                "rir": rir,
                "allocated": allocated,
                "org": org,
            }

    log.debug("Parsed bgp.tools data: {}", parsed)
    return parsed
|
||||
|
||||
|
||||
async def run_whois(targets: List[str]) -> str:
    """Open raw socket to bgp.tools and execute query.

    Arguments:
        targets: Resources (IPs/prefixes) to query in a single bulk request.

    Returns:
        Decoded raw whois response as a string.
    """

    # Construct bulk query: one resource per line, wrapped in begin/end
    # markers per the bgp.tools bulk whois protocol.
    query = "\n".join(("begin", *targets, "end\n")).encode()

    # Open the socket to bgp.tools (standard whois port 43).
    log.debug("Opening connection to bgp.tools")
    reader, writer = await asyncio.open_connection("bgp.tools", port=43)

    # Send the query
    writer.write(query)
    if writer.can_write_eof():
        # Signal end-of-input so the server knows the query is complete.
        writer.write_eof()
    await writer.drain()

    # Read the response in 128-byte chunks until the server closes the
    # stream (empty read).
    response = b""
    while True:
        data = await reader.read(128)
        if data:
            response += data
        else:
            log.debug("Closing connection to bgp.tools")
            writer.close()
            break

    return response.decode()
|
||||
|
||||
|
||||
def run_whois_sync(targets: List[str]) -> str:
    """Open raw socket to bgp.tools and execute query.

    Synchronous counterpart of `run_whois`.

    Arguments:
        targets: Resources (IPs/prefixes) to query in a single bulk request.

    Returns:
        Decoded raw whois response as a string.
    """

    # Construct bulk query: one resource per line, wrapped in begin/end
    # markers per the bgp.tools bulk whois protocol.
    query = "\n".join(("begin", *targets, "end\n")).encode()

    # Open the socket to bgp.tools (standard whois port 43).
    log.debug("Opening connection to bgp.tools")
    sock = socket.socket()
    sock.connect(("bgp.tools", 43))
    sock.send(query)

    # Read the response in 128-byte chunks until the server closes the
    # stream (empty recv).
    response = b""
    while True:
        data = sock.recv(128)
        if data:
            response += data

        else:
            log.debug("Closing connection to bgp.tools")
            sock.shutdown(1)
            sock.close()
            break

    return response.decode()
|
||||
|
||||
|
||||
async def network_info(*targets: str) -> Dict[str, Dict[str, str]]:
    """Get ASN, Containing Prefix, and other info about an internet resource.

    Results are cached in Redis under CACHE_KEY; only uncached targets are
    queried against bgp.tools. On any failure the default (empty-string)
    record is returned for the affected targets — this is deliberately
    best-effort and never raises.

    Returns:
        Mapping of each target to a dict with the DEFAULT_KEYS fields.
    """

    targets = [str(t) for t in targets]
    cache = AsyncCache(db=params.cache.database, **REDIS_CONFIG)

    # Set default data structure.
    data = {t: {k: "" for k in DEFAULT_KEYS} for t in targets}

    # Get all cached bgp.tools data.
    cached = await cache.get_dict(CACHE_KEY)

    # Try to use cached data for each of the items in the list of
    # resources.
    for t in targets:

        if t in cached:
            # Reassign the cached network info to the matching resource.
            data[t] = cached[t]
            log.debug("Using cached network info for {}", t)

    # Remove cached items from the resource list so they're not queried.
    targets = [t for t in targets if t not in cached]

    try:
        if targets:
            whoisdata = await run_whois(targets)

            if whoisdata:
                # If the response is not empty, parse it.
                data.update(parse_whois(whoisdata, targets))

                # Cache the response
                for t in targets:
                    await cache.set_dict(CACHE_KEY, t, data[t])
                    log.debug("Cached network info for {}", t)

    except Exception as err:
        # Best-effort: log and fall through to the default data so lookup
        # failures never break the caller.
        log.error(str(err))

    return data
|
||||
|
||||
|
||||
def network_info_sync(*targets: str) -> Dict[str, Dict[str, str]]:
    """Get ASN, Containing Prefix, and other info about an internet resource.

    Synchronous counterpart of `network_info`. Results are cached in Redis
    under CACHE_KEY; only uncached targets are queried against bgp.tools.
    On any failure the default (empty-string) record is returned for the
    affected targets — this is deliberately best-effort and never raises.

    Returns:
        Mapping of each target to a dict with the DEFAULT_KEYS fields.
    """

    targets = [str(t) for t in targets]
    cache = SyncCache(db=params.cache.database, **REDIS_CONFIG)

    # Set default data structure.
    data = {t: {k: "" for k in DEFAULT_KEYS} for t in targets}

    # Get all cached bgp.tools data.
    cached = cache.get_dict(CACHE_KEY)

    # Try to use cached data for each of the items in the list of
    # resources.
    for t in targets:

        if t in cached:
            # Reassign the cached network info to the matching resource.
            data[t] = cached[t]
            log.debug("Using cached network info for {}", t)

    # Remove cached items from the resource list so they're not queried.
    targets = [t for t in targets if t not in cached]

    try:
        if targets:
            whoisdata = run_whois_sync(targets)

            if whoisdata:
                # If the response is not empty, parse it.
                data.update(parse_whois(whoisdata, targets))

                # Cache the response
                for t in targets:
                    cache.set_dict(CACHE_KEY, t, data[t])
                    log.debug("Cached network info for {}", t)

    except Exception as err:
        # Best-effort: log and fall through to the default data so lookup
        # failures never break the caller.
        log.error(str(err))

    return data
|
||||
31
hyperglass/external/generic.py
vendored
31
hyperglass/external/generic.py
vendored
|
|
@ -1,31 +0,0 @@
|
|||
"""Session handler for Generic HTTP API endpoint."""
|
||||
|
||||
# Project
|
||||
from hyperglass.log import log
|
||||
from hyperglass.external._base import BaseExternal
|
||||
from hyperglass.models.webhook import Webhook
|
||||
|
||||
|
||||
class GenericHook(BaseExternal, name="Generic"):
    """Generic HTTP webhook session handler.

    Sends query events to an arbitrary user-configured HTTP endpoint.
    """

    def __init__(self, config):
        """Initialize external base class with http connection details."""

        super().__init__(
            base_url=f"{config.host.scheme}://{config.host.host}", config=config
        )

    async def send(self, query):
        """Send an incoming webhook to http endpoint.

        Arguments:
            query: Mapping of query event fields used to build a Webhook model.
        """

        payload = Webhook(**query)

        log.debug("Sending query data to {}:\n{}", self.config.host.host, payload)

        # Forward configured headers & query params along with the
        # serialized webhook payload.
        return await self._apost(
            endpoint=self.config.host.path,
            headers=self.config.headers,
            params=self.config.params,
            data=payload.export_dict(),
        )
|
||||
26
hyperglass/external/msteams.py
vendored
26
hyperglass/external/msteams.py
vendored
|
|
@ -1,26 +0,0 @@
|
|||
"""Session handler for Microsoft Teams API."""
|
||||
|
||||
# Project
|
||||
from hyperglass.log import log
|
||||
from hyperglass.external._base import BaseExternal
|
||||
from hyperglass.models.webhook import Webhook
|
||||
|
||||
|
||||
class MSTeams(BaseExternal, name="MSTeams"):
    """Microsoft Teams session handler.

    Sends query events to a Microsoft Teams incoming-webhook URL.
    """

    def __init__(self, config):
        """Initialize external base class with Microsoft Teams connection details."""

        # parse=False: Teams webhook responses are not JSON, so skip parsing.
        super().__init__(
            base_url="https://outlook.office.com", config=config, parse=False
        )

    async def send(self, query):
        """Send an incoming webhook to Microsoft Teams.

        Arguments:
            query: Mapping of query event fields used to build a Webhook model.
        """

        payload = Webhook(**query)

        log.debug("Sending query data to Microsoft Teams:\n{}", payload)

        return await self._apost(endpoint=self.config.host.path, data=payload.msteams())
|
||||
79
hyperglass/external/ripestat.py
vendored
79
hyperglass/external/ripestat.py
vendored
|
|
@ -1,79 +0,0 @@
|
|||
"""Session handler for RIPEStat Data API."""
|
||||
|
||||
# Standard Library
|
||||
from ipaddress import ip_address, ip_network
|
||||
|
||||
# Project
|
||||
from hyperglass.log import log
|
||||
from hyperglass.external._base import BaseExternal
|
||||
|
||||
|
||||
class RIPEStat(BaseExternal, name="RIPEStat"):
    """RIPEStat session handler.

    Queries RIPEStat's Data API for network information about IP resources.
    """

    def __init__(self):
        """Initialize external base class with RIPEStat connection details."""

        super().__init__(
            base_url="https://stat.ripe.net", uri_prefix="/data", uri_suffix="data.json"
        )

    def network_info_sync(self, resource, serialize=False):
        """Get network info via RIPE's Network Info API endpoint (synchronously).

        See: https://stat.ripe.net/docs/data_api#network-info

        Arguments:
            resource: IP address to look up; must be a valid global address.
            serialize: If True, stringify the prefix and reduce asns to the
                first element.

        Returns:
            Dict with "asns" and "prefix" keys on success, or
            {"prefix": None, "asn": None} for invalid/non-global input.
            NOTE(review): the failure dict uses key "asn" while the success
            dict uses "asns" — looks inconsistent; confirm against callers.
        """
        try:
            valid_ip = ip_address(resource)

            # Private/reserved addresses have no public routing data.
            if not valid_ip.is_global:
                log.debug("IP {ip} is not a global address", ip=str(valid_ip))
                return {"prefix": None, "asn": None}

        except ValueError:
            log.debug("'{resource}' is not a valid IP address", resource=resource)
            return {"prefix": None, "asn": None}

        raw = self._get(endpoint="network-info", params={"resource": valid_ip})

        data = {
            "asns": raw["data"]["asns"],
            "prefix": ip_network(raw["data"]["prefix"]),
        }

        if serialize:
            data["prefix"] = str(data["prefix"])
            data["asns"] = data["asns"][0]

        log.debug("Collected network info from RIPEState: {i}", i=str(data))
        return data

    async def network_info(self, resource, serialize=False):
        """Get network info via RIPE's Network Info API endpoint.

        Async counterpart of `network_info_sync`.
        See: https://stat.ripe.net/docs/data_api#network-info

        Arguments:
            resource: IP address to look up; must be a valid global address.
            serialize: If True, stringify the prefix and reduce asns to the
                first element.

        Returns:
            Dict with "asns" and "prefix" keys on success, or
            {"prefix": None, "asn": None} for invalid/non-global input.
            NOTE(review): the failure dict uses key "asn" while the success
            dict uses "asns" — looks inconsistent; confirm against callers.
        """
        try:
            valid_ip = ip_address(resource)

            # Private/reserved addresses have no public routing data.
            if not valid_ip.is_global:
                log.debug("IP {ip} is not a global address", ip=str(valid_ip))
                return {"prefix": None, "asn": None}

        except ValueError:
            log.debug("'{resource}' is not a valid IP address", resource=resource)
            return {"prefix": None, "asn": None}

        raw = await self._aget(endpoint="network-info", params={"resource": valid_ip})

        data = {
            "asns": raw["data"]["asns"],
            "prefix": ip_network(raw["data"]["prefix"]),
        }

        if serialize:
            data["prefix"] = str(data["prefix"])
            data["asns"] = data["asns"][0]

        log.debug("Collected network info from RIPEState: {i}", i=str(data))
        return data
|
||||
51
hyperglass/external/rpki.py
vendored
51
hyperglass/external/rpki.py
vendored
|
|
@ -1,51 +0,0 @@
|
|||
"""Validate RPKI state via Cloudflare GraphQL API."""
|
||||
|
||||
# Project
|
||||
from hyperglass.log import log
|
||||
from hyperglass.cache import SyncCache
|
||||
from hyperglass.configuration import REDIS_CONFIG, params
|
||||
from hyperglass.external._base import BaseExternal
|
||||
|
||||
RPKI_STATE_MAP = {"Invalid": 0, "Valid": 1, "NotFound": 2, "DEFAULT": 3}
|
||||
RPKI_NAME_MAP = {v: k for k, v in RPKI_STATE_MAP.items()}
|
||||
CACHE_KEY = "hyperglass.external.rpki"
|
||||
|
||||
cache = SyncCache(db=params.cache.database, **REDIS_CONFIG)
|
||||
|
||||
|
||||
def rpki_state(prefix, asn):
    """Get RPKI state and map to expected integer.

    Looks up the prefix/ASN pair in the Redis cache first; on a miss,
    queries Cloudflare's RPKI GraphQL API and caches the result. Any
    failure falls back to state 3 (DEFAULT) rather than raising.

    Arguments:
        prefix: Prefix whose RPKI origin validation state is checked.
        asn: Origin ASN to validate the prefix against.

    Returns:
        Integer state per RPKI_STATE_MAP (0 Invalid, 1 Valid, 2 NotFound,
        3 DEFAULT/unknown).
    """
    log.debug("Validating RPKI State for {p} via AS{a}", p=prefix, a=asn)

    state = 3
    # Cache field key: one entry per prefix@asn pair.
    ro = f"{prefix}@{asn}"

    cached = cache.get_dict(CACHE_KEY, ro)

    if cached is not None:
        state = cached
    else:

        # Doubled braces are literal { } in .format(); only prefix/asn
        # are substituted into the GraphQL query.
        ql = 'query GetValidation {{ validation(prefix: "{}", asn: {}) {{ state }} }}'
        query = ql.format(prefix, asn)

        try:
            with BaseExternal(base_url="https://rpki.cloudflare.com") as client:
                response = client._post("/api/graphql", data={"query": query})
            validation_state = (
                response.get("data", {}).get("validation", {}).get("state", "DEFAULT")
            )
            state = RPKI_STATE_MAP[validation_state]
            cache.set_dict(CACHE_KEY, ro, state)
        except Exception as err:
            # Best-effort: on any API/cache failure, log and report DEFAULT.
            log.error(str(err))
            state = 3

    msg = "RPKI Validation State for {} via AS{} is {}".format(
        prefix, asn, RPKI_NAME_MAP[state]
    )
    if cached is not None:
        msg += " [CACHED]"

    log.debug(msg)
    return state
|
||||
24
hyperglass/external/slack.py
vendored
24
hyperglass/external/slack.py
vendored
|
|
@ -1,24 +0,0 @@
|
|||
"""Session handler for Slack API."""
|
||||
|
||||
# Project
|
||||
from hyperglass.log import log
|
||||
from hyperglass.external._base import BaseExternal
|
||||
from hyperglass.models.webhook import Webhook
|
||||
|
||||
|
||||
class SlackHook(BaseExternal, name="Slack"):
    """Slack session handler.

    Sends query events to a Slack incoming-webhook URL.
    """

    def __init__(self, config):
        """Initialize external base class with Slack connection details."""

        # parse=False: Slack webhook responses are plain text, not JSON.
        super().__init__(base_url="https://hooks.slack.com", config=config, parse=False)

    async def send(self, query):
        """Send an incoming webhook to Slack.

        Arguments:
            query: Mapping of query event fields used to build a Webhook model.
        """

        payload = Webhook(**query)

        log.debug("Sending query data to Slack:\n{}", payload)

        return await self._apost(endpoint=self.config.host.path, data=payload.slack())
|
||||
28
hyperglass/external/webhooks.py
vendored
28
hyperglass/external/webhooks.py
vendored
|
|
@ -1,28 +0,0 @@
|
|||
"""Convenience functions for webhooks."""
|
||||
|
||||
# Project
|
||||
from hyperglass.exceptions import HyperglassError
|
||||
from hyperglass.external._base import BaseExternal
|
||||
from hyperglass.external.slack import SlackHook
|
||||
from hyperglass.external.generic import GenericHook
|
||||
from hyperglass.external.msteams import MSTeams
|
||||
|
||||
PROVIDER_MAP = {
|
||||
"generic": GenericHook,
|
||||
"msteams": MSTeams,
|
||||
"slack": SlackHook,
|
||||
}
|
||||
|
||||
|
||||
class Webhook(BaseExternal):
    """Factory returning the webhook handler for a configured provider.

    ``Webhook(config)`` instantiates and returns the provider-specific
    handler class (GenericHook, MSTeams, or SlackHook) rather than a
    Webhook instance.
    """

    def __new__(cls, config):
        """Return instance for correct provider handler.

        Arguments:
            config: Webhook configuration; ``config.provider`` selects the
                handler from PROVIDER_MAP.

        Raises:
            HyperglassError: If the configured provider is unsupported.
        """
        # Keep the try body to just the lookup so unrelated errors raised
        # while constructing the provider aren't masked as "unsupported".
        try:
            provider_class = PROVIDER_MAP[config.provider]
        except KeyError:
            # Suppress chaining: the KeyError is an implementation detail.
            raise HyperglassError(
                f"'{config.provider.title()}' is not yet supported as a webhook target."
            ) from None
        return provider_class(config)
|
||||
|
|
@ -1,21 +0,0 @@
|
|||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 2759.9 544.87">
|
||||
<g fill="#40798C">
|
||||
<g>
|
||||
<path d="M931.39,683.15q20.28,21.23,20.28,62.74l0,134.13c0,3.35-.85,5.94-2.51,7.76a10.52,10.52,0,0,1-14.15,0c-1.67-1.82-2.51-4.4-2.5-7.75l0-131.86q0-35.58-14.12-51.79t-45.62-16.21q-35.13,0-56.35,21.65t-21.24,57.71l0,120.44q0,5-2.51,7.76a9.12,9.12,0,0,1-7.07,2.74,9,9,0,0,1-6.84-3A10.66,10.66,0,0,1,776,880l.12-305.23q0-5,2.51-7.76a10.52,10.52,0,0,1,14.15,0c1.66,1.83,2.5,4.41,2.5,7.76l0,131.39a71.74,71.74,0,0,1,30.13-32.83q20.07-11.4,46.08-11.39Q911.09,661.93,931.39,683.15Z" transform="translate(-102 -476.45)" />
|
||||
<path d="M1201.26,663a9.19,9.19,0,0,1,6.39,2.51,8.56,8.56,0,0,1,2.73,6.62,14.35,14.35,0,0,1-.91,4.1l-131,290.12q-2.75,5.48-8.22,5.47a8.83,8.83,0,0,1-6.16-2.28,7.66,7.66,0,0,1-2.5-5.93l.91-3.65,32.88-72.08L999.6,676.1a14.33,14.33,0,0,1-.91-4.11,8.65,8.65,0,0,1,3.19-6.84,10.93,10.93,0,0,1,7.3-2.73q5,0,7.76,5.48l88,198.49.91-2.28,86.76-194.78C1194.42,665.08,1197.31,662.94,1201.26,663Z" transform="translate(-102 -476.45)" />
|
||||
<path d="M1406.57,676.48q22.34,14.38,34.66,40.39t12.29,60.69q0,34.22-12.34,60t-34.69,39.91q-22.37,14.13-51.56,14.12-27.84,0-47.67-12.33t-29.88-35.6l0,118.62c0,3.35-.84,5.93-2.51,7.76a9.15,9.15,0,0,1-7.07,2.73q-9.59,0-9.58-10.5l.11-289.25a10.34,10.34,0,0,1,2.51-7.3,10.49,10.49,0,0,1,14.14,0,10.42,10.42,0,0,1,2.51,7.3v36q10-22.81,29.89-34.9T1355,662.09Q1384.21,662.11,1406.57,676.48ZM1412,848q20.54-24.85,20.55-70.48T1412,706.37q-20.52-25.56-57-25.57-37.41,0-57.5,24.84t-20.1,70.94q0,46.09,20,71.18t57,25.12Q1391.44,872.9,1412,848Z" transform="translate(-102 -476.45)" />
|
||||
<path d="M1688.91,847.68a8.28,8.28,0,0,1,2.28,5.71,14.17,14.17,0,0,1-3.43,8.89q-3.42,4.35-12.09,9.81-32.85,19.6-68,19.59-49.73,0-78.23-30.6t-28.49-84q0-34.22,12.8-60.22t36.06-40.36q23.28-14.37,53.39-14.35,42,0,66.14,27.4t24.15,74.83q0,8.21-3.19,11.86t-11.41,3.65l-157-.06q.43,44.71,22.56,68.9t62.27,24.2q21.9,0,36.27-5.91a146.07,146.07,0,0,0,28.07-15.51q9.57-6.37,12.32-6.38A7.16,7.16,0,0,1,1688.91,847.68Zm-141.61-146q-21.22,21.68-24.89,60.44l152.85.06q-.91-39.69-19.6-60.91T1603.2,680Q1568.52,680,1547.3,701.63Z" transform="translate(-102 -476.45)" />
|
||||
<path d="M1855.05,663.19c3.65,0,6.16.69,7.52,2.06s2.06,3.42,2.06,6.16q0,7.77-9.59,9.12l-13.69,1.37q-33.3,3.18-50.19,24.61a77.43,77.43,0,0,0-16.9,49.27l0,124.56q0,5-2.51,7.75a10.5,10.5,0,0,1-14.15,0c-1.67-1.83-2.5-4.41-2.5-7.76l.08-206.68q0-10.49,9.58-10.49a9.25,9.25,0,0,1,6.84,2.74c1.83,1.83,2.74,4.41,2.74,7.76v32.85q19.62-39.22,71.19-42.86l6.38-.46Z" transform="translate(-102 -476.45)" />
|
||||
<path d="M2078.6,666a9.84,9.84,0,0,1,2.74,7.3l-.08,204.85q0,47.45-22.38,71.62T1992.26,974q-24.65,0-43.34-4.58a127.8,127.8,0,0,1-36-15.07q-8.67-5.47-12.32-9.81a13.79,13.79,0,0,1-3.64-8.9,7.82,7.82,0,0,1,8.21-8.21q2.28,0,11.86,5.48a204.67,204.67,0,0,0,32.85,16q16.86,6.39,43.34,6.4,33.75,0,51.56-18.91t17.81-55.43l0-47.9q-9.59,23.71-29.9,36.26t-49.05,12.53q-29.2,0-51.78-13.71t-35.11-38.8q-12.54-25.08-12.53-57.49t12.57-57.25q12.56-24.85,35.15-38.54t51.79-13.67q28.27,0,48.35,12.34t30.1,35.14l0-36q0-10.5,9.58-10.49A9.25,9.25,0,0,1,2078.6,666Zm-37.25,172.67q20.78-24.39,20.79-66.83t-20.73-66.62q-20.76-24.18-57.71-24.2-36.51,0-57.72,24.16t-21.25,66.6q0,42.88,21.19,67.08t57.71,24.2Q2020.59,863.1,2041.35,838.69Z" transform="translate(-102 -476.45)" />
|
||||
<path d="M2161.32,888.24c-1.67-1.83-2.51-4.41-2.5-7.76l.11-305.22q0-5,2.51-7.76a9.09,9.09,0,0,1,7.08-2.73,9,9,0,0,1,6.84,3,10.73,10.73,0,0,1,2.73,7.53L2178,880.49a10.75,10.75,0,0,1-2.74,7.53,9,9,0,0,1-6.85,3A9.09,9.09,0,0,1,2161.32,888.24Z" transform="translate(-102 -476.45)" />
|
||||
<path d="M2416.64,744.16l0,136q0,5-2.51,7.76a9.12,9.12,0,0,1-7.08,2.73,9.45,9.45,0,0,1-7.29-3,10.66,10.66,0,0,1-2.74-7.53V848.18q-10,21-29,32.38T2324.88,892a84.41,84.41,0,0,1-38.32-8.68,71.93,71.93,0,0,1-27.6-23.51,59.47,59.47,0,0,1-10.71-33.08q-.43-25.08,12.34-38.78t42.89-19.6q30.12-5.91,84.41-5.9H2397V743.24q0-32.38-13-47.22t-40.83-14.85q-21.9,0-39,5.69t-27.15,13q-1.83,1.38-7.53,5.25c-3.81,2.59-6.93,3.87-9.36,3.87a7.82,7.82,0,0,1-5.7-2.51,8.37,8.37,0,0,1-2.51-6.16q0-8.65,16-18.24a134.23,134.23,0,0,1,34.91-14.59,146.13,146.13,0,0,1,37.64-5Q2416.67,662.5,2416.64,744.16ZM2361.85,863a69.15,69.15,0,0,0,25.78-27.13q9.36-17.34,9.37-39.24V780.2h-8.67q-47.91,0-73,3.85t-35.14,13.21q-10,9.36-10,27.6,0,21,15.49,34.46t39.24,13.47Q2345.41,872.79,2361.85,863Z" transform="translate(-102 -476.45)" />
|
||||
<path d="M2526.54,886.78a122.09,122.09,0,0,1-31.93-14.38q-9.14-5.92-12.32-9.82a13.55,13.55,0,0,1-3.19-8.89,8.24,8.24,0,0,1,2.29-5.71,7.17,7.17,0,0,1,5.47-2.5q2.28,0,6.16,2.51c2.58,1.67,4.33,2.82,5.25,3.42a142.87,142.87,0,0,0,27.59,15.75q14.37,6.17,36.27,6.18,29.65,0,45.63-10.94t16-31q0-11.85-5.7-19.62t-18.7-13.46q-13-5.7-37.17-11.19Q2522,778,2504.69,763.82t-17.33-37.42q0-28.29,22.15-46.08t57.26-17.77a114.74,114.74,0,0,1,32.4,4.58,95.21,95.21,0,0,1,27.37,12.78q8.65,6.4,12.54,11.19a15.49,15.49,0,0,1,3.87,9.81,8.24,8.24,0,0,1-2.28,5.7,7.18,7.18,0,0,1-5.48,2.51c-2.44,0-6.38-2.29-11.86-6.85a127.56,127.56,0,0,0-24.86-15.07q-13-5.93-32.62-5.94-25.54,0-41.29,12.07T2508.81,725q0,11.87,5,19.39t17.1,13q12.09,5.47,34.45,10.5,31.47,7.78,48.35,15.76t24,19.18q7.07,11.18,7.06,28.51,0,27.38-22.6,44t-60,16.63A125.17,125.17,0,0,1,2526.54,886.78Z" transform="translate(-102 -476.45)" />
|
||||
<path d="M2743.71,886.86a121.76,121.76,0,0,1-31.93-14.38q-9.14-5.93-12.32-9.81a13.6,13.6,0,0,1-3.19-8.9,8.25,8.25,0,0,1,2.29-5.7,7.14,7.14,0,0,1,5.47-2.51q2.28,0,6.16,2.51l5.25,3.42a143.22,143.22,0,0,0,27.6,15.76q14.36,6.16,36.26,6.17,29.65,0,45.63-10.93t16-31q0-11.87-5.69-19.62t-18.7-13.47q-13-5.7-37.18-11.19-40.15-9.13-57.48-23.29t-17.32-37.42q0-28.29,22.14-46.07T2784,662.64a114.31,114.31,0,0,1,32.39,4.57A94.83,94.83,0,0,1,2843.71,680q8.65,6.39,12.54,11.18a15.51,15.51,0,0,1,3.87,9.81,8.24,8.24,0,0,1-2.28,5.7,7.12,7.12,0,0,1-5.47,2.51q-3.66,0-11.86-6.85a128.65,128.65,0,0,0-24.86-15.07q-13-5.92-32.62-5.94-25.56,0-41.3,12.08T2726,725.12q0,11.86,5,19.39t17.1,13q12.09,5.47,34.45,10.51,31.47,7.77,48.35,15.76t24,19.17q7.07,11.19,7.06,28.51,0,27.38-22.6,44t-60,16.62A125.19,125.19,0,0,1,2743.71,886.86Z" transform="translate(-102 -476.45)" />
|
||||
</g>
|
||||
<g>
|
||||
<path d="M629,748.94A254.5,254.5,0,0,1,177.8,910.5a253.51,253.51,0,0,1-57-141.26A254.75,254.75,0,0,1,269.49,517c48.18-22.05,103.68-28.13,155.56-17.55a253.76,253.76,0,0,1,204,249.5c0,11.58,18,11.6,18,0-.15-56.09-17.45-111.67-50.07-157.4A275.77,275.77,0,0,0,468,492.94c-53.4-19.67-113.16-21.73-167.89-6.22A272.65,272.65,0,0,0,141.23,889.87c28.26,46.62,70.66,84.65,120.38,107.11a275.73,275.73,0,0,0,167.55,18.95,272.71,272.71,0,0,0,142.15-78.52c38.14-39.87,64-91.3,72.33-145.91A287.4,287.4,0,0,0,647,748.94C647,737.36,629,737.34,629,748.94Z" transform="translate(-102 -476.45)" />
|
||||
<path d="M276.33,926.44a203,203,0,0,0,98.67,26c7.72,0,7.73-12,0-12a190.29,190.29,0,0,1-92.62-24.36c-6.74-3.79-12.79,6.58-6.05,10.36Z" transform="translate(-102 -476.45)" />
|
||||
<path d="M233.58,611c-28.56,28.78-48.31,65.84-55.4,105.83a199.79,199.79,0,0,0,55.4,177c5.45,5.49,13.93-3,8.48-8.48C186.68,829.56,171,742.65,205.27,671.45a191.94,191.94,0,0,1,36.79-52c5.45-5.48-3-14-8.48-8.48Z" transform="translate(-102 -476.45)" />
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
||||
|
Before Width: | Height: | Size: 7 KiB |
|
|
@ -1,7 +0,0 @@
|
|||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 1072 1072">
|
||||
<g fill="#ff5e5b">
|
||||
<path d="M1464,1000a515,515,0,0,1-4.41,66.71l1.28-9.57a502,502,0,0,1-34.62,127.3l3.63-8.6a504.73,504.73,0,0,1-65,111.35l5.63-7.28a508.39,508.39,0,0,1-90.59,90.59l7.28-5.63a504.73,504.73,0,0,1-111.35,65l8.6-3.63a502,502,0,0,1-127.3,34.62l9.57-1.28a510.21,510.21,0,0,1-133.42,0l9.57,1.28a502,502,0,0,1-127.3-34.62l8.6,3.63a504.73,504.73,0,0,1-111.35-65l7.28,5.63a508.39,508.39,0,0,1-90.59-90.59l5.63,7.28a504.73,504.73,0,0,1-65-111.35l3.63,8.6a502,502,0,0,1-34.62-127.3l1.28,9.57a510.21,510.21,0,0,1,0-133.42l-1.28,9.57a502,502,0,0,1,34.62-127.3l-3.63,8.6a504.73,504.73,0,0,1,65-111.35l-5.63,7.28a508.39,508.39,0,0,1,90.59-90.59l-7.28,5.63a504.73,504.73,0,0,1,111.35-65l-8.6,3.63a502,502,0,0,1,127.3-34.62l-9.57,1.28a510.21,510.21,0,0,1,133.42,0l-9.57-1.28a502,502,0,0,1,127.3,34.62l-8.6-3.63a504.73,504.73,0,0,1,111.35,65l-7.28-5.63a508.39,508.39,0,0,1,90.59,90.59l-5.63-7.28a504.73,504.73,0,0,1,65,111.35l-3.63-8.6a502,502,0,0,1,34.62,127.3l-1.28-9.57A515,515,0,0,1,1464,1000c0,18.83,16.54,36.87,36,36s36-15.82,36-36c-.08-55.42-8.52-111.43-25.65-164.17-16.55-51-40.5-99.86-71.58-143.54a557.67,557.67,0,0,0-116.28-120.13c-42.63-32.66-90.23-57.89-140.61-76.39-51.82-19-107.06-29.25-162.18-31.39a533.2,533.2,0,0,0-312.23,86.46c-44.52,29.28-85.17,64.88-119.06,106A563.88,563.88,0,0,0,540,724.75c-14.59,24.29-26.7,49.77-37.45,76-20.92,51-32.75,105.19-37,160.07a534.52,534.52,0,0,0,177.42,438.88C682.81,1435,727.64,1465,776.25,1487a556.45,556.45,0,0,0,79.53,29.37c28.09,7.84,56.76,12.87,85.71,16.24,55.9,6.52,112.74,3.31,167.85-7.85a534.41,534.41,0,0,0,277.91-154.19c36.66-38.42,68.14-82.48,91.8-130,24.24-48.73,41.65-100.88,49.89-154.72A572.24,572.24,0,0,0,1536,1000c0-18.83-16.58-36.87-36-36S1464,979.82,1464,1000Z" transform="translate(-463.99 -463.99)" />
|
||||
<path d="M807.35,1347.71a396.14,396.14,0,0,0,193.6,51c12.55,0,24.58-11.06,24-24s-10.55-24-24-24c-15.93,0-32.51-.36-48.23-3.13,6.6,1.17.54,0-.61-.12q-3-.46-6.09-1-5.61-.94-11.21-2.06-11.52-2.3-22.89-5.34a365.56,365.56,0,0,1-43.22-14.52c-1.06-.42-2.11-.86-3.16-1.3l2.58,1.09a64.07,64.07,0,0,1-6.25-2.75q-4.65-2.1-9.25-4.33c-7.14-3.46-14.15-7.17-21-11.07-10.93-6.17-26.72-3-32.83,8.61-6,11.37-3.07,26.25,8.61,32.84Z" transform="translate(-463.99 -463.99)" />
|
||||
<path d="M723.68,729.37c-55.5,55.73-94.41,128.21-108.25,205.78A398.38,398.38,0,0,0,647.77,1177a391.15,391.15,0,0,0,75.91,106.88c8.87,8.91,25.18,9.54,33.94,0s9.47-24.43,0-33.94q-9.81-9.85-18.91-20.38-4-4.68-7.9-9.51c-.65-.8-1.29-1.61-1.93-2.41s-2.5-4.54-1-1.24c1.37,3,.22.27-.47-.64s-1.55-2.07-2.31-3.11a367.19,367.19,0,0,1-29.26-46.63Q689.5,1154,684,1141.42c-.41-1-.8-1.93-1.23-2.88,2.49,5.5,1.11,2.65.6,1.39-.78-1.94-1.57-3.88-2.33-5.83q-2.73-7-5.16-14a367.36,367.36,0,0,1-14-53.89c-.81-4.51-1.52-9-2.18-13.55-.63-4.26.16,1,.2,1.56-.08-1.07-.27-2.16-.4-3.22q-.5-4.05-.88-8.1-1.4-14.33-1.69-28.74t.55-29q.39-7,1.06-13.89.35-3.66.77-7.3c.12-1.08.26-2.15.38-3.23.59-4.9-.79,5.22,0-.05A386.85,386.85,0,0,1,671.76,906q4-13.21,9-26.09c.76-2,1.53-3.9,2.32-5.84l.3-.73c1-2.4.79-1.93-.6,1.39a14,14,0,0,1,1.23-2.88q2.82-6.45,5.87-12.79a367.61,367.61,0,0,1,30.71-52.19q2.24-3.17,4.53-6.28c1-1.43,6.28-8,1.84-2.55,3.24-4,6.41-8.07,9.74-12q10-11.79,20.91-22.73c8.87-8.91,9.56-25.16,0-33.94s-24.48-9.51-33.94,0Z" transform="translate(-463.99 -463.99)" />
|
||||
</g>
|
||||
</svg>
|
||||
|
Before Width: | Height: | Size: 3.3 KiB |
|
|
@ -1,21 +0,0 @@
|
|||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 2759.9 544.87">
|
||||
<g fill="#ff5e5b">
|
||||
<g>
|
||||
<path d="M931.39,683.15q20.28,21.23,20.28,62.74l0,134.13c0,3.35-.85,5.94-2.51,7.76a10.52,10.52,0,0,1-14.15,0c-1.67-1.82-2.51-4.4-2.5-7.75l0-131.86q0-35.58-14.12-51.79t-45.62-16.21q-35.13,0-56.35,21.65t-21.24,57.71l0,120.44q0,5-2.51,7.76a9.12,9.12,0,0,1-7.07,2.74,9,9,0,0,1-6.84-3A10.66,10.66,0,0,1,776,880l.12-305.23q0-5,2.51-7.76a10.52,10.52,0,0,1,14.15,0c1.66,1.83,2.5,4.41,2.5,7.76l0,131.39a71.74,71.74,0,0,1,30.13-32.83q20.07-11.4,46.08-11.39Q911.09,661.93,931.39,683.15Z" transform="translate(-102 -476.45)" />
|
||||
<path d="M1201.26,663a9.19,9.19,0,0,1,6.39,2.51,8.56,8.56,0,0,1,2.73,6.62,14.35,14.35,0,0,1-.91,4.1l-131,290.12q-2.75,5.48-8.22,5.47a8.83,8.83,0,0,1-6.16-2.28,7.66,7.66,0,0,1-2.5-5.93l.91-3.65,32.88-72.08L999.6,676.1a14.33,14.33,0,0,1-.91-4.11,8.65,8.65,0,0,1,3.19-6.84,10.93,10.93,0,0,1,7.3-2.73q5,0,7.76,5.48l88,198.49.91-2.28,86.76-194.78C1194.42,665.08,1197.31,662.94,1201.26,663Z" transform="translate(-102 -476.45)" />
|
||||
<path d="M1406.57,676.48q22.34,14.38,34.66,40.39t12.29,60.69q0,34.22-12.34,60t-34.69,39.91q-22.37,14.13-51.56,14.12-27.84,0-47.67-12.33t-29.88-35.6l0,118.62c0,3.35-.84,5.93-2.51,7.76a9.15,9.15,0,0,1-7.07,2.73q-9.59,0-9.58-10.5l.11-289.25a10.34,10.34,0,0,1,2.51-7.3,10.49,10.49,0,0,1,14.14,0,10.42,10.42,0,0,1,2.51,7.3v36q10-22.81,29.89-34.9T1355,662.09Q1384.21,662.11,1406.57,676.48ZM1412,848q20.54-24.85,20.55-70.48T1412,706.37q-20.52-25.56-57-25.57-37.41,0-57.5,24.84t-20.1,70.94q0,46.09,20,71.18t57,25.12Q1391.44,872.9,1412,848Z" transform="translate(-102 -476.45)" />
|
||||
<path d="M1688.91,847.68a8.28,8.28,0,0,1,2.28,5.71,14.17,14.17,0,0,1-3.43,8.89q-3.42,4.35-12.09,9.81-32.85,19.6-68,19.59-49.73,0-78.23-30.6t-28.49-84q0-34.22,12.8-60.22t36.06-40.36q23.28-14.37,53.39-14.35,42,0,66.14,27.4t24.15,74.83q0,8.21-3.19,11.86t-11.41,3.65l-157-.06q.43,44.71,22.56,68.9t62.27,24.2q21.9,0,36.27-5.91a146.07,146.07,0,0,0,28.07-15.51q9.57-6.37,12.32-6.38A7.16,7.16,0,0,1,1688.91,847.68Zm-141.61-146q-21.22,21.68-24.89,60.44l152.85.06q-.91-39.69-19.6-60.91T1603.2,680Q1568.52,680,1547.3,701.63Z" transform="translate(-102 -476.45)" />
|
||||
<path d="M1855.05,663.19c3.65,0,6.16.69,7.52,2.06s2.06,3.42,2.06,6.16q0,7.77-9.59,9.12l-13.69,1.37q-33.3,3.18-50.19,24.61a77.43,77.43,0,0,0-16.9,49.27l0,124.56q0,5-2.51,7.75a10.5,10.5,0,0,1-14.15,0c-1.67-1.83-2.5-4.41-2.5-7.76l.08-206.68q0-10.49,9.58-10.49a9.25,9.25,0,0,1,6.84,2.74c1.83,1.83,2.74,4.41,2.74,7.76v32.85q19.62-39.22,71.19-42.86l6.38-.46Z" transform="translate(-102 -476.45)" />
|
||||
<path d="M2078.6,666a9.84,9.84,0,0,1,2.74,7.3l-.08,204.85q0,47.45-22.38,71.62T1992.26,974q-24.65,0-43.34-4.58a127.8,127.8,0,0,1-36-15.07q-8.67-5.47-12.32-9.81a13.79,13.79,0,0,1-3.64-8.9,7.82,7.82,0,0,1,8.21-8.21q2.28,0,11.86,5.48a204.67,204.67,0,0,0,32.85,16q16.86,6.39,43.34,6.4,33.75,0,51.56-18.91t17.81-55.43l0-47.9q-9.59,23.71-29.9,36.26t-49.05,12.53q-29.2,0-51.78-13.71t-35.11-38.8q-12.54-25.08-12.53-57.49t12.57-57.25q12.56-24.85,35.15-38.54t51.79-13.67q28.27,0,48.35,12.34t30.1,35.14l0-36q0-10.5,9.58-10.49A9.25,9.25,0,0,1,2078.6,666Zm-37.25,172.67q20.78-24.39,20.79-66.83t-20.73-66.62q-20.76-24.18-57.71-24.2-36.51,0-57.72,24.16t-21.25,66.6q0,42.88,21.19,67.08t57.71,24.2Q2020.59,863.1,2041.35,838.69Z" transform="translate(-102 -476.45)" />
|
||||
<path d="M2161.32,888.24c-1.67-1.83-2.51-4.41-2.5-7.76l.11-305.22q0-5,2.51-7.76a9.09,9.09,0,0,1,7.08-2.73,9,9,0,0,1,6.84,3,10.73,10.73,0,0,1,2.73,7.53L2178,880.49a10.75,10.75,0,0,1-2.74,7.53,9,9,0,0,1-6.85,3A9.09,9.09,0,0,1,2161.32,888.24Z" transform="translate(-102 -476.45)" />
|
||||
<path d="M2416.64,744.16l0,136q0,5-2.51,7.76a9.12,9.12,0,0,1-7.08,2.73,9.45,9.45,0,0,1-7.29-3,10.66,10.66,0,0,1-2.74-7.53V848.18q-10,21-29,32.38T2324.88,892a84.41,84.41,0,0,1-38.32-8.68,71.93,71.93,0,0,1-27.6-23.51,59.47,59.47,0,0,1-10.71-33.08q-.43-25.08,12.34-38.78t42.89-19.6q30.12-5.91,84.41-5.9H2397V743.24q0-32.38-13-47.22t-40.83-14.85q-21.9,0-39,5.69t-27.15,13q-1.83,1.38-7.53,5.25c-3.81,2.59-6.93,3.87-9.36,3.87a7.82,7.82,0,0,1-5.7-2.51,8.37,8.37,0,0,1-2.51-6.16q0-8.65,16-18.24a134.23,134.23,0,0,1,34.91-14.59,146.13,146.13,0,0,1,37.64-5Q2416.67,662.5,2416.64,744.16ZM2361.85,863a69.15,69.15,0,0,0,25.78-27.13q9.36-17.34,9.37-39.24V780.2h-8.67q-47.91,0-73,3.85t-35.14,13.21q-10,9.36-10,27.6,0,21,15.49,34.46t39.24,13.47Q2345.41,872.79,2361.85,863Z" transform="translate(-102 -476.45)" />
|
||||
<path d="M2526.54,886.78a122.09,122.09,0,0,1-31.93-14.38q-9.14-5.92-12.32-9.82a13.55,13.55,0,0,1-3.19-8.89,8.24,8.24,0,0,1,2.29-5.71,7.17,7.17,0,0,1,5.47-2.5q2.28,0,6.16,2.51c2.58,1.67,4.33,2.82,5.25,3.42a142.87,142.87,0,0,0,27.59,15.75q14.37,6.17,36.27,6.18,29.65,0,45.63-10.94t16-31q0-11.85-5.7-19.62t-18.7-13.46q-13-5.7-37.17-11.19Q2522,778,2504.69,763.82t-17.33-37.42q0-28.29,22.15-46.08t57.26-17.77a114.74,114.74,0,0,1,32.4,4.58,95.21,95.21,0,0,1,27.37,12.78q8.65,6.4,12.54,11.19a15.49,15.49,0,0,1,3.87,9.81,8.24,8.24,0,0,1-2.28,5.7,7.18,7.18,0,0,1-5.48,2.51c-2.44,0-6.38-2.29-11.86-6.85a127.56,127.56,0,0,0-24.86-15.07q-13-5.93-32.62-5.94-25.54,0-41.29,12.07T2508.81,725q0,11.87,5,19.39t17.1,13q12.09,5.47,34.45,10.5,31.47,7.78,48.35,15.76t24,19.18q7.07,11.18,7.06,28.51,0,27.38-22.6,44t-60,16.63A125.17,125.17,0,0,1,2526.54,886.78Z" transform="translate(-102 -476.45)" />
|
||||
<path d="M2743.71,886.86a121.76,121.76,0,0,1-31.93-14.38q-9.14-5.93-12.32-9.81a13.6,13.6,0,0,1-3.19-8.9,8.25,8.25,0,0,1,2.29-5.7,7.14,7.14,0,0,1,5.47-2.51q2.28,0,6.16,2.51l5.25,3.42a143.22,143.22,0,0,0,27.6,15.76q14.36,6.16,36.26,6.17,29.65,0,45.63-10.93t16-31q0-11.87-5.69-19.62t-18.7-13.47q-13-5.7-37.18-11.19-40.15-9.13-57.48-23.29t-17.32-37.42q0-28.29,22.14-46.07T2784,662.64a114.31,114.31,0,0,1,32.39,4.57A94.83,94.83,0,0,1,2843.71,680q8.65,6.39,12.54,11.18a15.51,15.51,0,0,1,3.87,9.81,8.24,8.24,0,0,1-2.28,5.7,7.12,7.12,0,0,1-5.47,2.51q-3.66,0-11.86-6.85a128.65,128.65,0,0,0-24.86-15.07q-13-5.92-32.62-5.94-25.56,0-41.3,12.08T2726,725.12q0,11.86,5,19.39t17.1,13q12.09,5.47,34.45,10.51,31.47,7.77,48.35,15.76t24,19.17q7.07,11.19,7.06,28.51,0,27.38-22.6,44t-60,16.62A125.19,125.19,0,0,1,2743.71,886.86Z" transform="translate(-102 -476.45)" />
|
||||
</g>
|
||||
<g>
|
||||
<path d="M629,748.94A254.5,254.5,0,0,1,177.8,910.5a253.51,253.51,0,0,1-57-141.26A254.75,254.75,0,0,1,269.49,517c48.18-22.05,103.68-28.13,155.56-17.55a253.76,253.76,0,0,1,204,249.5c0,11.58,18,11.6,18,0-.15-56.09-17.45-111.67-50.07-157.4A275.77,275.77,0,0,0,468,492.94c-53.4-19.67-113.16-21.73-167.89-6.22A272.65,272.65,0,0,0,141.23,889.87c28.26,46.62,70.66,84.65,120.38,107.11a275.73,275.73,0,0,0,167.55,18.95,272.71,272.71,0,0,0,142.15-78.52c38.14-39.87,64-91.3,72.33-145.91A287.4,287.4,0,0,0,647,748.94C647,737.36,629,737.34,629,748.94Z" transform="translate(-102 -476.45)" />
|
||||
<path d="M276.33,926.44a203,203,0,0,0,98.67,26c7.72,0,7.73-12,0-12a190.29,190.29,0,0,1-92.62-24.36c-6.74-3.79-12.79,6.58-6.05,10.36Z" transform="translate(-102 -476.45)" />
|
||||
<path d="M233.58,611c-28.56,28.78-48.31,65.84-55.4,105.83a199.79,199.79,0,0,0,55.4,177c5.45,5.49,13.93-3,8.48-8.48C186.68,829.56,171,742.65,205.27,671.45a191.94,191.94,0,0,1,36.79-52c5.45-5.48-3-14-8.48-8.48Z" transform="translate(-102 -476.45)" />
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
||||
|
Before Width: | Height: | Size: 7 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 45 KiB |
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Reference in a new issue