replace flake8 with ruff for linting, refactor issues

thatmattlove 2022-12-24 17:53:05 -05:00
parent 086279ab85
commit 9c9a8469a4
40 changed files with 494 additions and 423 deletions

.flake8 (deleted)

@@ -1,33 +0,0 @@
[flake8]
max-line-length=100
count=True
show-source=False
statistics=True
exclude=.git, __pycache__, hyperglass/ui, hyperglass/plugins/external, hyperglass/api/examples/*.py, hyperglass/compat/_sshtunnel.py
filename=*.py
per-file-ignores=
hyperglass/main.py:E402
# Disable classmethod warning for validator decorators
hyperglass/models/*.py:N805,E0213,R0903,E501,C0301
hyperglass/models/api/*.py:N805,E0213,R0903,E501,C0301
hyperglass/models/commands/*.py:N805,E0213,R0903,E501,C0301
hyperglass/parsing/models/*.py:N805,E0213,R0903
hyperglass/defaults/*/*.py:E501
hyperglass/configuration/models/*.py:N805,E0213,R0903,E501,C0301
# Disable unused import warning for modules
hyperglass/*/__init__.py:F401
hyperglass/models/*/__init__.py:F401
# Disable assertion and docstring checks on tests.
hyperglass/**/test_*.py:S101,D103,D100,D104
hyperglass/**/tests/*.py:S101,D103,D100,D104
hyperglass/**/tests/__init__.py:D103,D100,D104
hyperglass/state/hooks.py:F811
# Ignore whitespace in docstrings
hyperglass/cli/static.py:W293
# Ignore docstring standards
hyperglass/cli/main.py:D400,D403
ignore=W503,R504,D202,S403,S301,S404,E731,D402,IF100,B008
select=B, BLK, C, D, E, F, I, II, N, P, PIE, S, R, W
disable-noqa=False
hang-closing=False
max-complexity=10

.gitignore

@@ -5,7 +5,6 @@ TODO*
 test.py
 .DS_Store
 .idea
-.vscode
 old_*.py
 *.rdb
 #

.pre-commit-config.yaml

@@ -1,10 +1,15 @@
 repos:
-  - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v2.3.0
-    hooks:
-      - id: flake8
-        stages:
-          - commit
+  - repo: https://github.com/pycqa/isort
+    rev: 5.11.2
+    hooks:
+      - id: isort
+        args: ['--profile', 'black', '--filter-files', '--check']
+  - repo: https://github.com/charliermarsh/ruff-pre-commit
+    rev: v0.0.192
+    hooks:
+      - id: ruff
+        # Respect `exclude` and `extend-exclude` settings.
+        args: ['--force-exclude']
   - repo: local
     hooks:
       - id: typescript

.vscode/settings.json (new file)

@@ -0,0 +1,7 @@
{
"yaml.format.singleQuote": true,
"eslint.workingDirectories": ["./hyperglass/ui"],
"python.linting.mypyEnabled": false,
"python.linting.enabled": false,
"prettier.configPath": "./hyperglass/ui/.prettierrc"
}

@@ -150,8 +150,7 @@ async def docs(params: "Params" = Depends(get_params)):
         return docs_func(
             openapi_url=params.docs.openapi_url, title=params.site_title + " - API Docs"
         )
-    else:
-        raise HTTPException(detail="Not found", status_code=404)
+    raise HTTPException(detail="Not found", status_code=404)

 async def router(id: str, devices: "Devices" = Depends(get_devices)):
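A note on the cleanup pattern recurring throughout this commit: an `else` (or `elif`) that directly follows a `return` or `raise` is redundant, since the early exit means the branch can be flattened without changing behavior. A minimal before/after sketch with illustrative names, not from this codebase:

def lookup(enabled: bool) -> str:
    # Before: the `else` branch only adds nesting.
    if enabled:
        return "found"
    else:
        return "missing"

def lookup_flat(enabled: bool) -> str:
    # After: the guard returns early, so the `else` is dropped.
    if enabled:
        return "found"
    return "missing"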

@@ -269,9 +269,9 @@ def _plugins(
     if len(matching) == 0:
         echo.error(f"No plugins matching {search!r}")
         raise typer.Exit(1)
-    else:
-        echo._console.print(Columns(matching))
-        raise typer.Exit(0)
+    echo._console.print(Columns(matching))
+    raise typer.Exit(0)

 echo._console.print(Columns(all_plugins))

@@ -36,8 +36,8 @@ def load_dsl(path: Path, *, empty_allowed: bool) -> LoadedConfig:
             loader = yaml.safe_load
-        except ImportError:
-            raise ConfigLoaderMissing(path)
+        except ImportError as err:
+            raise ConfigLoaderMissing(path) from err
     elif path.suffix == ".toml":
         try:
             # Third Party
@@ -45,8 +45,8 @@ def load_dsl(path: Path, *, empty_allowed: bool) -> LoadedConfig:
             loader = toml.load
-        except ImportError:
-            raise ConfigLoaderMissing(path)
+        except ImportError as err:
+            raise ConfigLoaderMissing(path) from err
     elif path.suffix == ".json":
         # Standard Library
@@ -112,10 +112,10 @@ def load_config(name: str, *, required: bool) -> LoadedConfig:
     if path is None and required is False:
         return {}
-    elif path.suffix == ".py":
+    if path.suffix == ".py":
         return load_python(path, empty_allowed=not required)
-    elif path.suffix.replace(".", "") in CONFIG_EXTENSIONS:
+    if path.suffix.replace(".", "") in CONFIG_EXTENSIONS:
         return load_dsl(path, empty_allowed=not required)
     raise ConfigError(
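The `raise ... from err` changes here (and in most files below) add explicit exception chaining, which bugbear rule B904 enforces: re-raising inside an `except` block without `from` hides the original error, while chaining preserves it as `__cause__` so both tracebacks are shown. A minimal sketch, with a hypothetical stand-in for the loader exception:

class ConfigLoaderMissing(Exception):
    """Hypothetical stand-in for hyperglass's loader exception."""

def get_yaml_loader():
    try:
        import yaml  # may be absent in a minimal install
    except ImportError as err:
        # `from err` chains the ImportError as __cause__, so the traceback
        # reads "The above exception was the direct cause of ...".
        raise ConfigLoaderMissing("a YAML parser is required") from err
    return yaml.safe_load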

@@ -156,7 +156,7 @@ class Formatter:
         if self.platform in ("bird", "bird_ssh"):
             if self.query_type == "bgp_aspath":
                 return self._with_formatter(self._bird_bgp_aspath)
-            elif self.query_type == "bgp_community":
+            if self.query_type == "bgp_community":
                 return self._with_formatter(self._bird_bgp_community)

         return self._with_formatter(self._default)

@@ -43,7 +43,7 @@ class HttpClient(Connection):
                 self.config._attribute_map.query_location: self.query_data.query_location,
                 self.config._attribute_map.query_type: self.query_data.query_type,
             }
-        elif isinstance(self.config.query, t.Dict):
+        if isinstance(self.config.query, t.Dict):
             return {
                 key: value.format(
                     **{
@@ -65,13 +65,13 @@ class HttpClient(Connection):
         if self.config.body_format == "json":
             return {"json": data}
-        elif self.config.body_format == "yaml":
+        if self.config.body_format == "yaml":
             # Third Party
             import yaml

             return {"content": yaml.dump(data), "headers": {"content-type": "text/yaml"}}
-        elif self.config.body_format == "xml":
+        if self.config.body_format == "xml":
             # Third Party
             import xmltodict  # type: ignore
@@ -79,7 +79,7 @@ class HttpClient(Connection):
                 "content": xmltodict.unparse({"query": data}),
                 "headers": {"content-type": "application/xml"},
             }
-        elif self.config.body_format == "text":
+        if self.config.body_format == "text":
             return {"data": data}

         return {}
@@ -108,10 +108,10 @@ class HttpClient(Connection):
                 responses += (data,)
             except (httpx.TimeoutException) as error:
-                raise DeviceTimeout(error=error, device=self.device)
+                raise DeviceTimeout(error=error, device=self.device) from error
             except (httpx.HTTPStatusError) as error:
                 if error.response.status_code == 401:
-                    raise AuthError(error=error, device=self.device)
-                raise RestError(error=error, device=self.device)
+                    raise AuthError(error=error, device=self.device) from error
+                raise RestError(error=error, device=self.device) from error
         return responses

@@ -54,6 +54,8 @@ class SSHConnection(Connection):
             log.error(
                 f"Error connecting to device {self.device.name} via " f"proxy {proxy.name}"
             )
-            raise ScrapeError(error=scrape_proxy_error, device=self.device)
+            raise ScrapeError(
+                error=scrape_proxy_error, device=self.device
+            ) from scrape_proxy_error
         return opener

@@ -102,10 +102,10 @@ class NetmikoConnection(SSHConnection):
             nm_connect_direct.disconnect()
         except NetMikoTimeoutException as scrape_error:
-            raise DeviceTimeout(error=scrape_error, device=self.device)
+            raise DeviceTimeout(error=scrape_error, device=self.device) from scrape_error
         except NetMikoAuthenticationException as auth_error:
-            raise AuthError(error=auth_error, device=self.device)
+            raise AuthError(error=auth_error, device=self.device) from auth_error

         if not responses:
             raise ResponseEmpty(query=self.query_data)

@@ -7,9 +7,11 @@ from .generic import BaseExternal
 from .msteams import MSTeams
 from .bgptools import network_info, network_info_sync
 from .webhooks import Webhook
+from .http_client import HTTPClient

 __all__ = (
     "BaseExternal",
+    "HTTPClient",
     "MSTeams",
     "network_info_sync",
     "network_info",

@@ -243,8 +243,8 @@ class BaseExternal:
         if not isinstance(timeout, int):
             try:
                 timeout = int(timeout)
-            except TypeError:
-                raise self._exception(f"Timeout must be an int, got: {str(timeout)}")
+            except TypeError as err:
+                raise self._exception(f"Timeout must be an int, got: {str(timeout)}") from err
             request["timeout"] = timeout

         log.debug("Constructed request parameters {}", request)

@@ -148,7 +148,7 @@ async def network_info(*targets: str) -> TargetData:
     # Try to use cached data for each of the items in the list of
     # resources.
-    for target in (t for t in query_targets if t in cached):
+    for target in (target for target in query_targets if target in cached):
         # Reassign the cached network info to the matching resource.
         query_data[target] = cached[target]
         log.debug("Using cached network info for {}", target)

hyperglass/external/http_client.py (new file)

@@ -0,0 +1,234 @@
"""HTTP Client for plugin use."""
# Standard Library
import typing as t
# Project
from hyperglass.models.fields import JsonValue, Primitives
# Local
from ._base import BaseExternal
class HTTPClient(BaseExternal, name="HTTPClient"):
"""Wrapper around a standard HTTP Client."""
def __init__(self: "HTTPClient", base_url: str, timeout: int = 10) -> None:
"""Create an HTTPClient instance."""
super().__init__(base_url=base_url, timeout=timeout, parse=False)
async def aget(
self: "HTTPClient",
endpoint: str,
headers: t.Dict[str, str] = None,
params: t.Dict[str, JsonValue[Primitives]] = None,
data: t.Optional[t.Any] = None,
timeout: t.Optional[int] = None,
) -> t.Any:
"""Perform an async HTTP GET request."""
return await self._arequest(
method="GET",
endpoint=endpoint,
headers=headers,
params=params,
data=data,
timeout=timeout,
)
async def apost(
self: "HTTPClient",
endpoint: str,
headers: t.Dict[str, str] = None,
params: t.Dict[str, JsonValue[Primitives]] = None,
data: t.Optional[t.Any] = None,
timeout: t.Optional[int] = None,
) -> t.Any:
"""Perform an async HTTP POST request."""
return await self._arequest(
method="POST",
endpoint=endpoint,
headers=headers,
params=params,
data=data,
timeout=timeout,
)
async def aput(
self: "HTTPClient",
endpoint: str,
headers: t.Dict[str, str] = None,
params: t.Dict[str, JsonValue[Primitives]] = None,
data: t.Optional[t.Any] = None,
timeout: t.Optional[int] = None,
) -> t.Any:
"""Perform an async HTTP PUT request."""
return await self._arequest(
method="PUT",
endpoint=endpoint,
headers=headers,
params=params,
data=data,
timeout=timeout,
)
async def adelete(
self: "HTTPClient",
endpoint: str,
headers: t.Dict[str, str] = None,
params: t.Dict[str, JsonValue[Primitives]] = None,
data: t.Optional[t.Any] = None,
timeout: t.Optional[int] = None,
) -> t.Any:
"""Perform an async HTTP DELETE request."""
return await self._arequest(
method="DELETE",
endpoint=endpoint,
headers=headers,
params=params,
data=data,
timeout=timeout,
)
async def apatch(
self: "HTTPClient",
endpoint: str,
headers: t.Dict[str, str] = None,
params: t.Dict[str, JsonValue[Primitives]] = None,
data: t.Optional[t.Any] = None,
timeout: t.Optional[int] = None,
) -> t.Any:
"""Perform an async HTTP PATCH request."""
return await self._arequest(
method="PATCH",
endpoint=endpoint,
headers=headers,
params=params,
data=data,
timeout=timeout,
)
async def ahead(
self: "HTTPClient",
endpoint: str,
headers: t.Dict[str, str] = None,
params: t.Dict[str, JsonValue[Primitives]] = None,
data: t.Optional[t.Any] = None,
timeout: t.Optional[int] = None,
) -> t.Any:
"""Perform an async HTTP HEAD request."""
return await self._arequest(
method="HEAD",
endpoint=endpoint,
headers=headers,
params=params,
data=data,
timeout=timeout,
)
def get(
self: "HTTPClient",
endpoint: str,
headers: t.Dict[str, str] = None,
params: t.Dict[str, JsonValue[Primitives]] = None,
data: t.Optional[t.Any] = None,
timeout: t.Optional[int] = None,
) -> t.Any:
"""Perform an HTTP GET request."""
return self._request(
method="GET",
endpoint=endpoint,
headers=headers,
params=params,
data=data,
timeout=timeout,
)
def post(
self: "HTTPClient",
endpoint: str,
headers: t.Dict[str, str] = None,
params: t.Dict[str, JsonValue[Primitives]] = None,
data: t.Optional[t.Any] = None,
timeout: t.Optional[int] = None,
) -> t.Any:
"""Perform an HTTP POST request."""
return self._request(
method="POST",
endpoint=endpoint,
headers=headers,
params=params,
data=data,
timeout=timeout,
)
def put(
self: "HTTPClient",
endpoint: str,
headers: t.Dict[str, str] = None,
params: t.Dict[str, JsonValue[Primitives]] = None,
data: t.Optional[t.Any] = None,
timeout: t.Optional[int] = None,
) -> t.Any:
"""Perform an HTTP PUT request."""
return self._request(
method="PUT",
endpoint=endpoint,
headers=headers,
params=params,
data=data,
timeout=timeout,
)
def delete(
self: "HTTPClient",
endpoint: str,
headers: t.Dict[str, str] = None,
params: t.Dict[str, JsonValue[Primitives]] = None,
data: t.Optional[t.Any] = None,
timeout: t.Optional[int] = None,
) -> t.Any:
"""Perform an HTTP DELETE request."""
return self._request(
method="DELETE",
endpoint=endpoint,
headers=headers,
params=params,
data=data,
timeout=timeout,
)
def patch(
self: "HTTPClient",
endpoint: str,
headers: t.Dict[str, str] = None,
params: t.Dict[str, JsonValue[Primitives]] = None,
data: t.Optional[t.Any] = None,
timeout: t.Optional[int] = None,
) -> t.Any:
"""Perform an HTTP PATCH request."""
return self._request(
method="PATCH",
endpoint=endpoint,
headers=headers,
params=params,
data=data,
timeout=timeout,
)
def head(
self: "HTTPClient",
endpoint: str,
headers: t.Dict[str, str] = None,
params: t.Dict[str, JsonValue[Primitives]] = None,
data: t.Optional[t.Any] = None,
timeout: t.Optional[int] = None,
) -> t.Any:
"""Perform an HTTP HEAD request."""
return self._request(
method="HEAD",
endpoint=endpoint,
headers=headers,
params=params,
data=data,
timeout=timeout,
)
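The new client adds one async (`a`-prefixed) and one sync method per HTTP verb, each delegating to `BaseExternal`'s `_arequest`/`_request`. Assuming the constructor shown above, usage from a plugin might look like this sketch (URL, endpoint, and parameter values are illustrative):

client = HTTPClient(base_url="https://api.example.net", timeout=5)

# Synchronous call, e.g. from a non-async hook.
info = client.get("/lookup", params={"target": "192.0.2.1"})

# Asynchronous call, e.g. from an async plugin.
async def enrich(target: str):
    return await client.aget("/lookup", params={"target": target})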

@@ -31,8 +31,8 @@ class Webhook(BaseExternal):
         try:
             provider_class = PROVIDER_MAP[config.provider]
             return provider_class(config)
-        except KeyError:
+        except KeyError as err:
             raise UnsupportedError(
                 message="{p} is not yet supported as a webhook target.",
                 p=config.provider.title(),
-            )
+            ) from err

@@ -56,10 +56,14 @@ class Query(BaseModel):
         self.directive = query_directives[0]

+        self._input_plugin_manager = InputPluginManager()
+        self.query_target = self.transform_query_target()
+
         try:
             self.validate_query_target()
         except InputValidationError as err:
-            raise InputInvalid(**err.kwargs)
+            raise InputInvalid(**err.kwargs) from err

     def __repr__(self) -> str:
         """Represent only the query fields."""
@@ -80,14 +84,17 @@ class Query(BaseModel):
         ).hexdigest()

     def validate_query_target(self) -> None:
-        """Validate a query target after all fields/relationships havebeen initialized."""
+        """Validate a query target after all fields/relationships have been initialized."""
         # Run config/rule-based validations.
         self.directive.validate_target(self.query_target)
         # Run plugin-based validations.
-        manager = InputPluginManager()
-        manager.execute(query=self)
+        self._input_plugin_manager.validate(query=self)
         log.debug("Validation passed for query {!r}", self)

+    def transform_query_target(self) -> QueryTarget:
+        """Transform a query target based on defined plugins."""
+        return self._input_plugin_manager.transform(query=self)
+
     def dict(self) -> t.Dict[str, t.Union[t.List[str], str]]:
         """Include only public fields."""
         return super().dict(include={"query_location", "query_target", "query_type"})

@@ -216,7 +216,7 @@ class Device(HyperglassModelWithId, extra="allow"):
                 p=values["platform"],
             )
             return value
-        elif value is None and values["platform"] in SUPPORTED_STRUCTURED_OUTPUT:
+        if value is None and values["platform"] in SUPPORTED_STRUCTURED_OUTPUT:
             value = True
         else:
             value = False

@@ -79,7 +79,7 @@ class BGPRoute(HyperglassModel):
             # If router validation is enabled, return the value as-is.
             return value
-        elif structured.rpki.mode == "external":
+        if structured.rpki.mode == "external":
             # If external validation is enabled, validate the prefix
             # & asn with Cloudflare's RPKI API.
             as_path = values["as_path"]
@@ -88,9 +88,8 @@ class BGPRoute(HyperglassModel):
                 # If the AS_PATH length is 0, i.e. for an internal route,
                 # return RPKI Unknown state.
                 return 3
-            else:
-                # Get last ASN in path
-                asn = as_path[-1]
+            # Get last ASN in path
+            asn = as_path[-1]

             try:
                 net = ip_network(values["prefix"])
@@ -100,8 +99,8 @@ class BGPRoute(HyperglassModel):
             # Only do external RPKI lookups for global prefixes.
             if net.is_global:
                 return rpki_state(prefix=values["prefix"], asn=asn)
-            else:
-                return value
+            return value


 class BGPRouteTable(HyperglassModel):

@@ -137,7 +137,7 @@ class RuleWithIP(Rule):
             valid_target = ip_network(target)
         except ValueError as err:
-            raise InputValidationError(error=str(err), target=target)
+            raise InputValidationError(error=str(err), target=target) from err

         is_member = self.membership(valid_target, self.condition)
         in_range = self.in_range(valid_target)
@@ -146,7 +146,7 @@ class RuleWithIP(Rule):
             self._passed = True
             return True
-        elif is_member and not in_range:
+        if is_member and not in_range:
             self._passed = False
             raise InputValidationError(
                 error="Prefix-length is not within range {ge}-{le}",
@@ -155,7 +155,7 @@ class RuleWithIP(Rule):
                 le=self.le,
             )
-        elif is_member and self.action == "deny":
+        if is_member and self.action == "deny":
             self._passed = False
             raise InputValidationError(
                 error="Member of denied network '{network}'",
@@ -204,7 +204,7 @@ class RuleWithPattern(Rule):
             if is_match and self.action == "permit":
                 return True
-            elif is_match and self.action == "deny":
+            if is_match and self.action == "deny":
                 return InputValidationError(target=value, error="Denied")
             return False
@@ -213,13 +213,13 @@ class RuleWithPattern(Rule):
             if isinstance(result, BaseException):
                 self._passed = False
                 raise result
-            elif result is False:
+            if result is False:
                 self._passed = False
                 return result
             self._passed = True
             return True

-        elif isinstance(target, t.List) and not multiple:
+        if isinstance(target, t.List) and not multiple:
             raise InputValidationError("Target must be a single value")

         result = validate_single_value(target)
@@ -277,7 +277,7 @@ class Directive(HyperglassUniqueModel, unique_by=("id", "table_output")):
         if self.field.is_select:
             return "select"
-        elif self.field.is_text or self.field.is_ip:
+        if self.field.is_text or self.field.is_ip:
             return "text"
         return None

@@ -64,7 +64,7 @@ class Action(str):
         if value in cls.permits:
             return cls("permit")
-        elif value in cls.denies:
+        if value in cls.denies:
             return cls("deny")

         raise ValueError(

@@ -118,7 +118,9 @@ class HyperglassUniqueModel(HyperglassModel):
     def __hash__(self: "HyperglassUniqueModel") -> int:
         """Create a hashed representation of this model's name."""
-        fields = dict(zip(self._unique_fields, (getattr(self, f) for f in self._unique_fields)))
+        fields = dict(
+            zip(self._unique_fields, (getattr(self, f) for f in self._unique_fields), strict=True)
+        )
         return hash(json.dumps(fields))
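The added `strict=True` satisfies bugbear's `zip()` strictness check (B905): plain `zip()` silently truncates to the shortest iterable, while `strict=True` raises `ValueError` on a length mismatch. Note the keyword requires Python 3.10 or later. For example:

fields = ("id", "table_output")
dict(zip(fields, (1, True), strict=True))  # {'id': 1, 'table_output': True}
dict(zip(fields, (1,), strict=True))       # ValueError: zip() argument 2 is shorter than argument 1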

@@ -79,7 +79,7 @@ class JuniperRouteTableEntry(JuniperBase):
             if "selected_next_hop" in hop:
                 selected_next_hop = hop.get("to", "")
                 break
-            elif hop.get("to") is not None:
+            if hop.get("to") is not None:
                 selected_next_hop = hop["to"]
                 break
@@ -114,8 +114,8 @@ class JuniperRouteTableEntry(JuniperBase):
         if not isinstance(value, dict):
             try:
                 value = int(value)
-            except ValueError:
-                raise ValueError(f"Age field is in an unexpected format. Got: {value}")
+            except ValueError as err:
+                raise ValueError(f"Age field is in an unexpected format. Got: {value}") from err
         else:
             value = value.get("@junos:seconds", 0)
         return int(value)

@@ -84,15 +84,15 @@ class HyperglassSettings(BaseSettings):
         if value is None:
             if values["debug"] is False:
                 return ip_address("::1")
-            elif values["debug"] is True:
+            if values["debug"] is True:
                 return ip_address("::")

         if isinstance(value, str):
             if value != "localhost":
                 try:
                     return ip_address(value)
-                except ValueError:
-                    raise ValueError(str(value))
+                except ValueError as err:
+                    raise ValueError(str(value)) from err
             elif value == "localhost":
                 return ip_address("::1")

@@ -39,19 +39,19 @@ def parse_arista(output: Sequence[str]) -> Dict:  # noqa: C901
         except json.JSONDecodeError as err:
             log.critical("Error decoding JSON: {}", str(err))
-            raise ParsingError("Error parsing response data")
+            raise ParsingError("Error parsing response data") from err
         except KeyError as err:
             log.critical("'{}' was not found in the response", str(err))
-            raise ParsingError("Error parsing response data")
+            raise ParsingError("Error parsing response data") from err
         except IndexError as err:
             log.critical(str(err))
-            raise ParsingError("Error parsing response data")
+            raise ParsingError("Error parsing response data") from err
         except ValidationError as err:
             log.critical(str(err))
-            raise ParsingError(err.errors())
+            raise ParsingError(err.errors()) from err

-    log.debug("Serialzed: {}", data)
+    log.debug("Serialized: {}", data)
     return data

@@ -94,10 +94,10 @@ def parse_juniper(output: Sequence) -> Dict:  # noqa: C901
         except KeyError as err:
             log.critical("{} was not found in the response", str(err))
-            raise ParsingError("Error parsing response data")
+            raise ParsingError("Error parsing response data") from err
         except ValidationError as err:
             log.critical(str(err))
-            raise ParsingError(err.errors())
+            raise ParsingError(err.errors()) from err

     return data

@@ -46,7 +46,7 @@ def parse_mikrotik(output: str):
     # Remove any lines marked for removal and re-join with a single
     # newline character.
-    lines = [l for i, l in enumerate(lines) if i not in remove_lines]
+    lines = [line for idx, line in enumerate(lines) if idx not in remove_lines]
     output = "\n".join(lines)

     return output

@@ -2,7 +2,7 @@
 # Local
 from .main import register_plugin, init_builtin_plugins
-from ._input import InputPlugin, InputPluginReturn
+from ._input import InputPlugin, InputPluginValidationReturn
 from ._output import OutputType, OutputPlugin
 from ._manager import InputPluginManager, OutputPluginManager
@@ -10,7 +10,7 @@ __all__ = (
     "init_builtin_plugins",
     "InputPlugin",
     "InputPluginManager",
-    "InputPluginReturn",
+    "InputPluginValidationReturn",
     "OutputPlugin",
     "OutputPluginManager",
     "OutputType",

@@ -11,12 +11,15 @@ from pydantic import PrivateAttr
 from hyperglass.state.hooks import use_state

 # Local
-from .._input import InputPlugin, InputPluginReturn
+from .._input import InputPlugin

 if t.TYPE_CHECKING:
     # Project
     from hyperglass.models.api.query import Query

+    # Local
+    from .._input import InputPluginValidationReturn
+
 _32BIT = 0xFFFFFFFF
 _16BIT = 0xFFFF
 EXTENDED_TYPES = ("target", "origin")
@@ -63,10 +66,10 @@ def validate_new_format(value: str) -> bool:
     if all((check_decimal(one, _16BIT), check_decimal(two, _16BIT))):
         # Handle standard format, e.g. `65000:1`
         return True
-    elif all((check_decimal(one, _16BIT), check_decimal(two, _32BIT))):
+    if all((check_decimal(one, _16BIT), check_decimal(two, _32BIT))):
         # Handle extended format, e.g. `65000:4294967295`
         return True
-    elif all((check_string(one), check_decimal(two, _16BIT))):
+    if all((check_string(one), check_decimal(two, _16BIT))):
         # Handle IP address format, e.g. `192.0.2.1:65000`
         return True
@@ -92,7 +95,7 @@ class ValidateBGPCommunity(InputPlugin):
     __hyperglass_builtin__: bool = PrivateAttr(True)

-    def validate(self, query: "Query") -> InputPluginReturn:
+    def validate(self, query: "Query") -> "InputPluginValidationReturn":
         """Ensure an input query target is a valid BGP community."""
         params = use_state("params")

@@ -48,19 +48,19 @@ def parse_arista(output: t.Sequence[str]) -> "OutputDataModel":
         except json.JSONDecodeError as err:
             log.critical("Error decoding JSON: {}", str(err))
-            raise ParsingError("Error parsing response data")
+            raise ParsingError("Error parsing response data") from err
         except KeyError as err:
             log.critical("'{}' was not found in the response", str(err))
-            raise ParsingError("Error parsing response data")
+            raise ParsingError("Error parsing response data") from err
         except IndexError as err:
             log.critical(str(err))
-            raise ParsingError("Error parsing response data")
+            raise ParsingError("Error parsing response data") from err
         except ValidationError as err:
             log.critical(str(err))
-            raise ParsingError(err.errors())
+            raise ParsingError(err.errors()) from err

     return result

@@ -108,10 +108,10 @@ def parse_juniper(output: Sequence[str]) -> "OutputDataModel":  # noqa: C901
             raise ParsingError("Error parsing response data") from err
         except KeyError as err:
-            raise ParsingError("{key} was not found in the response", key=str(err))
+            raise ParsingError("{key} was not found in the response", key=str(err)) from err
         except ValidationError as err:
-            raise ParsingError(err)
+            raise ParsingError(err) from err

     return result

@@ -8,9 +8,11 @@ from ._base import DirectivePlugin, HyperglassPlugin
 if t.TYPE_CHECKING:
     # Project
-    from hyperglass.models.api.query import Query
+    from hyperglass.models.api.query import Query, QueryTarget

-InputPluginReturn = t.Union[None, bool]
+InputPluginValidationReturn = t.Union[None, bool]
+InputPluginTransformReturn = t.Union[t.Sequence["QueryTarget"], "QueryTarget"]


 class InputPlugin(HyperglassPlugin, DirectivePlugin):
@@ -19,6 +21,10 @@ class InputPlugin(HyperglassPlugin, DirectivePlugin):
     _type = "input"
     failure_reason: t.Optional[str] = None

-    def validate(self, query: "Query") -> InputPluginReturn:
+    def validate(self, query: "Query") -> InputPluginValidationReturn:
         """Validate input from hyperglass UI/API."""
         return None
+
+    def transform(self, query: "Query") -> InputPluginTransformReturn:
+        """Transform query target prior to running commands."""
+        return query.query_target
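With the new `transform` hook, a plugin can rewrite the query target before commands run, while `validate` keeps its accept/reject role. A hypothetical subclass sketch:

class StripWhitespace(InputPlugin):
    """Hypothetical plugin that trims surrounding whitespace from the target."""

    def transform(self, query: "Query") -> InputPluginTransformReturn:
        target = query.query_target
        if isinstance(target, str):
            return target.strip()
        return target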

@@ -11,7 +11,7 @@ from hyperglass.exceptions.private import PluginError, InputValidationError
 # Local
 from ._base import PluginType, HyperglassPlugin
-from ._input import InputPlugin, InputPluginReturn
+from ._input import InputPlugin, InputPluginTransformReturn, InputPluginValidationReturn
 from ._output import OutputType, OutputPlugin

 if t.TYPE_CHECKING:
@@ -117,7 +117,7 @@ class PluginManager(t.Generic[PluginT]):
             log.success("Registered {} plugin {!r}", self._type, instance.name)
             return
         except TypeError:
-            raise PluginError(
+            raise PluginError(  # noqa: B904
                 "Plugin '{p}' has not defined a required method. "
                 "Please consult the hyperglass documentation.",
                 p=repr(plugin),
@@ -128,24 +128,27 @@ class PluginManager(t.Generic[PluginT]):
 class InputPluginManager(PluginManager[InputPlugin], type="input"):
     """Manage Input Validation Plugins."""

-    def execute(self: "InputPluginManager", *, query: "Query") -> InputPluginReturn:
+    def _gather_plugins(
+        self: "InputPluginManager", query: "Query"
+    ) -> t.Generator[InputPlugin, None, None]:
+        for plugin in self.plugins(builtins=True):
+            if plugin.directives and query.directive.id in plugin.directives:
+                yield plugin
+            if plugin.ref in query.directive.plugins:
+                yield plugin
+            if plugin.common is True:
+                yield plugin
+
+    def validate(self: "InputPluginManager", query: "Query") -> InputPluginValidationReturn:
         """Execute all input validation plugins.

         If any plugin returns `False`, execution is halted.
         """
         result = None
-        builtins = (
-            plugin
-            for plugin in self.plugins(builtins=True)
-            if plugin.directives and query.directive.id in plugin.directives
-        )
-        directives = (plugin for plugin in self.plugins() if plugin.ref in query.directive.plugins)
-        common = (plugin for plugin in self.plugins() if plugin.common is True)
-        for plugin in (*directives, *builtins, *common):
+        for plugin in self._gather_plugins(query):
             result = plugin.validate(query)
             result_test = "valid" if result is True else "invalid" if result is False else "none"
-            log.debug("Input Plugin {!r} result={!r}", plugin.name, result_test)
+            log.debug("Input Plugin Validation {!r} result={!r}", plugin.name, result_test)
             if result is False:
                 raise InputValidationError(
                     error="No matched validation rules", target=query.query_target
@@ -154,6 +157,14 @@ class InputPluginManager(PluginManager[InputPlugin], type="input"):
                 return result
         return result

+    def transform(self: "InputPluginManager", *, query: "Query") -> InputPluginTransformReturn:
+        """Execute all input transformation plugins."""
+        result = query.query_target
+        for plugin in self._gather_plugins(query):
+            result = plugin.transform(query=query)
+            log.debug("Input Plugin Transform {!r} result={!r}", plugin.name, result)
+        return result
+

 class OutputPluginManager(PluginManager[OutputPlugin], type="output"):
     """Manage Output Processing Plugins."""

@@ -24,14 +24,14 @@ async def move_files(src: Path, dst: Path, files: t.Iterable[Path]) -> t.Tuple[s
     if not isinstance(src, Path):
         try:
             src = Path(src)
-        except TypeError:
-            raise error("{p} is not a valid path", p=src)
+        except TypeError as err:
+            raise error("{p} is not a valid path", p=src) from err

     if not isinstance(dst, Path):
         try:
             dst = Path(dst)
-        except TypeError:
-            raise error("{p} is not a valid path", p=dst)
+        except TypeError as err:
+            raise error("{p} is not a valid path", p=dst) from err

     if not isinstance(files, (t.List, t.Tuple, t.Generator)):
         raise error(
@@ -57,7 +57,7 @@ async def move_files(src: Path, dst: Path, files: t.Iterable[Path]) -> t.Tuple[s
             shutil.copyfile(file, dst_file)
             migrated += (str(dst_file),)
         except Exception as e:
-            raise error("Failed to migrate {f}: {e}", f=dst_file, e=e)
+            raise error("Failed to migrate {f}: {e}", f=dst_file, e=e) from e

     return migrated

@@ -60,8 +60,8 @@ async def read_package_json() -> t.Dict[str, t.Any]:
         with package_json_file.open("r") as file:
             package_json = json.load(file)
-    except Exception as e:
-        raise RuntimeError(f"Error reading package.json: {str(e)}")
+    except Exception as err:
+        raise RuntimeError(f"Error reading package.json: {str(err)}") from err

     log.debug("package.json:\n{p}", p=package_json)
@@ -98,8 +98,8 @@ async def node_initial(timeout: int = 180, dev_mode: bool = False) -> str:
             await proc.wait()
             all_messages += (messages,)
-    except Exception as e:
-        raise RuntimeError(str(e))
+    except Exception as err:
+        raise RuntimeError(str(err)) from err

     return "\n".join(all_messages)
@@ -107,7 +107,7 @@ async def node_initial(timeout: int = 180, dev_mode: bool = False) -> str:
 async def build_ui(app_path: Path):
     """Execute `next build` & `next export` from UI directory.

-    Raises:
+    ### Raises
         RuntimeError: Raised if exit code is not 0.
         RuntimeError: Raised when any other error occurs.
     """
@@ -139,12 +139,12 @@ async def build_ui(app_path: Path):
             await proc.wait()
             all_messages.append(messages)
-    except asyncio.TimeoutError:
-        raise RuntimeError(f"{timeout} second timeout exceeded while building UI")
+    except asyncio.TimeoutError as err:
+        raise RuntimeError(f"{timeout} second timeout exceeded while building UI") from err
     except Exception as err:
         log.error(err)
-        raise RuntimeError(str(err))
+        raise RuntimeError(str(err)) from err

     return "\n".join(all_messages)

@@ -128,9 +128,9 @@ def deep_convert_keys(_dict: t.Type[DeepConvert], predicate: t.Callable[[str], s
     def get_value(value: t.Any):
         if isinstance(value, t.Dict):
             return {predicate(k): get_value(v) for k, v in value.items()}
-        elif isinstance(value, t.List):
+        if isinstance(value, t.List):
             return [get_value(v) for v in value]
-        elif isinstance(value, t.Tuple):
+        if isinstance(value, t.Tuple):
             return tuple(get_value(v) for v in value)
         return value

@@ -35,12 +35,12 @@ def get_driver(_type: str, driver: t.Optional[str]) -> str:
         # If no driver is set, use the driver map with netmiko as
         # fallback.
         return DRIVER_MAP.get(_type, "netmiko")
-    elif driver in ALL_DRIVERS:
+    if driver in ALL_DRIVERS:
         # If a driver is set and it is valid, allow it.
         return driver
-    else:
-        # Otherwise, fail validation.
-        raise ValueError("{} is not a supported driver.".format(driver))
+    # Otherwise, fail validation.
+    raise ValueError("{} is not a supported driver.".format(driver))


 def resolve_hostname(
def resolve_hostname( def resolve_hostname(

poetry.lock (generated)

@@ -34,17 +34,6 @@ python-versions = ">=3.7"
 [package.extras]
 tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"]

-[[package]]
-name = "aspy-yaml"
-version = "1.3.0"
-description = "A few extensions to pyyaml."
-category = "dev"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-
-[package.dependencies]
-pyyaml = "*"
-
 [[package]]
 name = "attrs"
 version = "22.1.0"
@@ -296,157 +285,6 @@ mccabe = ">=0.7.0,<0.8.0"
 pycodestyle = ">=2.10.0,<2.11.0"
 pyflakes = ">=3.0.0,<3.1.0"

-[[package]]
-name = "flake8-bandit"
-version = "4.1.1"
-description = "Automated security testing with bandit and flake8."
-category = "dev"
-optional = false
-python-versions = ">=3.6"
-
-[package.dependencies]
-bandit = ">=1.7.3"
-flake8 = ">=5.0.0"
-
-[[package]]
-name = "flake8-black"
-version = "0.3.5"
-description = "flake8 plugin to call black as a code style validator"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-black = ">=22.1.0"
-flake8 = ">=3"
-tomli = "*"
-
-[package.extras]
-develop = ["build", "twine"]
-
-[[package]]
-name = "flake8-breakpoint"
-version = "1.1.0"
-description = "Flake8 plugin that check forgotten breakpoints"
-category = "dev"
-optional = false
-python-versions = ">=3.6,<4.0"
-
-[package.dependencies]
-flake8-plugin-utils = ">=1.0,<2.0"
-
-[[package]]
-name = "flake8-bugbear"
-version = "22.12.6"
-description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle."
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-attrs = ">=19.2.0"
-flake8 = ">=3.0.0"
-
-[package.extras]
-dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit", "tox"]
-
-[[package]]
-name = "flake8-builtins"
-version = "2.0.1"
-description = "Check for python builtins being used as variables or parameters."
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-flake8 = "*"
-
-[package.extras]
-test = ["pytest"]
-
-[[package]]
-name = "flake8-comprehensions"
-version = "3.10.1"
-description = "A flake8 plugin to help you write better list/set/dict comprehensions."
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-flake8 = ">=3.0,<3.2.0 || >3.2.0"
-
-[[package]]
-name = "flake8-deprecated"
-version = "2.0.1"
-description = "Warns about deprecated method calls."
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-flake8 = "*"
-
-[package.extras]
-test = ["pytest"]
-
-[[package]]
-name = "flake8-docstrings"
-version = "1.6.0"
-description = "Extension for flake8 which uses pydocstyle to check docstrings"
-category = "dev"
-optional = false
-python-versions = "*"
-
-[package.dependencies]
-flake8 = ">=3"
-pydocstyle = ">=2.1"
-
-[[package]]
-name = "flake8-isort"
-version = "5.0.3"
-description = "flake8 plugin that integrates isort ."
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-flake8 = "*"
-isort = ">=4.3.5,<6"
-
-[package.extras]
-test = ["pytest"]
-
-[[package]]
-name = "flake8-plugin-utils"
-version = "1.3.2"
-description = "The package provides base classes and utils for flake8 plugin writing"
-category = "dev"
-optional = false
-python-versions = ">=3.6,<4.0"
-
-[[package]]
-name = "flake8-polyfill"
-version = "1.0.2"
-description = "Polyfill package for Flake8 plugins"
-category = "dev"
-optional = false
-python-versions = "*"
-
-[package.dependencies]
-flake8 = "*"
-
-[[package]]
-name = "flake8-print"
-version = "5.0.0"
-description = "print statement checker plugin for flake8"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-flake8 = ">=3.0"
-pycodestyle = "*"
-
 [[package]]
 name = "future"
 version = "0.18.2"
@@ -773,21 +611,19 @@ testing = ["pytest", "pytest-benchmark"]
 [[package]]
 name = "pre-commit"
-version = "1.21.0"
+version = "2.20.0"
 description = "A framework for managing and maintaining multi-language pre-commit hooks."
 category = "dev"
 optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
+python-versions = ">=3.7"

 [package.dependencies]
-"aspy.yaml" = "*"
 cfgv = ">=2.0.0"
 identify = ">=1.0.0"
 nodeenv = ">=0.11.1"
-pyyaml = "*"
-six = "*"
+pyyaml = ">=5.1"
 toml = "*"
-virtualenv = ">=15.2"
+virtualenv = ">=20.0.8"

 [[package]]
 name = "psutil"
@@ -840,20 +676,6 @@ typing-extensions = ">=3.7.4.3"
 dotenv = ["python-dotenv (>=0.10.4)"]
 email = ["email-validator (>=1.0.3)"]

-[[package]]
-name = "pydocstyle"
-version = "6.1.1"
-description = "Python docstring style checker"
-category = "dev"
-optional = false
-python-versions = ">=3.6"
-
-[package.dependencies]
-snowballstemmer = "*"
-
-[package.extras]
-toml = ["toml"]
-
 [[package]]
 name = "pyflakes"
 version = "3.0.1"
@@ -1018,6 +840,14 @@ pygments = ">=2.6.0,<3.0.0"
 [package.extras]
 jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"]

+[[package]]
+name = "ruff"
+version = "0.0.192"
+description = "An extremely fast Python linter, written in Rust."
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+
 [[package]]
 name = "scp"
 version = "0.14.4"
@@ -1066,14 +896,6 @@ category = "main"
 optional = false
 python-versions = ">=3.7"

-[[package]]
-name = "snowballstemmer"
-version = "2.2.0"
-description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms."
-category = "dev"
-optional = false
-python-versions = "*"
-
 [[package]]
 name = "stackprinter"
 version = "0.2.10"
@@ -1316,7 +1138,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
 [metadata]
 lock-version = "1.1"
 python-versions = ">=3.8.1,<4.0"
-content-hash = "1b4540f55d01e47f526891a5e33e8d9346883b478bbcb17a043e9e24201ab1f6"
+content-hash = "03ec17db8c6644b1b6e4e4fc53177107c75f23179c491e5c722a379b3bdb765e"

 [metadata.files]
 aiofiles = [
@@ -1331,10 +1153,6 @@ asgiref = [
     {file = "asgiref-3.5.2-py3-none-any.whl", hash = "sha256:1d2880b792ae8757289136f1db2b7b99100ce959b2aa57fd69dab783d05afac4"},
     {file = "asgiref-3.5.2.tar.gz", hash = "sha256:4a29362a6acebe09bf1d6640db38c1dc3d9217c68e6f9f6204d72667fc19a424"},
 ]
-aspy-yaml = [
-    {file = "aspy.yaml-1.3.0-py2.py3-none-any.whl", hash = "sha256:463372c043f70160a9ec950c3f1e4c3a82db5fca01d334b6bc89c7164d744bdc"},
-    {file = "aspy.yaml-1.3.0.tar.gz", hash = "sha256:e7c742382eff2caed61f87a39d13f99109088e5e93f04d76eb8d4b28aa143f45"},
-]
 attrs = [
     {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"},
     {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"},
@@ -1616,54 +1434,6 @@ flake8 = [
     {file = "flake8-6.0.0-py2.py3-none-any.whl", hash = "sha256:3833794e27ff64ea4e9cf5d410082a8b97ff1a06c16aa3d2027339cd0f1195c7"},
     {file = "flake8-6.0.0.tar.gz", hash = "sha256:c61007e76655af75e6785a931f452915b371dc48f56efd765247c8fe68f2b181"},
 ]
-flake8-bandit = [
-    {file = "flake8_bandit-4.1.1-py3-none-any.whl", hash = "sha256:4c8a53eb48f23d4ef1e59293657181a3c989d0077c9952717e98a0eace43e06d"},
-    {file = "flake8_bandit-4.1.1.tar.gz", hash = "sha256:068e09287189cbfd7f986e92605adea2067630b75380c6b5733dab7d87f9a84e"},
-]
-flake8-black = [
-    {file = "flake8-black-0.3.5.tar.gz", hash = "sha256:9e93252b1314a8eb3c2f55dec54a07239e502b12f57567f2c105f2202714b15e"},
-    {file = "flake8_black-0.3.5-py3-none-any.whl", hash = "sha256:4948a579fdddd98fbf935fd94255dfcfce560c4ddc1ceee08e3f12d6114c8619"},
-]
-flake8-breakpoint = [
-    {file = "flake8-breakpoint-1.1.0.tar.gz", hash = "sha256:5bc70d478f0437a3655d094e1d2fca81ddacabaa84d99db45ad3630bf2004064"},
-    {file = "flake8_breakpoint-1.1.0-py3-none-any.whl", hash = "sha256:27e0cb132647f9ef348b4a3c3126e7350bedbb22e8e221cd11712a223855ea0b"},
-]
-flake8-bugbear = [
-    {file = "flake8-bugbear-22.12.6.tar.gz", hash = "sha256:4cdb2c06e229971104443ae293e75e64c6107798229202fbe4f4091427a30ac0"},
-    {file = "flake8_bugbear-22.12.6-py3-none-any.whl", hash = "sha256:b69a510634f8a9c298dfda2b18a8036455e6b19ecac4fe582e4d7a0abfa50a30"},
-]
-flake8-builtins = [
-    {file = "flake8-builtins-2.0.1.tar.gz", hash = "sha256:5aeb420130efe8acbdaf8708a582492413293a3ca25653518f687937879650a5"},
-    {file = "flake8_builtins-2.0.1-py3-none-any.whl", hash = "sha256:a5b9ca9cbc921c4455ea02e2e9963c990ac66d028c15b654625e012a1e3bbb4d"},
-]
-flake8-comprehensions = [
-    {file = "flake8-comprehensions-3.10.1.tar.gz", hash = "sha256:412052ac4a947f36b891143430fef4859705af11b2572fbb689f90d372cf26ab"},
-    {file = "flake8_comprehensions-3.10.1-py3-none-any.whl", hash = "sha256:d763de3c74bc18a79c039a7ec732e0a1985b0c79309ceb51e56401ad0a2cd44e"},
-]
-flake8-deprecated = [
-    {file = "flake8-deprecated-2.0.1.tar.gz", hash = "sha256:c7659a530aa76c3ad8be0c1e8331ed56d882ef8bfba074501a545bb3352b0c23"},
-    {file = "flake8_deprecated-2.0.1-py3-none-any.whl", hash = "sha256:8c61d2cb8d487118b6c20392b25f08ba1ec49c759e4ea562c7a60172912bc7ee"},
-]
-flake8-docstrings = [
-    {file = "flake8-docstrings-1.6.0.tar.gz", hash = "sha256:9fe7c6a306064af8e62a055c2f61e9eb1da55f84bb39caef2b84ce53708ac34b"},
-    {file = "flake8_docstrings-1.6.0-py2.py3-none-any.whl", hash = "sha256:99cac583d6c7e32dd28bbfbef120a7c0d1b6dde4adb5a9fd441c4227a6534bde"},
-]
-flake8-isort = [
-    {file = "flake8-isort-5.0.3.tar.gz", hash = "sha256:0951398c343c67f4933407adbbfb495d4df7c038650c5d05753a006efcfeb390"},
-    {file = "flake8_isort-5.0.3-py3-none-any.whl", hash = "sha256:8c4ab431d87780d0c8336e9614e50ef11201bc848ef64ca017532dec39d4bf49"},
-]
-flake8-plugin-utils = [
-    {file = "flake8-plugin-utils-1.3.2.tar.gz", hash = "sha256:20fa2a8ca2decac50116edb42e6af0a1253ef639ad79941249b840531889c65a"},
-    {file = "flake8_plugin_utils-1.3.2-py3-none-any.whl", hash = "sha256:1fe43e3e9acf3a7c0f6b88f5338cad37044d2f156c43cb6b080b5f9da8a76f06"},
-]
-flake8-polyfill = [
-    {file = "flake8-polyfill-1.0.2.tar.gz", hash = "sha256:e44b087597f6da52ec6393a709e7108b2905317d0c0b744cdca6208e670d8eda"},
-    {file = "flake8_polyfill-1.0.2-py2.py3-none-any.whl", hash = "sha256:12be6a34ee3ab795b19ca73505e7b55826d5f6ad7230d31b18e106400169b9e9"},
-]
-flake8-print = [
-    {file = "flake8-print-5.0.0.tar.gz", hash = "sha256:76915a2a389cc1c0879636c219eb909c38501d3a43cc8dae542081c9ba48bdf9"},
-    {file = "flake8_print-5.0.0-py3-none-any.whl", hash = "sha256:84a1a6ea10d7056b804221ac5e62b1cee1aefc897ce16f2e5c42d3046068f5d8"},
-]
 future = [
     {file = "future-0.18.2.tar.gz", hash = "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d"},
 ]
@@ -1896,8 +1666,8 @@ pluggy = [
     {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"},
 ]
 pre-commit = [
-    {file = "pre_commit-1.21.0-py2.py3-none-any.whl", hash = "sha256:f92a359477f3252452ae2e8d3029de77aec59415c16ae4189bcfba40b757e029"},
-    {file = "pre_commit-1.21.0.tar.gz", hash = "sha256:8f48d8637bdae6fa70cc97db9c1dd5aa7c5c8bf71968932a380628c25978b850"},
+    {file = "pre_commit-2.20.0-py2.py3-none-any.whl", hash = "sha256:51a5ba7c480ae8072ecdb6933df22d2f812dc897d5fe848778116129a681aac7"},
+    {file = "pre_commit-2.20.0.tar.gz", hash = "sha256:a978dac7bc9ec0bcee55c18a277d553b0f419d259dadb4b9418ff2d00eb43959"},
 ]
 psutil = [
     {file = "psutil-5.9.4-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c1ca331af862803a42677c120aff8a814a804e09832f166f226bfd22b56feee8"},
@@ -1963,10 +1733,6 @@ pydantic = [
     {file = "pydantic-1.9.2-py3-none-any.whl", hash = "sha256:78a4d6bdfd116a559aeec9a4cfe77dda62acc6233f8b56a716edad2651023e5e"},
     {file = "pydantic-1.9.2.tar.gz", hash = "sha256:8cb0bc509bfb71305d7a59d00163d5f9fc4530f0881ea32c74ff4f74c85f3d3d"},
 ]
-pydocstyle = [
-    {file = "pydocstyle-6.1.1-py3-none-any.whl", hash = "sha256:6987826d6775056839940041beef5c08cc7e3d71d63149b48e36727f70144dc4"},
-    {file = "pydocstyle-6.1.1.tar.gz", hash = "sha256:1d41b7c459ba0ee6c345f2eb9ae827cab14a7533a88c5c6f7e94923f72df92dc"},
-]
 pyflakes = [
     {file = "pyflakes-3.0.1-py2.py3-none-any.whl", hash = "sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf"},
     {file = "pyflakes-3.0.1.tar.gz", hash = "sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd"},
@@ -2087,6 +1853,24 @@ rich = [
     {file = "rich-10.16.2-py3-none-any.whl", hash = "sha256:c59d73bd804c90f747c8d7b1d023b88f2a9ac2454224a4aeaf959b21eeb42d03"},
     {file = "rich-10.16.2.tar.gz", hash = "sha256:720974689960e06c2efdb54327f8bf0cdbdf4eae4ad73b6c94213cad405c371b"},
 ]
+ruff = [
+    {file = "ruff-0.0.192-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:eac8b1447b82744aa6d64303be081227bbc9a6c3577c793f4cf8ed5c09decb71"},
+    {file = "ruff-0.0.192-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:71d49353a7e8799bc879df9cc17e2f2a8664240617b25a11db517d97ed65b377"},
+    {file = "ruff-0.0.192-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:113562bc3298b680df41c4a26fb55d10ed6e38432e987437302e8959a26ca8f5"},
+    {file = "ruff-0.0.192-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e054e307f53af0db530eb1f8a810edd671561b512cf17954f8842bf7d786153b"},
+    {file = "ruff-0.0.192-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:340ea45b619a6729e518658bbd11b3650d1de89f87e01334d36f8f22c454fe89"},
+    {file = "ruff-0.0.192-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:a6b6863dcbac58666b87b840fd92a49a8791ee9c52bca2cc33e480380e6bf50d"},
+    {file = "ruff-0.0.192-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8661c278c6a9dd059e1327d1f4b9ea1e749da6e26b77e567f1566d31d561868a"},
+    {file = "ruff-0.0.192-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:51ffca9d10f139932479c5ca3838b5194ccef534677969e21b368daee5e13bca"},
+    {file = "ruff-0.0.192-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03921ceae167be1733be9b50774627a3a5874e0d0db6d0f3d22cb7d4ec0bc50d"},
+    {file = "ruff-0.0.192-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e41e8492413b17451eb5ffee07339dbbd6c7b89a6c968b4fcb7f1188505f418a"},
+    {file = "ruff-0.0.192-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:de4a29092aa026a484e174acb1ddc92c752836daebd4fb6d94078986531a2f4d"},
+    {file = "ruff-0.0.192-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6623a936cfc2547a9abbb888c9d5ffcae06897306b2649c4700a2f33bd08ada6"},
+    {file = "ruff-0.0.192-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:6bd9410f10fa3efbf34ef45afae8064c7e8fddcc3ea0c1755f4e2d12c0a197e5"},
+    {file = "ruff-0.0.192-py3-none-win32.whl", hash = "sha256:c3f79b3469b28961ea1737de1b2aa0e001c0f0d16fa17ead8fcfa7b9fbcd25d1"},
+    {file = "ruff-0.0.192-py3-none-win_amd64.whl", hash = "sha256:fb60b2ecba8e59ad553ba003dd529e716e1eef5cd660f1f94466765f57d60c17"},
+    {file = "ruff-0.0.192.tar.gz", hash = "sha256:a7ecadd76b938c3b05f74d4223fa7cf443563086cbdfae2189220c3be0bde648"},
+]
 scp = [
     {file = "scp-0.14.4-py2.py3-none-any.whl", hash = "sha256:29ddaafbfba60793a8a779694c97d8c150d365668a4ef67616c515b80a69ef2f"},
     {file = "scp-0.14.4.tar.gz", hash = "sha256:54699b92cb68ae34b5928c48a888eab9722a212502cba89aa795bd56597505bd"},
@ -2107,10 +1891,6 @@ sniffio = [
{file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"},
{file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"},
] ]
snowballstemmer = [
{file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"},
{file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"},
]
stackprinter = [ stackprinter = [
{file = "stackprinter-0.2.10-py3-none-any.whl", hash = "sha256:496e6cd058e7dd6f41e0c67e044f79a894297bec9fb80493a4fd094fac1e4677"}, {file = "stackprinter-0.2.10-py3-none-any.whl", hash = "sha256:496e6cd058e7dd6f41e0c67e044f79a894297bec9fb80493a4fd094fac1e4677"},
{file = "stackprinter-0.2.10.tar.gz", hash = "sha256:99d1ea6b91ffad96b28241edd7bcf071752b0cf694cab58d2335df5353acd086"}, {file = "stackprinter-0.2.10.tar.gz", hash = "sha256:99d1ea6b91ffad96b28241edd7bcf071752b0cf694cab58d2335df5353acd086"},

pyproject.toml
View file

@ -9,10 +9,10 @@ classifiers = [
"Development Status :: 5 - Production/Stable", "Development Status :: 5 - Production/Stable",
"Intended Audience :: Information Technology", "Intended Audience :: Information Technology",
"Operating System :: POSIX :: Linux", "Operating System :: POSIX :: Linux",
"Programming Language :: JavaScript", "Programming Language :: TypeScript",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Topic :: Internet", "Topic :: Internet",
"Topic :: System :: Networking", "Topic :: System :: Networking",
] ]
@ -57,24 +57,12 @@ xmltodict = "^0.12.0"
[tool.poetry.group.dev.dependencies] [tool.poetry.group.dev.dependencies]
bandit = "^1.7.4" bandit = "^1.7.4"
black = "^22.12.0" black = "^22.12.0"
flake8 = "^6.0.0"
flake8-bandit = "^4.1.1"
flake8-black = "^0.3.5"
flake8-breakpoint = "^1.1.0"
flake8-bugbear = "^22.12.6"
flake8-builtins = "^2.0.1"
flake8-comprehensions = "^3.10.1"
flake8-deprecated = "^2.0.1"
flake8-docstrings = "^1.6.0"
flake8-isort = "^5.0.3"
flake8-plugin-utils = "^1.3.2"
flake8-polyfill = "^1.0.2"
flake8-print = "^5.0.0"
isort = "^5.10.1" isort = "^5.10.1"
pep8-naming = "^0.13.2" pep8-naming = "^0.13.2"
pre-commit = "^1.21.0" pre-commit = "^2.20.0"
pytest = "^7.2.0" pytest = "^7.2.0"
pytest-dependency = "^0.5.1" pytest-dependency = "^0.5.1"
ruff = "^0.0.192"
stackprinter = "^0.2.10" stackprinter = "^0.2.10"
taskipy = "^1.10.3" taskipy = "^1.10.3"
@ -94,19 +82,20 @@ known_third_party = ["starlette", "fastapi", "inquirer"]
length_sort = true length_sort = true
line_length = 100 line_length = 100
multi_line_output = 3 multi_line_output = 3
profile = "black"
skip_glob = "hyperglass/api/examples/*.py" skip_glob = "hyperglass/api/examples/*.py"
[tool.pyright] [tool.pyright]
exclude = ["**/node_modules", "**/ui", "**/__pycache__"] exclude = ["**/node_modules", "**/ui", "**/__pycache__"]
include = ["hyperglass"] include = ["hyperglass"]
pythonVersion = "3.8" pythonVersion = "3.9"
reportMissingImports = true reportMissingImports = true
reportMissingTypeStubs = true reportMissingTypeStubs = true
[tool.taskipy.tasks] [tool.taskipy.tasks]
check = {cmd = "task lint && task ui-lint", help = "Run all lint checks"} check = {cmd = "task lint && task ui-lint", help = "Run all lint checks"}
format = {cmd = "black hyperglass", help = "Run Black"} format = {cmd = "black hyperglass", help = "Run Black"}
lint = {cmd = "flake8 hyperglass", help = "Run Flake8"} lint = {cmd = "ruff hyperglass", help = "Run Ruff Linter"}
sort = {cmd = "isort hyperglass", help = "Run iSort"} sort = {cmd = "isort hyperglass", help = "Run iSort"}
start = {cmd = "python3 -m hyperglass.main", help = "Start hyperglass"} start = {cmd = "python3 -m hyperglass.main", help = "Start hyperglass"}
start-asgi = {cmd = "uvicorn hyperglass.api:app", help = "Start hyperglass via Uvicorn"} start-asgi = {cmd = "uvicorn hyperglass.api:app", help = "Start hyperglass via Uvicorn"}
@ -118,3 +107,59 @@ ui-lint = {cmd = "yarn --cwd ./hyperglass/ui/ lint", help = "Run ESLint"}
ui-typecheck = {cmd = "yarn --cwd ./hyperglass/ui/ typecheck", help = "Run TypeScript Check"} ui-typecheck = {cmd = "yarn --cwd ./hyperglass/ui/ typecheck", help = "Run TypeScript Check"}
upgrade = {cmd = "python3 version.py", help = "Upgrade hyperglass version"} upgrade = {cmd = "python3 version.py", help = "Upgrade hyperglass version"}
yarn = {cmd = "yarn --cwd ./hyperglass/ui/", help = "Run a yarn command from the UI directory"} yarn = {cmd = "yarn --cwd ./hyperglass/ui/", help = "Run a yarn command from the UI directory"}
[tool.ruff]
exclude = [
".git",
"__pycache__",
"hyperglass/ui",
"hyperglass/plugins/external",
"hyperglass/api/examples/*.py",
"hyperglass/compat/_sshtunnel.py",
]
ignore = [
# "W503",
"RET504",
"D202", # "S403",
# "S301",
# "S404",
"E731",
"D203", # Blank line before docstring.
"D213", # Multiline docstring summary on second line.
"D402",
"D406",
"D407",
"B008",
"I001",
"D418", # No docstring on overloaded functions.
"N818", # Error suffix on custom exceptions.
"RET501", # Explicitly return None
]
line-length = 100
select = ["B", "C", "D", "E", "F", "I", "N", "S", "RET", "W"]
[tool.ruff.mccabe]
max-complexity = 10
[tool.ruff.per-file-ignores]
"hyperglass/main.py" = ["E402"]
# Disable classmethod warning for validator decorators
"hyperglass/configuration/models/*.py" = ["N805"]
"hyperglass/defaults/*/*.py" = ["E501"]
"hyperglass/models/*.py" = ["N805", "E501"]
"hyperglass/models/api/*.py" = ["N805", "E501"]
"hyperglass/models/commands/*.py" = ["N805", "E5"]
"hyperglass/parsing/models/*.py" = ["N805"]
# Disable unused import warning for modules
"hyperglass/*/__init__.py" = ["F401"]
"hyperglass/models/*/__init__.py" = ["F401"]
# Disable assertion and docstring checks on tests.
"hyperglass/**/test_*.py" = ["S101", "D103", "D100", "D104"]
"hyperglass/**/tests/*.py" = ["S101", "D103", "D100", "D104"]
"hyperglass/**/tests/__init__.py" = ["D103", "D100", "D104"]
"hyperglass/state/hooks.py" = ["F811"]
# Ignore whitespace in docstrings
"hyperglass/cli/static.py" = []
"hyperglass/cli/*.py" = ["B904"]
# Ignore docstring standards
"hyperglass/cli/main.py" = ["D400", "D403", "D415"]

version.py
View file

@ -77,10 +77,10 @@ class Version:
if self._did_update: if self._did_update:
old, new = self.upgrade_path old, new = self.upgrade_path
return f"Upgraded {self.name} from {old}{new}" return f"Upgraded {self.name} from {old}{new}"
elif self._did_check: if self._did_check:
return f"No update required for {self.name} from version {self.old_version}" return f"No update required for {self.name} from version {self.old_version}"
else:
return f"{self.name} has not been checked" return f"{self.name} has not been checked"
def upgrade(self) -> None: def upgrade(self) -> None:
"""Find a matching current version and upgrade it to the new version.""" """Find a matching current version and upgrade it to the new version."""
@ -121,7 +121,10 @@ def update_versions(new_version: str) -> None:
"""Update hyperglass version in all package files.""" """Update hyperglass version in all package files."""
for name, file, pattern in UPGRADES: for name, file, pattern in UPGRADES:
with Version( with Version(
name=name, file=file, line_pattern=pattern, new_version=new_version, name=name,
file=file,
line_pattern=pattern,
new_version=new_version,
) as version: ) as version:
version.upgrade() version.upgrade()
typer.echo(str(version)) typer.echo(str(version))
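The version.py refactor above is consistent with the RET (flake8-return) rules now enabled in `select`: branches following a `return` flatten into early returns. A standalone sketch of the pattern, with hypothetical names:

```python
def status(did_update: bool, did_check: bool) -> str:
    """Illustrate the early-return style that ruff's RET rules encourage."""
    if did_update:
        return "upgraded"
    if did_check:  # no elif needed: the branch above already returned
        return "no update required"
    return "not checked"  # a trailing else would be superfluous here
```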