pre-commit linting (#259)

* pre-commit linting

* fix pydocs
This commit is contained in:
Henning Jacobs
2020-04-25 21:01:21 +02:00
committed by GitHub
parent b409e4f375
commit 76a498bacc
23 changed files with 567 additions and 335 deletions

View File

@@ -7,11 +7,14 @@ def expo(n: int, base=2, factor=1, max_value=None):
Adapted from https://github.com/litl/backoff/blob/master/backoff.py (MIT License)
Args:
----
n: The exponent.
base: The mathematical base of the exponentiation operation
factor: Factor to multiply the exponentiation by.
max_value: The maximum value to yield. Once the value in the
true exponential sequence exceeds this, the value
of max_value will forever after be yielded.
"""
a = factor * base ** n
if max_value is None or a < max_value:
@@ -27,8 +30,12 @@ def random_jitter(value, jitter=1):
This adds up to 1 second of additional time to the original value.
Prior to backoff version 1.2 this was the default jitter behavior.
Args:
----
value: The unadulterated backoff value.
jitter: Jitter amount.
"""
return value + random.uniform(0, jitter)
@@ -43,6 +50,8 @@ def full_jitter(value):
(http://www.awsarchitectureblog.com/2015/03/backoff.html)
Args:
----
value: The unadulterated backoff value.
"""
return random.uniform(0, value)

View File

@@ -2,14 +2,15 @@ import logging
import re
import time
from pathlib import Path
from typing import List
from urllib.parse import urljoin
import requests
import tokens
from pykube import HTTPClient
from pykube import KubeConfig
from requests.auth import AuthBase
from pykube import HTTPClient, KubeConfig
# default URL points to kubectl proxy
DEFAULT_CLUSTERS = "http://localhost:8001/"
CLUSTER_ID_INVALID_CHARS = re.compile("[^a-z0-9:-]")
@@ -20,7 +21,7 @@ tokens.configure(from_file_only=True)
def generate_cluster_id(url: str):
"""Generate some "cluster ID" from given API server URL"""
"""Generate some "cluster ID" from given API server URL."""
for prefix in ("https://", "http://"):
if url.startswith(prefix):
url = url[len(prefix) :]
@@ -28,7 +29,8 @@ def generate_cluster_id(url: str):
class StaticAuthorizationHeaderAuth(AuthBase):
"""Static authentication with given "Authorization" header"""
"""Static authentication with given "Authorization" header."""
def __init__(self, authorization):
self.authorization = authorization
@@ -39,8 +41,8 @@ class StaticAuthorizationHeaderAuth(AuthBase):
class OAuthTokenAuth(AuthBase):
"""Dynamic authentication using the "tokens" library to load OAuth tokens from file
(potentially mounted from a Kubernetes secret)"""
"""Dynamic authentication using the "tokens" library to load OAuth tokens from file (potentially mounted from a Kubernetes secret)."""
def __init__(self, token_name):
self.token_name = token_name
@@ -105,7 +107,7 @@ class ClusterRegistryDiscoverer:
self._url = cluster_registry_url
self._cache_lifetime = cache_lifetime
self._last_cache_refresh = 0
self._clusters = []
self._clusters: List[Cluster] = []
self._session = requests.Session()
self._session.auth = OAuthTokenAuth("read-only")
@@ -127,8 +129,10 @@ class ClusterRegistryDiscoverer:
)
self._clusters = clusters
self._last_cache_refresh = time.time()
except:
logger.exception(f"Failed to refresh from cluster registry {self._url}")
except Exception as e:
logger.exception(
f"Failed to refresh from cluster registry {self._url}: {e}"
)
def get_clusters(self):
now = time.time()

View File

@@ -2,12 +2,12 @@ import datetime
import logging
import time
import requests
import pykube
from pykube import Pod, Node
from pykube.objects import APIObject, NamespacedAPIObject
import requests
from pykube import Node
from pykube import Pod
from pykube.objects import APIObject
from pykube.objects import NamespacedAPIObject
from .utils import get_short_error_message

View File

@@ -1,5 +1,4 @@
#!/usr/bin/env python3
import gevent.monkey
gevent.monkey.patch_all()
@@ -15,6 +14,7 @@ import os
import signal
import time
import kube_ops_view
from typing import Union
from pathlib import Path
from flask import Flask, redirect, url_for
@@ -39,7 +39,7 @@ logger = logging.getLogger(__name__)
SERVER_STATUS = {"shutdown": False}
AUTHORIZE_URL = os.getenv("AUTHORIZE_URL")
ACCESS_TOKEN_URL = os.getenv("ACCESS_TOKEN_URL")
APP_URL = os.getenv("APP_URL")
APP_URL = os.getenv("APP_URL") or ""
SCOPE = os.getenv("SCOPE")
app = Flask(__name__)
@@ -134,7 +134,7 @@ def event(cluster_ids: set):
@app.route("/events")
@authorize
def get_events():
"""SSE (Server Side Events), for an EventSource"""
"""SSE (Server Side Events), for an EventSource."""
cluster_ids = set()
for _id in flask.request.args.get("cluster_ids", "").split():
if _id:
@@ -165,7 +165,7 @@ def redeem_screen_token(token: str):
)
try:
app.store.redeem_screen_token(token, remote_addr)
except:
except Exception:
flask.abort(401)
flask.session["auth_token"] = (token, "")
return redirect(urljoin(APP_URL, "/"))
@@ -313,6 +313,13 @@ def main(
"pod_link_url_template": pod_link_url_template,
}
discoverer: Union[
MockDiscoverer,
ClusterRegistryDiscoverer,
KubeconfigDiscoverer,
StaticClusterDiscoverer,
]
if mock:
cluster_query = query_mock_cluster
discoverer = MockDiscoverer()

View File

@@ -1,6 +1,6 @@
import time
import random
import string
import time
def hash_int(x: int):
@@ -35,7 +35,7 @@ def generate_mock_pod(index: int, i: int, j: int):
phase = pod_phases[hash_int((index + 1) * (i + 1) * (j + 1)) % len(pod_phases)]
containers = []
for k in range(1 + j % 2):
for _ in range(1 + j % 2):
# generate "more real data"
requests_cpu = random.randint(10, 50)
requests_memory = random.randint(64, 256)
@@ -88,7 +88,7 @@ def generate_mock_pod(index: int, i: int, j: int):
def query_mock_cluster(cluster):
"""Generate deterministic (no randomness!) mock data"""
"""Generate deterministic (no randomness!) mock data."""
index = int(cluster.id.split("-")[-1])
nodes = {}
for i in range(10):

View File

@@ -7,7 +7,8 @@ CREDENTIALS_DIR = os.getenv("CREDENTIALS_DIR", "")
class OAuth2ConsumerBlueprintWithClientRefresh(OAuth2ConsumerBlueprint):
"""Same as flask_dance.consumer.OAuth2ConsumerBlueprint, but loads client credentials from file"""
"""Same as flask_dance.consumer.OAuth2ConsumerBlueprint, but loads client credentials from file."""
def refresh_credentials(self):
with open(os.path.join(CREDENTIALS_DIR, "authcode-client-id")) as fd:

View File

@@ -2,11 +2,14 @@ import json
import logging
import random
import string
import redis
import time
from redlock import Redlock
from abc import ABC
from abc import abstractmethod
from queue import Queue
from typing import Set
import redis
from redlock import Redlock
logger = logging.getLogger(__name__)
@@ -14,21 +17,21 @@ ONE_YEAR = 3600 * 24 * 365
def generate_token(n: int):
    """Generate a random ASCII token of length n.

    Args:
    ----
        n: Desired token length in characters.
    """
    # random.SystemRandom draws from os.urandom(), so it is suitable
    # for security-sensitive tokens (unlike the default Mersenne Twister).
    rng = random.SystemRandom()
    alphabet = string.ascii_letters + string.digits
    # Generator expression feeds join directly; loop variable is unused.
    return "".join(rng.choice(alphabet) for _ in range(n))
def generate_token_data():
    """Generate screen token data for storing.

    Returns a dict with the random token string, its creation time
    (seconds since the epoch), and an expiry timestamp one year out.
    """
    token = generate_token(10)
    now = time.time()
    # ONE_YEAR is a module-level constant (seconds); tokens are long-lived.
    return {"token": token, "created": now, "expires": now + ONE_YEAR}
def check_token(token: str, remote_addr: str, data: dict):
"""Check whether the given screen token is valid, raises exception if not"""
"""Check whether the given screen token is valid, raises exception if not."""
now = time.time()
if (
data
@@ -41,11 +44,19 @@ def check_token(token: str, remote_addr: str, data: dict):
raise ValueError("Invalid token")
class AbstractStore:
class AbstractStore(ABC):
@abstractmethod
def set(self, key, val):
pass
@abstractmethod
def get(self, key):
return None
def get_cluster_ids(self):
return self.get("cluster-ids") or []
def set_cluster_ids(self, cluster_ids: set):
def set_cluster_ids(self, cluster_ids: Set[str]):
self.set("cluster-ids", list(sorted(cluster_ids)))
def get_cluster_status(self, cluster_id: str) -> dict:
@@ -62,7 +73,8 @@ class AbstractStore:
class MemoryStore(AbstractStore):
"""Memory-only backend, mostly useful for local debugging"""
"""Memory-only backend, mostly useful for local debugging."""
def __init__(self):
self._data = {}
@@ -110,7 +122,8 @@ class MemoryStore(AbstractStore):
class RedisStore(AbstractStore):
"""Redis-based backend for deployments with replicas > 1"""
"""Redis-based backend for deployments with replicas > 1."""
def __init__(self, url: str):
logger.info("Connecting to Redis on {}..".format(url))
@@ -146,14 +159,14 @@ class RedisStore(AbstractStore):
yield (event_type, json.loads(data))
def create_screen_token(self):
"""Generate a new screen token and store it in Redis"""
"""Generate a new screen token and store it in Redis."""
data = generate_token_data()
token = data["token"]
self._redis.set("screen-tokens:{}".format(token), json.dumps(data))
return token
def redeem_screen_token(self, token: str, remote_addr: str):
"""Validate the given token and bind it to the IP"""
"""Validate the given token and bind it to the IP."""
redis_key = "screen-tokens:{}".format(token)
data = self._redis.get(redis_key)
if not data:

View File

@@ -1,11 +1,14 @@
import logging
import time
from typing import Callable
import gevent
import json_delta
import requests.exceptions
from .backoff import expo, random_jitter
from .backoff import expo
from .backoff import random_jitter
from .cluster_discovery import Cluster
from .utils import get_short_error_message
logger = logging.getLogger(__name__)
@@ -15,7 +18,7 @@ def calculate_backoff(tries: int):
return random_jitter(expo(tries, factor=2, max_value=60), jitter=4)
def handle_query_failure(e: Exception, cluster, backoff: dict):
def handle_query_failure(e: Exception, cluster: Cluster, backoff: dict):
if not backoff:
backoff = {}
tries = backoff.get("tries", 0) + 1
@@ -37,7 +40,7 @@ def handle_query_failure(e: Exception, cluster, backoff: dict):
def update_clusters(
cluster_discoverer,
query_cluster: callable,
query_cluster: Callable[[Cluster], dict],
store,
query_interval: float = 5,
debug: bool = False,
@@ -109,8 +112,8 @@ def update_clusters(
store.set_cluster_data(cluster.id, data)
store.set_cluster_status(cluster.id, status)
store.set_cluster_ids(cluster_ids)
except:
logger.exception("Failed to update")
except Exception as e:
logger.exception(f"Failed to update: {e}")
finally:
store.release_lock(lock)
# sleep 1-2 seconds

View File

@@ -2,7 +2,7 @@ import requests.exceptions
def get_short_error_message(e: Exception):
"""Generate a reasonable short message why the HTTP request failed"""
"""Generate a reasonable short message why the HTTP request failed."""
if isinstance(e, requests.exceptions.RequestException) and e.response is not None:
# e.g. "401 Unauthorized"