Commit 36bf0446 authored by Dom Sekotill's avatar Dom Sekotill
Browse files

Merge branch 'develop' into 'main'

Combine develop with main

See merge request !2
parents b589c911 8ddb7aa5
Loading
Loading
Loading
Loading
Loading
+3 −0
Original line number Diff line number Diff line
# Python
*.py[co]
dist/

# Testing
/results/
+58 −0
Original line number Diff line number Diff line
# Find a suitable commit for determining changed files
#
#
# Copyright 2022 Dom Sekotill <dom.sekotill@kodo.org.uk>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


pre_commit_run() (
	# Run `pre-commit` against only the files changed since a suitable base
	# commit, falling back to --all-files when no base can be determined.
	#
	# Extra arguments are passed through to `pre-commit run`.
	# Runs in a subshell so `set -eu` and helper definitions do not leak.
	set -eu
	declare -a PRE_COMMIT_ARGS

	find_lca() {
		# Deepen the shallow clone until it reaches the last common ancestor
		# of the two branches, then record that boundary commit in FROM_REF.
		# If no shallow boundary remains, FROM_REF is left unset.
		local repo=$CI_REPOSITORY_URL
		local current_branch=$1 other_branch=$2

		# See https://stackoverflow.com/questions/63878612/git-fatal-error-in-object-unshallow-sha-1
		# and https://stackoverflow.com/questions/4698759/converting-git-repository-to-shallow/53245223#53245223
		# for background on what `git repack -d` is doing here.
		git repack -qd

		git fetch -q "$repo" --shallow-exclude="$other_branch" "$current_branch"
		git fetch -q "$repo" --deepen=1 "$current_branch"

		FROM_REF=$(git rev-parse -q --revs-only --verify shallow) || unset FROM_REF
	}

	fetch_ref() {
		# Fetch a single commit shallowly and record it as the comparison base
		git fetch -q "$CI_REPOSITORY_URL" --depth=1 "$1"
		FROM_REF=$1
	}

	if [[ -v CI_COMMIT_BEFORE_SHA ]] && [[ ! $CI_COMMIT_BEFORE_SHA =~ ^0{40}$ ]]; then
		# Push pipeline with a known previous tip (all-zeros means a new branch)
		fetch_ref "$CI_COMMIT_BEFORE_SHA"
	elif [[ -v CI_MERGE_REQUEST_TARGET_BRANCH_NAME ]]; then
		# Merge-request pipeline: compare against the target branch
		find_lca "$CI_MERGE_REQUEST_SOURCE_BRANCH_NAME" "$CI_MERGE_REQUEST_TARGET_BRANCH_NAME"
	elif [[ $CI_COMMIT_BRANCH != "$CI_DEFAULT_BRANCH" ]]; then
		# Branch pipeline: compare against the default branch
		find_lca "$CI_COMMIT_BRANCH" "$CI_DEFAULT_BRANCH"
	fi

	if [[ -v FROM_REF ]]; then
		PRE_COMMIT_ARGS=( --from-ref="$FROM_REF" --to-ref="$CI_COMMIT_SHA" )
	else
		PRE_COMMIT_ARGS=( --all-files )
	fi

	pre-commit run "$@" "${PRE_COMMIT_ARGS[@]}"
)
+90 −46
Original line number Diff line number Diff line
# Optional project CI variables to set:
#
# SAFETY_API_KEY:
#   Set to your API key for accessing up-to-date package security information

stages:
- test
- build
- test
- publish

image: python:3.9

workflow:
  rules:
  - if: $CI_PIPELINE_SOURCE == "merge_request_event"
  - if: $CI_OPEN_MERGE_REQUESTS
    when: never
  - when: always


.python:
  image: python:3.9
  variables:
    PIP_CACHE_DIR: $CI_PROJECT_DIR/cache/pkg
  PRE_COMMIT_HOME: $CI_PROJECT_DIR/cache/pre-commit

.cached:
    PIP_NO_COMPILE: "true"
    PIP_NO_CLEAN: "true"
  cache:
    key: $CI_JOB_IMAGE
    paths: [cache]
  before_script:
  - pip install "pip>=21.3"


Build Package:
  stage: build
  extends: [.python]
  script:
  - pip install build
  - python -m build
  artifacts:
    paths: [dist]


Pin:
  # Pin dependencies in requirements.txt for reproducing pipeline results
  stage: test
  extends: [.python]
  needs: []
  script:
  - pip install --prefer-binary -e .
  - pip freeze --exclude-editable | tee requirements.txt
  artifacts:
    paths: [requirements.txt]


Dependency Check:
  stage: test
  image: pyupio/safety:latest
  needs: [Pin]
  allow_failure: true
  script:
  - safety check -r requirements.txt


Code Checks:
  stage: test
  extends: [.cached]
  image: docker.kodo.org.uk/ci-images/pre-commit:2.15.0-1
  needs: []
  variables:
    HOOK_STAGE: commit
    FROM_REF: $CI_DEFAULT_BRANCH
    PRE_COMMIT_HOME: $CI_PROJECT_DIR/cache/pre-commit
  cache:
    key: $CI_JOB_IMAGE
    paths: [cache]
  rules:
  - if: $CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
    variables:
      FROM_REF: $CI_COMMIT_BEFORE_SHA
  - if: $CI_PIPELINE_SOURCE == "push"
  - if: $CI_PIPELINE_SOURCE == "merge_request_event"
    variables:
      HOOK_STAGE: merge-commit
  script:
  - git fetch $CI_REPOSITORY_URL $FROM_REF:FROM_REF -f
  - pre-commit run
    --hook-stage=$HOOK_STAGE
    --from-ref=FROM_REF
    --to-ref=HEAD
  - source .gitlab-ci.pre-commit-run.bash
  - pre_commit_run --hook-stage=commit
  - pre_commit_run --hook-stage=push


Commit Graph Check:
  extends: ["Code Checks"]
  variables:
    HOOK_STAGE: push
  rules:
  - if: $CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
    variables:
      FROM_REF: $CI_COMMIT_BEFORE_SHA
  - if: $CI_PIPELINE_SOURCE == "merge_request_event"
Unit Tests:
  stage: test
  extends: [.python]
  needs: [Pin]
  script:
  - pip install -r requirements.txt -e . coverage[toml] nose
  - coverage run -m nose tests
    --verbose
    --with-xunit --xunit-file=results/xunit.xml
  after_script:
  - coverage report
  - coverage json
  - coverage xml
  - coverage html
  coverage: '/^TOTAL .* (\d{1,3}\.\d{2})%$/'
  artifacts:
    paths: [results]
    reports:
      cobertura: results/coverage.xml
      junit: results/xunit.xml


Check Tag:
  stage: test
  extends: [.cached]
  extends: [.python]
  needs: ["Build Package"]
  rules:
  - if: $CI_COMMIT_TAG =~ /^v[0-9]/
  script:
  - pip install tomli packaging
  - pip install packaging pkginfo
  - |
    python <<-END
    import tomli
    from glob import glob
    from packaging.version import Version
    from pkginfo import Wheel

    with open("pyproject.toml", "rb") as f:
      proj = tomli.load(f)

    assert Version("$CI_COMMIT_TAG") == Version(proj["tool"]["poetry"]["version"])
    wheel_path = glob("dist/*.whl")[0]
    wheel = Wheel(wheel_path)
    assert Version("$CI_COMMIT_TAG") == Version(wheel.version)
    END


Build Package:
  stage: build
  extends: [.cached]
  script:
  - pip install build
  - python -m build
  artifacts:
    paths: [dist]


Upload Package:
  stage: publish
  extends: [.cached]
  extends: [.python]
  needs: ["Build Package"]
  rules:
  - if: $CI_COMMIT_TAG =~ /^v[0-9]/
  script:
+12 −5
Original line number Diff line number Diff line
@@ -35,11 +35,13 @@ repos:
  - id: gitlint

- repo: https://code.kodo.org.uk/dom/pre-commit-hooks
  rev: v0.6
  rev: v0.6.1
  hooks:
  - id: check-executable-modes
  - id: check-for-squash
  - id: copyright-notice
    args: [--min-size=100]
    stages: [commit, manual]
  - id: protect-first-parent

- repo: https://github.com/pre-commit/pygrep-hooks
@@ -73,10 +75,10 @@ repos:
    types_or: [python, pyi]
    stages: [commit, manual]

- repo: https://github.com/domsekotill/flakehell
  rev: 5a7ecdc
- repo: https://github.com/flakeheaven/flakeheaven
  rev: 0.11.0
  hooks:
  - id: flakehell
  - id: flakeheaven
    additional_dependencies:
    - flake8-bugbear
    - flake8-docstrings
@@ -85,10 +87,15 @@ repos:
    - flake8-sfs

- repo: https://github.com/pre-commit/mirrors-mypy
  rev: v0.910
  rev: v0.942
  hooks:
  - id: mypy
    args: [--follow-imports=silent]
    additional_dependencies:
    - packaging >=21
    - types-orjson
    - types-requests
    - types-urllib3
    - trio-typing[mypy] ~=0.6
    - xdg ~=5.1
    - git+https://code.kodo.org.uk/dom/type-stubs.git#type-stubs[jsonpath,parse]
+300 −0
Original line number Diff line number Diff line
#  Copyright 2021,2022  Dominik Sekotill <dom.sekotill@kodo.org.uk>
#
#  This Source Code Form is subject to the terms of the Mozilla Public
#  License, v. 2.0. If a copy of the MPL was not distributed with this
#  file, You can obtain one at http://mozilla.org/MPL/2.0/.

"""
Start and manage a test Kubernetes cluster with Kubernetes-in-Docker (kind)
"""

from __future__ import annotations

import platform
import re
from abc import ABC
from abc import abstractmethod
from io import BytesIO
from pathlib import Path
from shutil import copyfileobj
from tarfile import TarFile
from typing import IO
from typing import Iterator

import requests
import xdg
from packaging.version import Version

from behave_utils.json import JSONObject
from behave_utils.url import URL

# Shared on-disk cache for downloaded binaries, under the XDG cache home
# (e.g. ~/.cache/behave-testing)
CACHE_DIR: Path = xdg.xdg_cache_home() / "behave-testing"


class DownloadableExecutable(ABC):
	"""
	Base class for downloading static binaries to local paths

	Subclasses should implement `get_latest` and `get_stream` methods.  They may use the
	"kernel", "arch" and "goarch" attributes to select the correct source for the current
	platform.

	Subclasses must also provide the "name" attribute, either as a class or instance
	attribute.  It is used to generate a cache path.

	Users of the subclasses SHOULD ONLY call the `get_binary` method to get a path pointing
	to a locally cached copy of the downloaded binary.
	"""

	# Map of `uname -m` output to architecture values accepted by Go
	# Many Go binaries include the architecture value accepted by `go` in their names, so
	# the "goarch" class attribute is added for convenience, generated from this map.
	# This map may not be fully complete. Only non-equal values need to be added.
	GOARCH_MAP = {
		"i386": "386",
		"i686": "386",
		"x86": "386",

		"x86_64": "amd64",

		"armv6l": "arm",
		"armv7l": "arm",

		"aarch64": "arm64",
	}

	kernel = platform.system().lower()
	arch = platform.machine()
	goarch = GOARCH_MAP.get(arch, arch)

	# Short name of the binary; supplied by subclasses, used in the cache path
	name: str

	def __init__(self, version: str = "latest"):
		self.version = version

	@abstractmethod
	def get_latest(self, session: requests.Session) -> str:
		"""
		Return the latest release string for a supported binary

		Implementations must discover and return the latest release or tag string

		`session` is provided for performing HTTP requests.  Although its use is not
		required, it has an automatic code check hook so there is no need to manually check
		the return code and handle errors.
		"""
		raise NotImplementedError

	@abstractmethod
	def get_stream(self, session: requests.Session, version: str) -> IO[bytes]:
		"""
		Return a stream that emits the requested version of a supported binary

		Implementations must perform a request for the binary and return a file-like reader

		The return object must be a readable FileIO like instance, returning bytes.  If the
		source is uncompressed the "raw" attribute of a `requests.Response` object opened
		with `stream=True` will suffice.  See examples below.

		`version` specifies the wanted version of the binary, which MAY be different from
		the "version" instance attribute.  Other attributes such as "kernel" and "arch" (or
		"goarch" if appropriate) MUST be honoured when selecting a source.

		`session` is provided for performing HTTP requests.  Although its use is not
		required, it has an automatic code check hook so there is no need to manually check
		the return code and handle errors.


		Examples:

		1) Get an uncompressed binary:

		>>> def get_stream(session: requests.Session, version: str) -> IO[bytes]:
		...     url = "https://example.com/binary"
		...     return session.get(url, stream=True).raw


		2) Get a binary from a GZip compressed tar archive, storing the tar file in memory:

		Note: Avoid this for very large downloads. Unfortunately the Python tarfile
		implementation cannot handle non-seekable streams.

		>>> from tarfile import TarFile

		>>> def get_stream(session: requests.Session, version: str) -> IO[bytes]:
		...     url = "https://example.com/binary.tar.gz"
		...     buf = BytesIO(session.get(url).content)
		...     tar = TarFile.gzopen("buffer", fileobj=buf)
		...     return tar.extractfile(self.name)


		3) Get a binary from a GZip compressed tar archive, storing the tar file in the file
		system:

		>>> from tarfile import TarFile
		>>> from tempfile import TemporaryFile
		>>> from shutil import copyfileobj

		>>> def get_stream(session: requests.Session, version: str) -> IO[bytes]:
		...     url = "https://example.com/binary.tar.gz"
		...     resp = session.get(url, stream=True)
		...     temp = TemporaryFile()
		...     copyfileobj(resp.raw, temp)
		...     tar = TarFile.gzopen("buffer", fileobj=temp)
		...     return tar.extractfile(self.name)
		"""
		raise NotImplementedError

	def get_binary(self) -> Path:
		"""
		Return a Path to a locally cached executable, downloading it if necessary
		"""
		CACHE_DIR.mkdir(0o775, True, True)
		version = self.version

		with requests.Session() as session:
			# Raise on any non-2xx response so implementations need not check
			assert isinstance(session.hooks["response"], list)
			session.hooks["response"].append(lambda r, *a, **k: r.raise_for_status())

			if version == "latest":
				version = self.get_latest(session)

			binary = CACHE_DIR / f"{self.name}-{version}-{self.kernel}-{self.arch}"
			if binary.exists():
				return binary

			stream = self.get_stream(session, version)

			try:
				with binary.open("wb") as f:
					copyfileobj(stream, f)
			except BaseException:
				# Remove any partial download; missing_ok prevents a
				# FileNotFoundError here from masking the original error when
				# the file was never created
				binary.unlink(missing_ok=True)
				raise
			finally:
				# Release the HTTP/tarfile stream returned by get_stream()
				stream.close()
			binary.chmod(0o755)

		return binary


class DownloadableDocker(DownloadableExecutable):
	"""
	Download class for the Docker client binary
	"""

	URL = "https://download.docker.com/{kernel}/static/stable/{arch}/docker-{version}.tgz"
	LATEST_URL = "https://download.docker.com/{kernel}/static/stable/{arch}/"
	VERSION_RE = re.compile(rb'href="docker-(?P<release>[0-9.]+).tgz"')

	name = "docker"

	def get_latest(self, session: requests.Session) -> str:
		"""
		Return the latest Docker release

		Raises ValueError if no release links are found in the download index.
		"""
		url = self.LATEST_URL.format(kernel=self.kernel, arch=self.arch)
		doc = session.get(url).content
		# default=None avoids max()'s opaque "empty sequence" error when the
		# index page contains no matching release links
		latest = max(self._extract_versions(doc), default=None)
		if latest is None:
			raise ValueError(f"no Docker release links found at {url}")
		return str(latest)

	def get_stream(self, session: requests.Session, version: str) -> IO[bytes]:
		"""
		Return a stream that emits the Docker CLI binary
		"""
		url = self.URL.format(version=version, kernel=self.kernel, arch=self.arch)
		buf = BytesIO(session.get(url).content)
		tar = TarFile.gzopen("buffer", fileobj=buf)
		stream = tar.extractfile("docker/docker")
		if stream is None:
			raise FileNotFoundError(f"'docker/docker' in {url}")
		return stream

	@classmethod
	def _extract_versions(cls, doc: bytes) -> Iterator[Version]:
		# Yield every release version linked from the download index page
		for match in cls.VERSION_RE.finditer(doc):
			yield Version(match.group("release").decode())


class DownloadableKubeTools(DownloadableExecutable):
	"""
	Download class for the kubernetes binaries "kubectl", "kubelet" and "kubeadm"
	"""

	URL = "https://dl.k8s.io/release/{version}/bin/{kernel}/{arch}/{name}"
	LATEST_URL = "https://dl.k8s.io/release/stable.txt"

	def __init__(self, name: str, version: str = "latest"):
		DownloadableExecutable.__init__(self, version)
		self.name = name
		# Memoised result of get_latest(); empty string means "not fetched yet"
		self._latest = ""

	def get_latest(self, session: requests.Session) -> str:
		"""
		Return the latest release of Kubernetes

		The result is cached on the instance, so repeated calls perform only
		one HTTP request.
		"""
		if not self._latest:
			self._latest = session.get(self.LATEST_URL).content.decode().strip()
		return self._latest

	def get_stream(self, session: requests.Session, version: str) -> IO[bytes]:
		"""
		Return a stream that emits the requested Kubernetes binary
		"""
		url = self.URL.format(version=version, kernel=self.kernel, arch=self.goarch, name=self.name)
		stream: IO[bytes] = session.get(url, stream=True).raw
		return stream


class DownloadableCrictl(DownloadableExecutable):
	"""
	Download class for the "crictl" binary
	"""

	URL = "https://github.com/kubernetes-sigs/cri-tools/releases/download/{version}/crictl-{version}-{kernel}-{arch}.tar.gz"
	LATEST_URL = "https://api.github.com/repos/kubernetes-sigs/cri-tools/releases/latest"

	name = "cri"

	def get_latest(self, session: requests.Session) -> str:
		"""
		Return the latest "crictl" release
		"""
		release = JSONObject.from_string(session.get(self.LATEST_URL).content)
		# The release name looks like "cri-tools vX.Y.Z"; keep only the tag part
		return release.path("$.name", str).replace("cri-tools ", "")

	def get_stream(self, session: requests.Session, version: str) -> IO[bytes]:
		"""
		Return a stream that emits the requested "crictl" binary
		"""
		url = self.URL.format(version=version, kernel=self.kernel, arch=self.goarch)
		archive = TarFile.gzopen("buffer", fileobj=BytesIO(session.get(url).content))
		member = archive.extractfile("crictl")
		if member is None:
			raise FileNotFoundError(f"'crictl' in {url}")
		return member


class DownloadableKind(DownloadableExecutable):
	"""
	Download class for the "kind" (Kubernetes-in-Docker) binary
	"""

	URL = "https://kind.sigs.k8s.io/dl/{version}/kind-{kernel}-{arch}"
	LATEST_URL = "https://api.github.com/repos/kubernetes-sigs/kind/releases/latest"

	name = "kind"

	def get_latest(self, session: requests.Session) -> str:
		"""
		Return the name of the latest Kind release
		"""
		release = JSONObject.from_string(session.get(self.LATEST_URL).content)
		return release.path("$.name", str)

	def get_stream(self, session: requests.Session, version: str) -> IO[bytes]:
		"""
		Return a stream that emits the requested Kind binary
		"""
		url = self.URL.format(version=version, kernel=self.kernel, arch=self.goarch)
		raw: IO[bytes] = session.get(url, stream=True).raw
		return raw
Loading