update kubernetes 1.22

This commit is contained in:
xiafan 2023-06-29 14:40:40 +08:00
parent 626033bfd5
commit 38d4d046a6
50 changed files with 129 additions and 69 deletions

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@ -6,8 +6,8 @@
"url": "layer:options"
},
{
"branch": "refs/heads/stable",
"rev": "0d10732a6e14ea2f940a35ab61425a97c5db6a16",
"branch": "refs/heads/master\nrefs/heads/stable",
"rev": "a3ff62c32c993d80417f6e093e3ef95e42f62083",
"url": "layer:basic"
},
{
@ -42,6 +42,11 @@
"dynamic",
"unchecked"
],
".github/workflows/main.yml": [
"layer:basic",
"static",
"96a48a981ceb2a96f427a6b5226d2da6d7191981793804055d70a88ca1987473"
],
".gitignore": [
"kata",
"static",
@ -52,11 +57,6 @@
"static",
"714ed5453bd5a053676efb64370194a7c130f426ec11acba7d1509d558dc979c"
],
".travis/profile-update.yaml": [
"layer:basic",
"static",
"731e20aa59bf61c024d317ad630e478301a9386ccc0afe56e6c1c09db07ac83b"
],
"CONTRIBUTING.md": [
"kata",
"static",
@ -350,7 +350,7 @@
"lib/charms/layer/basic.py": [
"layer:basic",
"static",
"3126b5754ad39402ee27e64527044ddd231ed1cd137fcedaffb51e63a635f108"
"98b47134770ed6e4c0b2d4aad73cd5bc200bec84aa9c1c4e075fd70c3222a0c9"
],
"lib/charms/layer/execd.py": [
"layer:basic",
@ -452,30 +452,30 @@
"dynamic",
"cacecf0baa674d356641f1d406b8bff1d756d739c46b869a54de515d08e6fc9c"
],
"wheelhouse/certifi-2021.5.30.tar.gz": [
"wheelhouse/certifi-2021.10.8.tar.gz": [
"__pip__",
"dynamic",
"2bbf76fd432960138b3ef6dda3dde0544f27cbf8546c458e60baf371917ba9ee"
"78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"
],
"wheelhouse/charmhelpers-0.20.22.tar.gz": [
"wheelhouse/charmhelpers-0.20.23.tar.gz": [
"layer:basic",
"dynamic",
"b7550108118ce4f87488343384441797777d0da746e1346ed4e6361b4eab0ddb"
"59a9776594e91cd3e3e000043f8668b4d7b279422dbb17e320f01dc16385b80e"
],
"wheelhouse/charms.reactive-1.4.1.tar.gz": [
"layer:basic",
"dynamic",
"bba21b4fd40b26c240c9ef2aa10c6fdf73592031c68591da4e7ccc46ca9cb616"
],
"wheelhouse/charset-normalizer-2.0.3.tar.gz": [
"wheelhouse/charset-normalizer-2.0.7.tar.gz": [
"__pip__",
"dynamic",
"c46c3ace2d744cfbdebceaa3c19ae691f53ae621b39fd7570f59d14fb7f2fd12"
"e019de665e2bcf9c2b64e2e5aa025fa991da8720daa3c1138cadd2fd1856aed0"
],
"wheelhouse/idna-3.2.tar.gz": [
"wheelhouse/idna-3.3.tar.gz": [
"__pip__",
"dynamic",
"467fbad99067910785144ce333826c71fb0e63a425657295239737f7ecd125f3"
"9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"
],
"wheelhouse/netaddr-0.7.19.tar.gz": [
"layer:basic",
@ -492,10 +492,10 @@
"dynamic",
"c0a292bd977ef590379a3f05d7b7f65135487b67470f6281289a94e015650ea1"
],
"wheelhouse/pyaml-20.4.0.tar.gz": [
"wheelhouse/pyaml-21.10.1.tar.gz": [
"__pip__",
"dynamic",
"29a5c2a68660a799103d6949167bd6c7953d031449d08802386372de1db6ad71"
"c6519fee13bf06e3bb3f20cacdea8eba9140385a7c2546df5dbae4887f768383"
],
"wheelhouse/requests-2.26.0.tar.gz": [
"kata",
@ -517,10 +517,10 @@
"dynamic",
"1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"
],
"wheelhouse/urllib3-1.26.6.tar.gz": [
"wheelhouse/urllib3-1.26.7.tar.gz": [
"__pip__",
"dynamic",
"f57b4c16c62fa2760b7e3d97c35b255512fb6b59a259730f36ba32ce9f8e342f"
"4987c65554f7a2dbf30c18fd48778ef124af6fab771a377103da0585e2336ece"
],
"wheelhouse/wheel-0.33.6.tar.gz": [
"layer:basic",

50
kata/.github/workflows/main.yml vendored Normal file
View File

@ -0,0 +1,50 @@
name: Test Suite
on: [pull_request]
jobs:
lint:
name: Lint
runs-on: ubuntu-latest
strategy:
matrix:
python: [3.5, 3.6, 3.7, 3.8, 3.9]
steps:
- name: Check out code
uses: actions/checkout@v2
- name: Setup Python
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python }}
- name: Install Dependencies
run: |
pip install tox
- name: Run lint
run: tox -e flake8
functional-test:
name: Functional test with LXD
runs-on: ubuntu-latest
timeout-minutes: 360
steps:
- name: Check out code
uses: actions/checkout@v2
- name: Setup Python
uses: actions/setup-python@v2
with:
python-version: 3.8
- name: Install Dependencies
run: |
pip install tox
- name: Setup operator environment
uses: charmed-kubernetes/actions-operator@master
- name: Run test
run: tox -e func
- name: Show Status
if: ${{ always() }}
run: |
model=$(juju models --format yaml|grep "^- name:.*zaza"|cut -f2 -d/);
juju status -m "$model"
- name: Show Error Logs
if: ${{ always() }}
run: |
model=$(juju models --format yaml|grep "^- name:.*zaza"|cut -f2 -d/);
juju debug-log -m "$model" --replay --no-tail --level ERROR

View File

@ -1,12 +0,0 @@
config: {}
description: Default LXD profile - updated
devices:
eth0:
name: eth0
parent: lxdbr0
nictype: bridged
type: nic
root:
path: /
pool: default
type: disk

View File

@ -199,7 +199,13 @@ def bootstrap_charm_deps():
# a set so that we can ignore the pre-install packages and let pip
# choose the best version in case there are multiple from layer
# conflicts)
pkgs = _load_wheelhouse_versions().keys() - set(pre_install_pkgs)
_versions = _load_wheelhouse_versions()
_pkgs = _versions.keys() - set(pre_install_pkgs)
# add back the versions such that each package in pkgs is
# <package_name>==<version>.
# This ensures that pip 20.3.4+ will install the packages from the
# wheelhouse without (erroneously) flagging an error.
pkgs = _add_back_versions(_pkgs, _versions)
reinstall_flag = '--force-reinstall'
if not cfg.get('use_venv', True) and pre_eoan:
reinstall_flag = '--ignore-installed'
@ -278,6 +284,55 @@ def _load_wheelhouse_versions():
return versions
def _add_back_versions(pkgs, versions):
"""Add back the version strings to each of the packages.
The versions are LooseVersion() from _load_wheelhouse_versions(). This
function strips the ".zip" or ".tar.gz" from the end of the version string
and adds it back to the package in the form of <package_name>==<version>
If a package name is not a key in the versions dictionary, then it is
returned in the list unchanged.
:param pkgs: A list of package names
:type pkgs: List[str]
:param versions: A map of package to LooseVersion
:type versions: Dict[str, LooseVersion]
:returns: A list of (maybe) versioned packages
:rtype: List[str]
"""
def _strip_ext(s):
"""Strip an extension (if it exists) from the string
:param s: the string to strip an extension off if it exists
:type s: str
:returns: string without an extension of .zip or .tar.gz
:rtype: str
"""
for ending in [".zip", ".tar.gz"]:
if s.endswith(ending):
return s[:-len(ending)]
return s
def _maybe_add_version(pkg):
"""Maybe add back the version number to a package if it exists.
Adds the version number, if the package exists in the lexically
captured `versions` dictionary, in the form <pkg>==<version>. Strips
the extension if it exists.
:param pkg: the package name to (maybe) add the version number to.
:type pkg: str
"""
try:
return "{}=={}".format(pkg, _strip_ext(str(versions[pkg])))
except KeyError:
pass
return pkg
return [_maybe_add_version(pkg) for pkg in pkgs]
def _update_if_newer(pip, pkgs):
installed = _load_installed_versions(pip)
wheelhouse = _load_wheelhouse_versions()

View File

@ -60,13 +60,13 @@ def install_kata():
if not archive or os.path.getsize(archive) == 0:
status.maintenance('Installing Kata via apt')
gpg_key = requests.get(
'http://ftp.lysator.liu.se/pub/opensuse/repositories/home:/katacontainers:/'
'http://download.opensuse.org/repositories/home:/katacontainers:/'
'releases:/{}:/master/x{}/Release.key'.format(arch, release)).text
import_key(gpg_key)
with open('/etc/apt/sources.list.d/kata-containers.list', 'w') as f:
f.write(
'deb http://ftp.lysator.liu.se/pub/opensuse/repositories/home:/'
'deb http://download.opensuse.org/repositories/home:/'
'katacontainers:/releases:/{}:/master/x{}/ /'
.format(arch, release)
)

View File

@ -1 +1 @@
0ea81f0c
ccfa68be

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@ -170,17 +170,6 @@ def get_ingress_address(endpoint_name, ignore_addresses=None):
# doesn't support spaces, so just return the private address
return hookenv.unit_get("private-address")
excluded_ips = []
excluded_interfaces = ["vxlan", "kube", "wg", "docker", "cali", "virbr", "cni", "flannel"]
for addr in network_info["bind-addresses"]:
for prefix in excluded_interfaces:
if addr["interface-name"].startswith(prefix):
for ip in addr["addresses"]:
excluded_ips.append(ip["value"])
ingress_addresses = network_info["ingress-addresses"]
network_info["ingress-addresses"] = [ip for ip in ingress_addresses if ip not in excluded_ips]
addresses = network_info["ingress-addresses"]
if ignore_addresses:

View File

@ -170,17 +170,6 @@ def get_ingress_address(endpoint_name, ignore_addresses=None):
# doesn't support spaces, so just return the private address
return hookenv.unit_get("private-address")
excluded_ips = []
excluded_interfaces = ["vxlan", "kube", "wg", "docker", "cali", "virbr", "cni", "flannel"]
for addr in network_info["bind-addresses"]:
for prefix in excluded_interfaces:
if addr["interface-name"].startswith(prefix):
for ip in addr["addresses"]:
excluded_ips.append(ip["value"])
ingress_addresses = network_info["ingress-addresses"]
network_info["ingress-addresses"] = [ip for ip in ingress_addresses if ip not in excluded_ips]
addresses = network_info["ingress-addresses"]
if ignore_addresses:

View File

@ -170,17 +170,6 @@ def get_ingress_address(endpoint_name, ignore_addresses=None):
# doesn't support spaces, so just return the private address
return hookenv.unit_get("private-address")
excluded_ips = []
excluded_interfaces = ["vxlan", "kube", "wg", "docker", "cali", "virbr", "cni", "flannel"]
for addr in network_info["bind-addresses"]:
for prefix in excluded_interfaces:
if addr["interface-name"].startswith(prefix):
for ip in addr["addresses"]:
excluded_ips.append(ip["value"])
ingress_addresses = network_info["ingress-addresses"]
network_info["ingress-addresses"] = [ip for ip in ingress_addresses if ip not in excluded_ips]
addresses = network_info["ingress-addresses"]
if ignore_addresses: