update
This commit is contained in:
parent
61f9bed181
commit
8c724b560b
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
|
@ -0,0 +1,681 @@
|
|||
{
|
||||
"layers": [
|
||||
{
|
||||
"branch": "refs/heads/master",
|
||||
"rev": "fcdcea4e5de3e1556c24e6704607862d0ba00a56",
|
||||
"url": "layer:options"
|
||||
},
|
||||
{
|
||||
"branch": "refs/heads/master",
|
||||
"rev": "a3ff62c32c993d80417f6e093e3ef95e42f62083",
|
||||
"url": "layer:basic"
|
||||
},
|
||||
{
|
||||
"branch": "refs/heads/master",
|
||||
"rev": "527dd64fc4b9a6b0f8d80a3c2c0b865155050275",
|
||||
"url": "layer:debug"
|
||||
},
|
||||
{
|
||||
"branch": "refs/heads/master",
|
||||
"rev": "47dfcd4920ef6317850a4837ef0057ab0092a18e",
|
||||
"url": "layer:nagios"
|
||||
},
|
||||
{
|
||||
"branch": "refs/heads/master",
|
||||
"rev": "a7d7b6423db37a47611310039e6ed1929c0a2eab",
|
||||
"url": "layer:status"
|
||||
},
|
||||
{
|
||||
"branch": "refs/heads/master",
|
||||
"rev": "bbeabfee52c4442cdaf3a34e5e35530a3bd71156",
|
||||
"url": "layer:kubernetes-common"
|
||||
},
|
||||
{
|
||||
"branch": "refs/heads/master",
|
||||
"rev": "a0b41eeb5837bc087a7c0d32b8e23682566cb2ad",
|
||||
"url": "flannel"
|
||||
},
|
||||
{
|
||||
"branch": "refs/heads/master",
|
||||
"rev": "44f244cbd08b86bf2b68bd71c3fb34c7c070c382",
|
||||
"url": "interface:etcd"
|
||||
},
|
||||
{
|
||||
"branch": "refs/heads/master",
|
||||
"rev": "88b1e8fad78d06efdbf512cd75eaa0bb308eb1c1",
|
||||
"url": "interface:kubernetes-cni"
|
||||
},
|
||||
{
|
||||
"branch": "refs/heads/master",
|
||||
"rev": "2e0e1fdea6d83b55078200aacb537d60013ec5bc",
|
||||
"url": "interface:nrpe-external-master"
|
||||
}
|
||||
],
|
||||
"signatures": {
|
||||
".build.manifest": [
|
||||
"build",
|
||||
"dynamic",
|
||||
"unchecked"
|
||||
],
|
||||
".github/workflows/main.yml": [
|
||||
"layer:kubernetes-common",
|
||||
"static",
|
||||
"d4f8fec0456cb2fc05993253a995983488a76fbbef10c2ee40649e83d6c9e078"
|
||||
],
|
||||
".github/workflows/tests.yaml": [
|
||||
"flannel",
|
||||
"static",
|
||||
"5476786d9ace5356136858f2cfcfcf8dcfdf2add3be89a0de7175d5c726203ff"
|
||||
],
|
||||
".gitignore": [
|
||||
"flannel",
|
||||
"static",
|
||||
"eec008c35119baa5e06882e52f99a510b5773931f1ca829a80d99e8ca751669f"
|
||||
],
|
||||
".travis.yml": [
|
||||
"flannel",
|
||||
"static",
|
||||
"c2bd1b88f26c88b883696cca155c28671359a256ed48b90a9ea724b376f2a829"
|
||||
],
|
||||
"CONTRIBUTING.md": [
|
||||
"flannel",
|
||||
"static",
|
||||
"1e1138fc9658719db34ae11a62f017b6a02bad466011f306cd62667c9c49fdd7"
|
||||
],
|
||||
"LICENSE": [
|
||||
"flannel",
|
||||
"static",
|
||||
"58d1e17ffe5109a7ae296caafcadfdbe6a7d176f0bc4ab01e12a689b0499d8bd"
|
||||
],
|
||||
"Makefile": [
|
||||
"layer:basic",
|
||||
"static",
|
||||
"b7ab3a34e5faf79b96a8632039a0ad0aa87f2a9b5f0ba604e007cafb22190301"
|
||||
],
|
||||
"README.md": [
|
||||
"flannel",
|
||||
"static",
|
||||
"365e1cde559f36067414a90405953571c74613697de8ff8d9d8b2ff0ffb0d3db"
|
||||
],
|
||||
"actions.yaml": [
|
||||
"layer:debug",
|
||||
"dynamic",
|
||||
"cea290e28bc78458ea4a56dcad39b9a880c67e4ba53b774ac46bd8778618c7b9"
|
||||
],
|
||||
"actions/debug": [
|
||||
"layer:debug",
|
||||
"static",
|
||||
"db0a42dae4c5045b2c06385bf22209dfe0e2ded55822ef847d84b01d9ff2b046"
|
||||
],
|
||||
"bin/charm-env": [
|
||||
"layer:basic",
|
||||
"static",
|
||||
"fb6a20fac4102a6a4b6ffe903fcf666998f9a95a3647e6f9af7a1eeb44e58fd5"
|
||||
],
|
||||
"bin/layer_option": [
|
||||
"layer:options",
|
||||
"static",
|
||||
"e959bf29da4c5edff28b2602c24113c4df9e25cdc9f2aa3b5d46c8577b2a40cc"
|
||||
],
|
||||
"build-flannel-resources.sh": [
|
||||
"flannel",
|
||||
"static",
|
||||
"995fe25171d34a787cef1189d8df5e1f3575041a6f89162ec928d56f60b5917d"
|
||||
],
|
||||
"config.yaml": [
|
||||
"flannel",
|
||||
"dynamic",
|
||||
"56168ff734eedffe5b838c2f60fc797fb4f247c3a734549885b474ddf0c71423"
|
||||
],
|
||||
"copyright": [
|
||||
"flannel",
|
||||
"static",
|
||||
"9c53958dbdcd6526c71fbe4d6eb5c1d03980e39b1e4259525dea16e91f00d68e"
|
||||
],
|
||||
"copyright.layer-basic": [
|
||||
"layer:basic",
|
||||
"static",
|
||||
"f6740d66fd60b60f2533d9fcb53907078d1e20920a0219afce7182e2a1c97629"
|
||||
],
|
||||
"copyright.layer-nagios": [
|
||||
"layer:nagios",
|
||||
"static",
|
||||
"47b2363574909e748bcc471d9004780ac084b301c154905654b5b6f088474749"
|
||||
],
|
||||
"copyright.layer-options": [
|
||||
"layer:options",
|
||||
"static",
|
||||
"f6740d66fd60b60f2533d9fcb53907078d1e20920a0219afce7182e2a1c97629"
|
||||
],
|
||||
"copyright.layer-status": [
|
||||
"layer:status",
|
||||
"static",
|
||||
"7c0e36e618a8544faaaa3f8e0533c2f1f4a18bcacbdd8b99b537742e6b587d58"
|
||||
],
|
||||
"debug-scripts/charm-unitdata": [
|
||||
"layer:debug",
|
||||
"static",
|
||||
"c952b9d31f3942e4e722cb3e70f5119707b69b8e76cc44e2e906bc6d9aef49b7"
|
||||
],
|
||||
"debug-scripts/filesystem": [
|
||||
"layer:debug",
|
||||
"static",
|
||||
"d29cc8687f4422d024001c91b1ac756ee6bf8a2a125bc98db1199ba775eb8fd7"
|
||||
],
|
||||
"debug-scripts/juju-logs": [
|
||||
"layer:debug",
|
||||
"static",
|
||||
"d260b35753a917368cb8c64c1312546a0a40ef49cba84c75bc6369549807c55e"
|
||||
],
|
||||
"debug-scripts/juju-network-get": [
|
||||
"layer:debug",
|
||||
"static",
|
||||
"6d849a1f8e6569bd0d5ea38299f7937cb8b36a5f505e3532f6c756eabeb8b6c5"
|
||||
],
|
||||
"debug-scripts/network": [
|
||||
"layer:debug",
|
||||
"static",
|
||||
"714afae5dcb45554ff1f05285501e3b7fcc656c8de51217e263b93dab25a9d2e"
|
||||
],
|
||||
"debug-scripts/packages": [
|
||||
"layer:debug",
|
||||
"static",
|
||||
"e8177102dc2ca853cb9272c1257cf2cfd5253d2a074e602d07c8bc4ea8e27c75"
|
||||
],
|
||||
"debug-scripts/sysctl": [
|
||||
"layer:debug",
|
||||
"static",
|
||||
"990035b320e09cc2228e1f2f880e795d51118b2959339eacddff9cbb74349c6a"
|
||||
],
|
||||
"debug-scripts/systemd": [
|
||||
"layer:debug",
|
||||
"static",
|
||||
"23ddf533198bf5b1ce723acde31ada806aab8539292b514c721d8ec08af74106"
|
||||
],
|
||||
"docs/status.md": [
|
||||
"layer:status",
|
||||
"static",
|
||||
"975dec9f8c938196e102e954a80226bda293407c4e5ae857c118bf692154702a"
|
||||
],
|
||||
"hooks/cni-relation-broken": [
|
||||
"layer:basic",
|
||||
"dynamic",
|
||||
"2b693cb2a11594a80cc91235c2dc219a0a6303ae62bee8aa87eb35781f7158f7"
|
||||
],
|
||||
"hooks/cni-relation-changed": [
|
||||
"layer:basic",
|
||||
"dynamic",
|
||||
"2b693cb2a11594a80cc91235c2dc219a0a6303ae62bee8aa87eb35781f7158f7"
|
||||
],
|
||||
"hooks/cni-relation-created": [
|
||||
"layer:basic",
|
||||
"dynamic",
|
||||
"2b693cb2a11594a80cc91235c2dc219a0a6303ae62bee8aa87eb35781f7158f7"
|
||||
],
|
||||
"hooks/cni-relation-departed": [
|
||||
"layer:basic",
|
||||
"dynamic",
|
||||
"2b693cb2a11594a80cc91235c2dc219a0a6303ae62bee8aa87eb35781f7158f7"
|
||||
],
|
||||
"hooks/cni-relation-joined": [
|
||||
"layer:basic",
|
||||
"dynamic",
|
||||
"2b693cb2a11594a80cc91235c2dc219a0a6303ae62bee8aa87eb35781f7158f7"
|
||||
],
|
||||
"hooks/config-changed": [
|
||||
"layer:basic",
|
||||
"dynamic",
|
||||
"2b693cb2a11594a80cc91235c2dc219a0a6303ae62bee8aa87eb35781f7158f7"
|
||||
],
|
||||
"hooks/etcd-relation-broken": [
|
||||
"layer:basic",
|
||||
"dynamic",
|
||||
"2b693cb2a11594a80cc91235c2dc219a0a6303ae62bee8aa87eb35781f7158f7"
|
||||
],
|
||||
"hooks/etcd-relation-changed": [
|
||||
"layer:basic",
|
||||
"dynamic",
|
||||
"2b693cb2a11594a80cc91235c2dc219a0a6303ae62bee8aa87eb35781f7158f7"
|
||||
],
|
||||
"hooks/etcd-relation-created": [
|
||||
"layer:basic",
|
||||
"dynamic",
|
||||
"2b693cb2a11594a80cc91235c2dc219a0a6303ae62bee8aa87eb35781f7158f7"
|
||||
],
|
||||
"hooks/etcd-relation-departed": [
|
||||
"layer:basic",
|
||||
"dynamic",
|
||||
"2b693cb2a11594a80cc91235c2dc219a0a6303ae62bee8aa87eb35781f7158f7"
|
||||
],
|
||||
"hooks/etcd-relation-joined": [
|
||||
"layer:basic",
|
||||
"dynamic",
|
||||
"2b693cb2a11594a80cc91235c2dc219a0a6303ae62bee8aa87eb35781f7158f7"
|
||||
],
|
||||
"hooks/hook.template": [
|
||||
"layer:basic",
|
||||
"static",
|
||||
"2b693cb2a11594a80cc91235c2dc219a0a6303ae62bee8aa87eb35781f7158f7"
|
||||
],
|
||||
"hooks/install": [
|
||||
"layer:basic",
|
||||
"dynamic",
|
||||
"2b693cb2a11594a80cc91235c2dc219a0a6303ae62bee8aa87eb35781f7158f7"
|
||||
],
|
||||
"hooks/leader-elected": [
|
||||
"layer:basic",
|
||||
"dynamic",
|
||||
"2b693cb2a11594a80cc91235c2dc219a0a6303ae62bee8aa87eb35781f7158f7"
|
||||
],
|
||||
"hooks/leader-settings-changed": [
|
||||
"layer:basic",
|
||||
"dynamic",
|
||||
"2b693cb2a11594a80cc91235c2dc219a0a6303ae62bee8aa87eb35781f7158f7"
|
||||
],
|
||||
"hooks/nrpe-external-master-relation-broken": [
|
||||
"layer:basic",
|
||||
"dynamic",
|
||||
"2b693cb2a11594a80cc91235c2dc219a0a6303ae62bee8aa87eb35781f7158f7"
|
||||
],
|
||||
"hooks/nrpe-external-master-relation-changed": [
|
||||
"layer:basic",
|
||||
"dynamic",
|
||||
"2b693cb2a11594a80cc91235c2dc219a0a6303ae62bee8aa87eb35781f7158f7"
|
||||
],
|
||||
"hooks/nrpe-external-master-relation-created": [
|
||||
"layer:basic",
|
||||
"dynamic",
|
||||
"2b693cb2a11594a80cc91235c2dc219a0a6303ae62bee8aa87eb35781f7158f7"
|
||||
],
|
||||
"hooks/nrpe-external-master-relation-departed": [
|
||||
"layer:basic",
|
||||
"dynamic",
|
||||
"2b693cb2a11594a80cc91235c2dc219a0a6303ae62bee8aa87eb35781f7158f7"
|
||||
],
|
||||
"hooks/nrpe-external-master-relation-joined": [
|
||||
"layer:basic",
|
||||
"dynamic",
|
||||
"2b693cb2a11594a80cc91235c2dc219a0a6303ae62bee8aa87eb35781f7158f7"
|
||||
],
|
||||
"hooks/post-series-upgrade": [
|
||||
"layer:basic",
|
||||
"dynamic",
|
||||
"2b693cb2a11594a80cc91235c2dc219a0a6303ae62bee8aa87eb35781f7158f7"
|
||||
],
|
||||
"hooks/pre-series-upgrade": [
|
||||
"layer:basic",
|
||||
"dynamic",
|
||||
"2b693cb2a11594a80cc91235c2dc219a0a6303ae62bee8aa87eb35781f7158f7"
|
||||
],
|
||||
"hooks/relations/etcd/.gitignore": [
|
||||
"interface:etcd",
|
||||
"static",
|
||||
"cf237c7aff44efbe6e502e645c3e06da03a69d7bdeb43392108ef3348143417e"
|
||||
],
|
||||
"hooks/relations/etcd/README.md": [
|
||||
"interface:etcd",
|
||||
"static",
|
||||
"93873d073f5f5302d352e09321aaf87458556e9730f89e1c682699c1d0db2386"
|
||||
],
|
||||
"hooks/relations/etcd/__init__.py": [
|
||||
"interface:etcd",
|
||||
"static",
|
||||
"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
|
||||
],
|
||||
"hooks/relations/etcd/interface.yaml": [
|
||||
"interface:etcd",
|
||||
"static",
|
||||
"ba9f723b57a434f7efb2c06abec4167cd412c16da5f496a477dd7691e9a715be"
|
||||
],
|
||||
"hooks/relations/etcd/peers.py": [
|
||||
"interface:etcd",
|
||||
"static",
|
||||
"99419c3d139fb5bb90021e0482f9e7ac2cfb776fb7af79b46209c6a75b36e834"
|
||||
],
|
||||
"hooks/relations/etcd/provides.py": [
|
||||
"interface:etcd",
|
||||
"static",
|
||||
"3db1f644ab669e2dec59d59b61de63b721bc05b38fe646e525fff8f0d60982f9"
|
||||
],
|
||||
"hooks/relations/etcd/requires.py": [
|
||||
"interface:etcd",
|
||||
"static",
|
||||
"8ffc1a094807fd36a1d1428b0a07b2428074134d46086066ecd6c0acd9fcd13e"
|
||||
],
|
||||
"hooks/relations/kubernetes-cni/.github/workflows/tests.yaml": [
|
||||
"interface:kubernetes-cni",
|
||||
"static",
|
||||
"d0015cd49675976ff87832f5ef7ea20ffca961786379c72bb6acdbdeddd9137c"
|
||||
],
|
||||
"hooks/relations/kubernetes-cni/.gitignore": [
|
||||
"interface:kubernetes-cni",
|
||||
"static",
|
||||
"0594213ebf9c6ef87827b30405ee67d847f73f4185a865e0e5e9c0be9d29eabe"
|
||||
],
|
||||
"hooks/relations/kubernetes-cni/README.md": [
|
||||
"interface:kubernetes-cni",
|
||||
"static",
|
||||
"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
|
||||
],
|
||||
"hooks/relations/kubernetes-cni/__init__.py": [
|
||||
"interface:kubernetes-cni",
|
||||
"static",
|
||||
"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
|
||||
],
|
||||
"hooks/relations/kubernetes-cni/interface.yaml": [
|
||||
"interface:kubernetes-cni",
|
||||
"static",
|
||||
"03affdaf7e879adfdf8c434aa31d40faa6d2872faa7dfd93a5d3a1ebae02487d"
|
||||
],
|
||||
"hooks/relations/kubernetes-cni/provides.py": [
|
||||
"interface:kubernetes-cni",
|
||||
"static",
|
||||
"e436e187f2bab6e73add2b897cd43a2f000fde4726e40b772b66f27786c85dee"
|
||||
],
|
||||
"hooks/relations/kubernetes-cni/requires.py": [
|
||||
"interface:kubernetes-cni",
|
||||
"static",
|
||||
"45398af27246eaf2005115bd3f270b78fc830d4345b02cc0c4d438711b7cd9fe"
|
||||
],
|
||||
"hooks/relations/kubernetes-cni/tox.ini": [
|
||||
"interface:kubernetes-cni",
|
||||
"static",
|
||||
"f08626c9b65362031edb07f96f15f101bc3dda075abc64f54d1c83efd2c05e39"
|
||||
],
|
||||
"hooks/relations/nrpe-external-master/README.md": [
|
||||
"interface:nrpe-external-master",
|
||||
"static",
|
||||
"d8ed3bc7334f6581b12b6091923f58e6f5ef62075a095a4e78fb8f434a948636"
|
||||
],
|
||||
"hooks/relations/nrpe-external-master/__init__.py": [
|
||||
"interface:nrpe-external-master",
|
||||
"static",
|
||||
"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
|
||||
],
|
||||
"hooks/relations/nrpe-external-master/interface.yaml": [
|
||||
"interface:nrpe-external-master",
|
||||
"static",
|
||||
"894f24ba56148044dae5b7febf874b427d199239bcbe1f2f55c3db06bb77b5f0"
|
||||
],
|
||||
"hooks/relations/nrpe-external-master/provides.py": [
|
||||
"interface:nrpe-external-master",
|
||||
"static",
|
||||
"e6ba708d05b227b139a86be59c83ed95a2bad030bc81e5819167ba5e1e67ecd4"
|
||||
],
|
||||
"hooks/relations/nrpe-external-master/requires.py": [
|
||||
"interface:nrpe-external-master",
|
||||
"static",
|
||||
"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
|
||||
],
|
||||
"hooks/start": [
|
||||
"layer:basic",
|
||||
"dynamic",
|
||||
"2b693cb2a11594a80cc91235c2dc219a0a6303ae62bee8aa87eb35781f7158f7"
|
||||
],
|
||||
"hooks/stop": [
|
||||
"layer:basic",
|
||||
"dynamic",
|
||||
"2b693cb2a11594a80cc91235c2dc219a0a6303ae62bee8aa87eb35781f7158f7"
|
||||
],
|
||||
"hooks/update-status": [
|
||||
"layer:basic",
|
||||
"dynamic",
|
||||
"2b693cb2a11594a80cc91235c2dc219a0a6303ae62bee8aa87eb35781f7158f7"
|
||||
],
|
||||
"hooks/upgrade-charm": [
|
||||
"layer:basic",
|
||||
"dynamic",
|
||||
"2b693cb2a11594a80cc91235c2dc219a0a6303ae62bee8aa87eb35781f7158f7"
|
||||
],
|
||||
"icon.svg": [
|
||||
"flannel",
|
||||
"static",
|
||||
"bb6bcf05faa5952b889c356c9ffca6fd5082657efac85626713249ae218f763b"
|
||||
],
|
||||
"layer.yaml": [
|
||||
"flannel",
|
||||
"dynamic",
|
||||
"3e018cc6317096a1482ca753551a00c05e8ead7c2ab61809e740ab84f9ac0e3d"
|
||||
],
|
||||
"lib/charms/flannel/common.py": [
|
||||
"flannel",
|
||||
"static",
|
||||
"e6f58d426cf7547eb9ab2169bea3628f048513ca77c7f9dfea50d8b452ec0e9f"
|
||||
],
|
||||
"lib/charms/layer/__init__.py": [
|
||||
"layer:basic",
|
||||
"static",
|
||||
"dfe0d26c6bf409767de6e2546bc648f150e1b396243619bad3aa0553ab7e0e6f"
|
||||
],
|
||||
"lib/charms/layer/basic.py": [
|
||||
"layer:basic",
|
||||
"static",
|
||||
"98b47134770ed6e4c0b2d4aad73cd5bc200bec84aa9c1c4e075fd70c3222a0c9"
|
||||
],
|
||||
"lib/charms/layer/execd.py": [
|
||||
"layer:basic",
|
||||
"static",
|
||||
"fda8bd491032db1db8ddaf4e99e7cc878c6fb5432efe1f91cadb5b34765d076d"
|
||||
],
|
||||
"lib/charms/layer/kubernetes_common.py": [
|
||||
"layer:kubernetes-common",
|
||||
"static",
|
||||
"29cedffd490e6295273d195a7c9bace2fcdf149826e7427f2af9698f7f75055b"
|
||||
],
|
||||
"lib/charms/layer/nagios.py": [
|
||||
"layer:nagios",
|
||||
"static",
|
||||
"0246710bdbea844356007a64409907d93e6e94a289d83266e8b7c5d921fb3a6c"
|
||||
],
|
||||
"lib/charms/layer/options.py": [
|
||||
"layer:options",
|
||||
"static",
|
||||
"8ae7a07d22542fc964f2d2bee8219d1c78a68dace70a1b38d36d4aea47b1c3b2"
|
||||
],
|
||||
"lib/charms/layer/status.py": [
|
||||
"layer:status",
|
||||
"static",
|
||||
"d560a5e07b2e5f2b0f25f30e1f0278b06f3f90c01e4dbad5c83d71efc79018c6"
|
||||
],
|
||||
"lib/debug_script.py": [
|
||||
"layer:debug",
|
||||
"static",
|
||||
"a4d56f2d3e712b1b5cadb657c7195c6268d0aac6d228991049fd769e0ddaf453"
|
||||
],
|
||||
"make_docs": [
|
||||
"layer:status",
|
||||
"static",
|
||||
"c990f55c8e879793a62ed8464ee3d7e0d7d2225fdecaf17af24b0df0e2daa8c1"
|
||||
],
|
||||
"metadata.yaml": [
|
||||
"flannel",
|
||||
"dynamic",
|
||||
"009fb9e888c9b434913f153901ef4d419d56b8d94e3a1ca241e1417f48a3c822"
|
||||
],
|
||||
"pydocmd.yml": [
|
||||
"layer:status",
|
||||
"static",
|
||||
"11d9293901f32f75f4256ae4ac2073b92ce1d7ef7b6c892ba9fbb98690a0b330"
|
||||
],
|
||||
"reactive/__init__.py": [
|
||||
"layer:basic",
|
||||
"static",
|
||||
"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
|
||||
],
|
||||
"reactive/flannel.py": [
|
||||
"flannel",
|
||||
"static",
|
||||
"a13f33c694500f7bd00265f9db82492b2009e469295f9dca706dbb939702d795"
|
||||
],
|
||||
"reactive/status.py": [
|
||||
"layer:status",
|
||||
"static",
|
||||
"30207fc206f24e91def5252f1c7f7c8e23c0aed0e93076babf5e03c05296d207"
|
||||
],
|
||||
"requirements.txt": [
|
||||
"layer:basic",
|
||||
"static",
|
||||
"a00f75d80849e5b4fc5ad2e7536f947c25b1a4044b341caa8ee87a92d3a4c804"
|
||||
],
|
||||
"templates/10-flannel.conflist": [
|
||||
"flannel",
|
||||
"static",
|
||||
"257223dfc7fde23c0adb75f21484cdb4f35dfc2b34bd905f09931dff8038c651"
|
||||
],
|
||||
"templates/cdk.auth-webhook-secret.yaml": [
|
||||
"layer:kubernetes-common",
|
||||
"static",
|
||||
"efaf34c12a5c961fa7843199070945ba05717b3656a0f3acc3327f45334bcaec"
|
||||
],
|
||||
"templates/flannel.service": [
|
||||
"flannel",
|
||||
"static",
|
||||
"c22a91a5da6db0079717143ae95d4bbe95734c9d04f87d12ddd6ae1e3a5d9bd7"
|
||||
],
|
||||
"tests/data/bundle.yaml": [
|
||||
"flannel",
|
||||
"static",
|
||||
"ff7247c127db371fa12d510ab470a0d82070e62e2a7087e3cc84021e9c6a0a5a"
|
||||
],
|
||||
"tests/functional/conftest.py": [
|
||||
"layer:kubernetes-common",
|
||||
"static",
|
||||
"fd53e0c38b4dda0c18096167889cd0d85b98b0a13225f9f8853261241e94078c"
|
||||
],
|
||||
"tests/functional/test_k8s_common.py": [
|
||||
"layer:kubernetes-common",
|
||||
"static",
|
||||
"680a53724154771dd78422bbaf24b151788d86dd07960712c5d9e0d758499b50"
|
||||
],
|
||||
"tests/integration/conftest.py": [
|
||||
"flannel",
|
||||
"static",
|
||||
"92e2e5f765bbc9b6b6f394bac2899878b5e3e78615692dcd6fef218381ef8f20"
|
||||
],
|
||||
"tests/integration/test_flannel_integration.py": [
|
||||
"flannel",
|
||||
"static",
|
||||
"841fc0d23642fa78e623dc1ceb6765676144205f981d7d3d384acaf8203ee6ef"
|
||||
],
|
||||
"tests/unit/conftest.py": [
|
||||
"flannel",
|
||||
"static",
|
||||
"fd53e0c38b4dda0c18096167889cd0d85b98b0a13225f9f8853261241e94078c"
|
||||
],
|
||||
"tests/unit/test_flannel.py": [
|
||||
"flannel",
|
||||
"static",
|
||||
"a017d5b4edb16c9e94a0b017905b7ff74f953298bab0fb5a38d4bdaa3090c230"
|
||||
],
|
||||
"tests/unit/test_k8s_common.py": [
|
||||
"layer:kubernetes-common",
|
||||
"static",
|
||||
"da9bcea8e75160311a4055c1cbf577b497ddd45dc00223c5f1667598f94d9be4"
|
||||
],
|
||||
"tox.ini": [
|
||||
"flannel",
|
||||
"static",
|
||||
"3c97b60f08edb8f03cddc1779cc8f57472169f0170dd5a0c98169c0b9953bab6"
|
||||
],
|
||||
"version": [
|
||||
"flannel",
|
||||
"dynamic",
|
||||
"ee92bae3de0e84508e2008c42996c64f7c7728c2eafcb21d2efa1b534b1e2939"
|
||||
],
|
||||
"wheelhouse.txt": [
|
||||
"flannel",
|
||||
"dynamic",
|
||||
"c02d05375f2be2cb514cab90f7ef4e9b688e372cc42d3f29bf4e0a9ad27be62f"
|
||||
],
|
||||
"wheelhouse/Jinja2-2.10.1.tar.gz": [
|
||||
"layer:basic",
|
||||
"dynamic",
|
||||
"065c4f02ebe7f7cf559e49ee5a95fb800a9e4528727aec6f24402a5374c65013"
|
||||
],
|
||||
"wheelhouse/MarkupSafe-1.1.1.tar.gz": [
|
||||
"layer:basic",
|
||||
"dynamic",
|
||||
"29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b"
|
||||
],
|
||||
"wheelhouse/PyYAML-5.2.tar.gz": [
|
||||
"layer:basic",
|
||||
"dynamic",
|
||||
"c0ee8eca2c582d29c3c2ec6e2c4f703d1b7f1fb10bc72317355a746057e7346c"
|
||||
],
|
||||
"wheelhouse/Tempita-0.5.2.tar.gz": [
|
||||
"__pip__",
|
||||
"dynamic",
|
||||
"cacecf0baa674d356641f1d406b8bff1d756d739c46b869a54de515d08e6fc9c"
|
||||
],
|
||||
"wheelhouse/charmhelpers-0.20.23.tar.gz": [
|
||||
"layer:basic",
|
||||
"dynamic",
|
||||
"59a9776594e91cd3e3e000043f8668b4d7b279422dbb17e320f01dc16385b80e"
|
||||
],
|
||||
"wheelhouse/charms.reactive-1.4.1.tar.gz": [
|
||||
"layer:basic",
|
||||
"dynamic",
|
||||
"bba21b4fd40b26c240c9ef2aa10c6fdf73592031c68591da4e7ccc46ca9cb616"
|
||||
],
|
||||
"wheelhouse/charms.templating.jinja2-1.0.2.tar.gz": [
|
||||
"flannel",
|
||||
"dynamic",
|
||||
"8193c6a1d40bdb66fe272c359b4e4780501c658acfaf2b1118c4230927815fe2"
|
||||
],
|
||||
"wheelhouse/dnspython-1.16.0.zip": [
|
||||
"flannel",
|
||||
"dynamic",
|
||||
"36c5e8e38d4369a08b6780b7f27d790a292b2b08eea01607865bf0936c558e01"
|
||||
],
|
||||
"wheelhouse/netaddr-0.7.19.tar.gz": [
|
||||
"layer:basic",
|
||||
"dynamic",
|
||||
"38aeec7cdd035081d3a4c306394b19d677623bf76fa0913f6695127c7753aefd"
|
||||
],
|
||||
"wheelhouse/pbr-5.6.0.tar.gz": [
|
||||
"__pip__",
|
||||
"dynamic",
|
||||
"42df03e7797b796625b1029c0400279c7c34fd7df24a7d7818a1abb5b38710dd"
|
||||
],
|
||||
"wheelhouse/pip-18.1.tar.gz": [
|
||||
"layer:basic",
|
||||
"dynamic",
|
||||
"c0a292bd977ef590379a3f05d7b7f65135487b67470f6281289a94e015650ea1"
|
||||
],
|
||||
"wheelhouse/pyaml-21.10.1.tar.gz": [
|
||||
"__pip__",
|
||||
"dynamic",
|
||||
"c6519fee13bf06e3bb3f20cacdea8eba9140385a7c2546df5dbae4887f768383"
|
||||
],
|
||||
"wheelhouse/python-etcd-0.4.5.tar.gz": [
|
||||
"__pip__",
|
||||
"dynamic",
|
||||
"f1b5ebb825a3e8190494f5ce1509fde9069f2754838ed90402a8c11e1f52b8cb"
|
||||
],
|
||||
"wheelhouse/setuptools-41.6.0.zip": [
|
||||
"layer:basic",
|
||||
"dynamic",
|
||||
"6afa61b391dcd16cb8890ec9f66cc4015a8a31a6e1c2b4e0c464514be1a3d722"
|
||||
],
|
||||
"wheelhouse/setuptools_scm-1.17.0.tar.gz": [
|
||||
"layer:basic",
|
||||
"dynamic",
|
||||
"70a4cf5584e966ae92f54a764e6437af992ba42ac4bca7eb37cc5d02b98ec40a"
|
||||
],
|
||||
"wheelhouse/six-1.16.0.tar.gz": [
|
||||
"__pip__",
|
||||
"dynamic",
|
||||
"1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"
|
||||
],
|
||||
"wheelhouse/urllib3-1.26.7.tar.gz": [
|
||||
"__pip__",
|
||||
"dynamic",
|
||||
"4987c65554f7a2dbf30c18fd48778ef124af6fab771a377103da0585e2336ece"
|
||||
],
|
||||
"wheelhouse/wheel-0.33.6.tar.gz": [
|
||||
"layer:basic",
|
||||
"dynamic",
|
||||
"10c9da68765315ed98850f8e048347c3eb06dd81822dc2ab1d4fde9dc9702646"
|
||||
]
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
name: Test Suite
|
||||
on: [pull_request]
|
||||
|
||||
jobs:
|
||||
tests:
|
||||
name: Lint, Unit, & Func Tests
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
python: [3.6, 3.7, 3.8, 3.9]
|
||||
steps:
|
||||
- name: Check out code
|
||||
uses: actions/checkout@v2
|
||||
- name: Setup Python
|
||||
uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: ${{ matrix.python }}
|
||||
- name: Install Dependencies
|
||||
run: |
|
||||
pip install tox
|
||||
- name: Run lint
|
||||
run: tox
|
||||
|
|
@ -0,0 +1,42 @@
|
|||
name: Run tests with Tox
|
||||
|
||||
on: [push]
|
||||
|
||||
jobs:
|
||||
unit-tests:
|
||||
name: Lint, Unit Tests
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
python: [3.5, 3.6, 3.7, 3.8, 3.9]
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Setup Python
|
||||
uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: ${{ matrix.python }}
|
||||
- name: Install Tox
|
||||
run: pip install tox
|
||||
- name: Run Tox
|
||||
run: tox # Run tox using the version of Python in `PATH`
|
||||
|
||||
integration-tests:
|
||||
name: Integration test with LXD
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Check out code
|
||||
uses: actions/checkout@v2
|
||||
- name: Setup Python
|
||||
uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: Setup operator environment
|
||||
uses: charmed-kubernetes/actions-operator@master
|
||||
with:
|
||||
provider: lxd
|
||||
- name: Install docker
|
||||
run: sudo snap install docker
|
||||
- name: Build flannel resources
|
||||
run: ARCH=amd64 sudo ./build-flannel-resources.sh
|
||||
- name: Run integration test
|
||||
run: tox -e integration
|
||||
|
|
@ -0,0 +1,4 @@
|
|||
.tox/
|
||||
__pycache__/
|
||||
*.pyc
|
||||
*.tar.gz
|
||||
|
|
@ -0,0 +1,9 @@
|
|||
language: python
|
||||
python:
|
||||
- "3.5"
|
||||
- "3.6"
|
||||
- "3.7"
|
||||
install:
|
||||
- pip install tox-travis
|
||||
script:
|
||||
- tox
|
||||
|
|
@ -0,0 +1,37 @@
|
|||
# Contributor Guide
|
||||
|
||||
This Juju charm is open source ([Apache License 2.0](./LICENSE)) and we actively seek any community contibutions
|
||||
for code, suggestions and documentation.
|
||||
This page details a few notes, workflows and suggestions for how to make contributions most effective and help us
|
||||
all build a better charm - please give them a read before working on any contributions.
|
||||
|
||||
## Licensing
|
||||
|
||||
This charm has been created under the [Apache License 2.0](./LICENSE), which will cover any contributions you may
|
||||
make to this project. Please familiarise yourself with the terms of the license.
|
||||
|
||||
Additionally, this charm uses the Harmony CLA agreement. It’s the easiest way for you to give us permission to
|
||||
use your contributions.
|
||||
In effect, you’re giving us a license, but you still own the copyright — so you retain the right to modify your
|
||||
code and use it in other projects. Please [sign the CLA here](https://ubuntu.com/legal/contributors/agreement) before
|
||||
making any contributions.
|
||||
|
||||
## Code of conduct
|
||||
|
||||
We have adopted the Ubuntu code of Conduct. You can read this in full [here](https://ubuntu.com/community/code-of-conduct).
|
||||
|
||||
## Contributing code
|
||||
|
||||
To contribute code to this project, please use the following workflow:
|
||||
|
||||
1. [Submit a bug](https://bugs.launchpad.net/charm-flannel/+filebug) to explain the need for and track the change.
|
||||
2. Create a branch on your fork of the repo with your changes, including a unit test covering the new or modified code.
|
||||
3. Submit a PR. The PR description should include a link to the bug on Launchpad.
|
||||
4. Update the Launchpad bug to include a link to the PR and the `review-needed` tag.
|
||||
5. Once reviewed and merged, the change will become available on the edge channel and assigned to an appropriate milestone
|
||||
for further release according to priority.
|
||||
|
||||
## Documentation
|
||||
|
||||
Documentation for this charm is currently maintained as part of the Charmed Kubernetes docs.
|
||||
See [this page](https://github.com/charmed-kubernetes/kubernetes-docs/blob/master/pages/k8s/charm-flannel.md)
|
||||
|
|
@ -0,0 +1,202 @@
|
|||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
|
@ -0,0 +1,24 @@
|
|||
#!/usr/bin/make
# Developer Makefile for the charm: the default target runs lint and the
# unit tests (both driven through tox).

all: lint unit_test


.PHONY: clean
clean:
	@rm -rf .tox

.PHONY: apt_prereqs
apt_prereqs:
	@# Need tox, but don't install the apt version unless we have to (don't want to conflict with pip)
	@which tox >/dev/null || (sudo apt-get install -y python-pip && sudo pip install tox)

.PHONY: lint
lint: apt_prereqs
	@tox --notest
	@PATH=.tox/py34/bin:.tox/py35/bin flake8 $(wildcard hooks reactive lib unit_tests tests)
	@charm proof

.PHONY: unit_test
unit_test: apt_prereqs
	@echo Starting tests...
	tox
|
||||
|
|
@ -0,0 +1,25 @@
|
|||
# Flannel Charm
|
||||
|
||||
Flannel is a virtual network that gives a subnet to each host for use with
|
||||
container runtimes.
|
||||
|
||||
This charm will deploy flannel as a background service, and configure CNI for
|
||||
use with flannel, on any principal charm that implements the
|
||||
[`kubernetes-cni`](https://github.com/juju-solutions/interface-kubernetes-cni) interface.
|
||||
|
||||
This charm is maintained along with the components of Charmed Kubernetes. For full information,
|
||||
please visit the [official Charmed Kubernetes docs](https://www.ubuntu.com/kubernetes/docs/charm-flannel).
|
||||
|
||||
# Developers
|
||||
|
||||
## Building the charm
|
||||
|
||||
```
|
||||
charm build -o <build-dir>
|
||||
```
|
||||
|
||||
## Building the flannel resources
|
||||
|
||||
```
|
||||
./build-flannel-resources.sh
|
||||
```
|
||||
|
|
@ -0,0 +1,2 @@
|
|||
"debug":
|
||||
"description": "Collect debug data"
|
||||
|
|
@ -0,0 +1,102 @@
|
|||
#!/usr/local/sbin/charm-env python3
# "debug" action: run every executable in debug-scripts/, collect their
# output into a timestamped directory, and archive it for `juju scp`.

import os
import subprocess
import tarfile
import tempfile
import traceback
from contextlib import contextmanager
from datetime import datetime
from charmhelpers.core.hookenv import action_set, local_unit

# Module-level state shared by the helpers below. Both are populated by
# archive_context() and are only valid while that context is active.
archive_dir = None
log_file = None
|
||||
|
||||
|
||||
@contextmanager
def archive_context():
    """ Open a context with a new temporary directory.

    When the context closes, the directory is archived to /home/ubuntu,
    and the archive location is added to Juju action output. """
    global archive_dir
    global log_file
    with tempfile.TemporaryDirectory() as temp_dir:
        name = "debug-" + datetime.now().strftime("%Y%m%d%H%M%S")
        archive_dir = os.path.join(temp_dir, name)
        os.makedirs(archive_dir)
        # debug.log collects log() output from all scripts run in the context.
        with open("%s/debug.log" % archive_dir, "w") as log_file:
            yield
        # Archive the collected output. Use arcname= instead of os.chdir()
        # so the process working directory is not left pointing at temp_dir,
        # which is deleted as soon as this context exits.
        tar_path = "/home/ubuntu/%s.tar.gz" % name
        with tarfile.open(tar_path, "w:gz") as f:
            f.add(archive_dir, arcname=name)
        action_set({
            "path": tar_path,
            "command": "juju scp %s:%s ." % (local_unit(), tar_path),
            "message": " ".join([
                "Archive has been created on unit %s." % local_unit(),
                "Use the juju scp command to copy it to your local machine."
            ])
        })
|
||||
|
||||
|
||||
def log(msg):
    """Write *msg* to the archive's debug.log, one timestamped line per
    input line.

    Must be run within archive_context, which opens the module-level
    log_file handle this writes to.
    """
    stamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    entries = str(msg).splitlines()
    log_file.writelines(
        "%s | %s\n" % (stamp, text.rstrip()) for text in entries
    )
|
||||
|
||||
|
||||
def run_script(script):
    """ Run a single script. Must be run within archive_context.

    The script is executed from debug-scripts/ with its stdout and stderr
    captured into a per-script directory inside the archive; the script is
    told where via $DEBUG_SCRIPT_DIR. Nonzero exits are logged, not raised.
    """
    log("Running script: " + script)
    script_dir = os.path.join(archive_dir, script)
    os.makedirs(script_dir)
    env = os.environ.copy()
    env["PYTHONPATH"] = "lib"  # allow same imports as reactive code
    env["DEBUG_SCRIPT_DIR"] = script_dir
    with open(script_dir + "/stdout", "w") as stdout:
        with open(script_dir + "/stderr", "w") as stderr:
            process = subprocess.Popen(
                "debug-scripts/" + script,
                stdout=stdout, stderr=stderr, env=env
            )
            try:
                # Give each script up to 5 minutes before escalating.
                exit_code = process.wait(timeout=300)
            except subprocess.TimeoutExpired:
                log("ERROR: still running, terminating")
                # Escalate: SIGTERM first, then SIGKILL if it still hangs.
                process.terminate()
                try:
                    exit_code = process.wait(timeout=10)
                except subprocess.TimeoutExpired:
                    log("ERROR: still running, killing")
                    process.kill()
                    exit_code = process.wait(timeout=10)
    if exit_code != 0:
        log("ERROR: %s failed with exit code %d" % (script, exit_code))
|
||||
|
||||
|
||||
def run_all_scripts():
    """ Run all scripts in debug-scripts/. For the sake of robustness, log
    and ignore any exceptions that occur, so one failing script does not
    abort the rest of the debug collection.

    Must be run within archive_context """
    scripts = os.listdir("debug-scripts")
    for script in scripts:
        try:
            run_script(script)
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt and
            # SystemExit still propagate; any other failure is logged into
            # the archive and the remaining scripts still run.
            log(traceback.format_exc())
|
||||
|
||||
|
||||
def main():
    """ Open an archive context and run all scripts.

    The context handles creating, archiving, and reporting the output
    directory; this only drives the script execution inside it. """
    with archive_context():
        run_all_scripts()


if __name__ == "__main__":
    main()
|
||||
|
|
@ -0,0 +1,107 @@
|
|||
#!/bin/bash
# charm-env: locate the charm directory, activate its Python venv, and either
# exec a wrapped command (shebang/symlink use) or set up the environment for
# a sourcing script.

VERSION="1.0.0"
|
||||
|
||||
|
||||
find_charm_dirs() {
    # Sets (and exports) JUJU_CHARM_DIR and CHARM_DIR, or exits 1 when the
    # charm directory cannot be determined unambiguously.
    # Hopefully, $JUJU_CHARM_DIR is set so which venv to use is unambiguous.
    if [[ -n "$JUJU_CHARM_DIR" || -n "$CHARM_DIR" ]]; then
        if [[ -z "$JUJU_CHARM_DIR" ]]; then
            # accept $CHARM_DIR to be more forgiving
            export JUJU_CHARM_DIR="$CHARM_DIR"
        fi
        if [[ -z "$CHARM_DIR" ]]; then
            # set CHARM_DIR as well to help with backwards compatibility
            export CHARM_DIR="$JUJU_CHARM_DIR"
        fi
        return
    fi
    # Try to guess the value for JUJU_CHARM_DIR by looking for a non-subordinate
    # (because there's got to be at least one principal) charm directory;
    # if there are several, pick the first by alpha order.
    agents_dir="/var/lib/juju/agents"
    if [[ -d "$agents_dir" ]]; then
        desired_charm="$1"
        found_charm_dir=""
        if [[ -n "$desired_charm" ]]; then
            # Caller named a charm (--charm): find the unit dir whose
            # metadata.yaml "name:" matches, rejecting ambiguity.
            for charm_dir in $(/bin/ls -d "$agents_dir"/unit-*/charm); do
                # Extract the "name:" value, tolerating optional quoting.
                charm_name="$(grep -o '^['\''"]\?name['\''"]\?:.*' $charm_dir/metadata.yaml 2> /dev/null | sed -e 's/.*: *//' -e 's/['\''"]//g')"
                if [[ "$charm_name" == "$desired_charm" ]]; then
                    if [[ -n "$found_charm_dir" ]]; then
                        >&2 echo "Ambiguous possibilities for JUJU_CHARM_DIR matching '$desired_charm'; please run within a Juju hook context"
                        exit 1
                    fi
                    found_charm_dir="$charm_dir"
                fi
            done
            if [[ -z "$found_charm_dir" ]]; then
                >&2 echo "Unable to determine JUJU_CHARM_DIR matching '$desired_charm'; please run within a Juju hook context"
                exit 1
            fi
            export JUJU_CHARM_DIR="$found_charm_dir"
            export CHARM_DIR="$found_charm_dir"
            return
        fi
        # No hint given: count principal (non-subordinate) charms; only an
        # unambiguous single match is accepted.
        # shellcheck disable=SC2126
        non_subordinates="$(grep -L 'subordinate"\?:.*true' "$agents_dir"/unit-*/charm/metadata.yaml | wc -l)"
        if [[ "$non_subordinates" -gt 1 ]]; then
            >&2 echo 'Ambiguous possibilities for JUJU_CHARM_DIR; please use --charm or run within a Juju hook context'
            exit 1
        elif [[ "$non_subordinates" -eq 1 ]]; then
            for charm_dir in $(/bin/ls -d "$agents_dir"/unit-*/charm); do
                if grep -q 'subordinate"\?:.*true' "$charm_dir/metadata.yaml"; then
                    continue
                fi
                export JUJU_CHARM_DIR="$charm_dir"
                export CHARM_DIR="$charm_dir"
                return
            done
        fi
    fi
    >&2 echo 'Unable to determine JUJU_CHARM_DIR; please run within a Juju hook context'
    exit 1
}
|
||||
|
||||
try_activate_venv() {
    # Source the build-time virtualenv's activate script when one exists
    # alongside the charm directory; quietly do nothing otherwise.
    local venv_dir="$JUJU_CHARM_DIR/../.venv"
    if [[ -d "$venv_dir" ]]; then
        . "$venv_dir/bin/activate"
    fi
}
|
||||
|
||||
find_wrapped() {
    # Resolve the real command this wrapper shadows: strip /usr/local/sbin
    # (where the charm-env symlinks live) out of PATH, then look up $0's
    # basename in what remains.
    PATH="${PATH/\/usr\/local\/sbin:}" which "$(basename "$0")"
}
|
||||
|
||||
|
||||
if [[ "$1" == "--version" || "$1" == "-v" ]]; then
    echo "$VERSION"
    exit 0
fi


# allow --charm option to hint which JUJU_CHARM_DIR to choose when ambiguous
# NB: --charm option must come first
# NB: option must be processed outside find_charm_dirs to modify $@
charm_name=""
if [[ "$1" == "--charm" ]]; then
    charm_name="$2"
    shift; shift
fi

# Establish JUJU_CHARM_DIR/CHARM_DIR, switch into the charm's venv when one
# exists, and expose the charm's bundled libs to Python.
find_charm_dirs "$charm_name"
try_activate_venv
export PYTHONPATH="$JUJU_CHARM_DIR/lib:$PYTHONPATH"

# Dispatch on how this file was reached (direct shebang, symlink, sourced).
if [[ "$(basename "$0")" == "charm-env" ]]; then
    # being used as a shebang
    exec "$@"
elif [[ "$0" == "$BASH_SOURCE" ]]; then
    # being invoked as a symlink wrapping something to find in the venv
    exec "$(find_wrapped)" "$@"
elif [[ "$(basename "$BASH_SOURCE")" == "charm-env" ]]; then
    # being sourced directly; do nothing
    /bin/true
else
    # being sourced for wrapped bash helpers
    . "$(find_wrapped)"
fi
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
#!/usr/bin/env python3
# CLI shim exposing charm layer options to hooks and shell scripts.

import sys
import argparse
from charms import layer


parser = argparse.ArgumentParser(description='Access layer options.')
parser.add_argument('section',
                    help='the section, or layer, the option is from')
parser.add_argument('option',
                    help='the option to access')

args = parser.parse_args()
value = layer.options.get(args.section, args.option)
if isinstance(value, bool):
    # Booleans map to the exit status so shell callers can test with
    # `if layer-option <section> <option>; then ...`.
    sys.exit(0 if value else 1)
elif isinstance(value, list):
    # Lists print one element per line for easy shell iteration.
    for val in value:
        print(val)
else:
    print(value)
||||
|
|
@ -0,0 +1,55 @@
|
|||
#!/usr/bin/env bash
# Build the flannel charm's resource tarballs (flanneld + etcdctl) for each
# architecture in $ARCH, leaving flannel-<arch>.tar.gz in the current dir.
set -eux

FLANNEL_VERSION=${FLANNEL_VERSION:-"v0.11.0"}
ETCD_VERSION=${ETCD_VERSION:-"v2.3.7"}

ARCH=${ARCH:-"amd64 arm64 s390x"}

# Recorded in each tarball's BUILD_INFO for traceability.
build_script_commit="$(git show --oneline -q)"
temp_dir="$(readlink -f build-flannel-resources.tmp)"
rm -rf "$temp_dir"
mkdir "$temp_dir"
(cd "$temp_dir"
  git clone https://github.com/coreos/flannel.git flannel \
    --branch "$FLANNEL_VERSION" \
    --depth 1

  git clone https://github.com/coreos/etcd.git etcd \
    --branch "$ETCD_VERSION" \
    --depth 1

  # Grab the user id and group id of this current user.
  GROUP_ID=$(id -g)
  USER_ID=$(id -u)

  for arch in $ARCH; do
    echo "Building flannel $FLANNEL_VERSION for $arch"
    (cd flannel
      ARCH=$arch make dist/flanneld-$arch
    )

    echo "Building etcd $ETCD_VERSION for $arch"
    # Build etcd inside a golang container, then chown the output back to
    # the invoking user (the container runs as root).
    docker run \
      --rm \
      -e GOOS=linux \
      -e GOARCH="$arch" \
      -v $temp_dir/etcd:/etcd \
      golang:1.15 \
      /bin/bash -c "cd /etcd && ./build && chown -R ${USER_ID}:${GROUP_ID} /etcd"

    # Assemble the per-arch tarball contents: binaries plus BUILD_INFO.
    rm -rf contents
    mkdir contents
    (cd contents
      echo "flannel-$arch $FLANNEL_VERSION" >> BUILD_INFO
      echo "etcdctl version $ETCD_VERSION" >> BUILD_INFO
      echo "built $(date)" >> BUILD_INFO
      echo "build script commit: $build_script_commit" >> BUILD_INFO
      cp "$temp_dir"/etcd/bin/etcdctl .
      cp "$temp_dir"/flannel/dist/flanneld-$arch ./flanneld
      tar -caf "$temp_dir/flannel-$arch.tar.gz" .
    )
  done
)
mv "$temp_dir"/flannel-*.tar.gz .
rm -rf "$temp_dir"
||||
|
|
@ -0,0 +1,38 @@
|
|||
"options":
|
||||
"nagios_context":
|
||||
"default": "juju"
|
||||
"type": "string"
|
||||
"description": |
|
||||
Used by the nrpe subordinate charms.
|
||||
A string that will be prepended to instance name to set the host name
|
||||
in nagios. So for instance the hostname would be something like:
|
||||
juju-myservice-0
|
||||
If you're running multiple environments with the same services in them
|
||||
this allows you to differentiate between them.
|
||||
"nagios_servicegroups":
|
||||
"default": ""
|
||||
"type": "string"
|
||||
"description": |
|
||||
A comma-separated list of nagios servicegroups.
|
||||
If left empty, the nagios_context will be used as the servicegroup
|
||||
"iface":
|
||||
"type": "string"
|
||||
"default": ""
|
||||
"description": |
|
||||
The interface to bind flannel overlay networking. The default value is
|
||||
the interface bound to the cni endpoint.
|
||||
"cidr":
|
||||
"type": "string"
|
||||
"default": "10.1.0.0/16"
|
||||
"description": |
|
||||
Network CIDR to assign to Flannel
|
||||
"port":
|
||||
"type": "int"
|
||||
"default": !!int "0"
|
||||
"description": |
|
||||
Network port to use for Flannel
|
||||
"vni":
|
||||
"type": "int"
|
||||
"default": !!int "0"
|
||||
"description": |
|
||||
VXLAN network id to assign to Flannel
|
||||
|
|
@ -0,0 +1,13 @@
|
|||
Copyright 2016 Canonical LTD
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
|
@ -0,0 +1,16 @@
|
|||
Format: http://dep.debian.net/deps/dep5/
|
||||
|
||||
Files: *
|
||||
Copyright: Copyright 2015-2017, Canonical Ltd., All Rights Reserved.
|
||||
License: Apache License 2.0
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
.
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
.
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
|
@ -0,0 +1,16 @@
|
|||
Format: http://dep.debian.net/deps/dep5/
|
||||
|
||||
Files: *
|
||||
Copyright: Copyright 2016, Canonical Ltd.
|
||||
License: GPL-3
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License version 3, as
|
||||
published by the Free Software Foundation.
|
||||
.
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranties of
|
||||
MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
|
||||
PURPOSE. See the GNU General Public License for more details.
|
||||
.
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
|
@ -0,0 +1,16 @@
|
|||
Format: http://dep.debian.net/deps/dep5/
|
||||
|
||||
Files: *
|
||||
Copyright: Copyright 2015-2017, Canonical Ltd., All Rights Reserved.
|
||||
License: Apache License 2.0
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
.
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
.
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
|
@ -0,0 +1,16 @@
|
|||
Format: http://dep.debian.net/deps/dep5/
|
||||
|
||||
Files: *
|
||||
Copyright: Copyright 2018, Canonical Ltd., All Rights Reserved.
|
||||
License: Apache License 2.0
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
.
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
.
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
|
@ -0,0 +1,12 @@
|
|||
#!/usr/local/sbin/charm-env python3
# Debug script: dump the charm's unitdata key/value store into the archive.

import debug_script
import json
from charmhelpers.core import unitdata

kv = unitdata.kv()
# getrange("") matches every key (empty-string prefix == full dump).
data = kv.getrange("")

with debug_script.open_file("unitdata.json", "w") as f:
    json.dump(data, f, indent=2)
    f.write("\n")
|
||||
|
|
@ -0,0 +1,17 @@
|
|||
#!/bin/sh
# Debug script: collect filesystem and disk diagnostics into $DEBUG_SCRIPT_DIR.
# -u errors on unset vars; -x traces commands (visible in the stderr capture).
set -ux

# report file system disk space usage
df -hT > $DEBUG_SCRIPT_DIR/df-hT
# estimate file space usage
# NOTE(review): `2>&1 > file` points stderr at the *current* stdout (the
# wrapper's stdout capture), not into du-h; if the intent was to merge
# stderr into the file the order should be `> file 2>&1` — confirm.
du -h / 2>&1 > $DEBUG_SCRIPT_DIR/du-h
# list the mounted filesystems
mount > $DEBUG_SCRIPT_DIR/mount
# list the mounted systems with ascii trees
findmnt -A > $DEBUG_SCRIPT_DIR/findmnt
# list block devices
lsblk > $DEBUG_SCRIPT_DIR/lsblk
# list open files
# NOTE(review): same redirection-order question as `du` above.
lsof 2>&1 > $DEBUG_SCRIPT_DIR/lsof
# list local system locks
lslocks > $DEBUG_SCRIPT_DIR/lslocks
|
||||
|
|
@ -0,0 +1,4 @@
|
|||
#!/bin/sh
# Debug script: copy the unit's Juju agent logs into the debug archive.
set -ux

cp -v /var/log/juju/* $DEBUG_SCRIPT_DIR
|
||||
|
|
@ -0,0 +1,21 @@
|
|||
#!/usr/local/sbin/charm-env python3
# Debug script: capture `network-get` output for every relation endpoint
# declared in metadata.yaml.

import os
import subprocess
import yaml
import debug_script

with open('metadata.yaml') as f:
    # safe_load: metadata.yaml is plain data; yaml.load without an explicit
    # Loader is deprecated and can construct arbitrary Python objects.
    metadata = yaml.safe_load(f)

# Every endpoint in requires/provides/peers is a relation we can query.
relations = []
for key in ['requires', 'provides', 'peers']:
    relations += list(metadata.get(key, {}).keys())

os.mkdir(os.path.join(debug_script.dir, 'relations'))

for relation in relations:
    path = 'relations/' + relation
    with debug_script.open_file(path, 'w') as f:
        cmd = ['network-get', relation]
        # Merge stderr into the capture and ignore the exit status:
        # network-get can fail for unbound endpoints, and one failure
        # should not abort the rest of the debug run.
        subprocess.call(cmd, stdout=f, stderr=subprocess.STDOUT)
|
||||
|
|
@ -0,0 +1,11 @@
|
|||
#!/bin/sh
# Debug script: capture host networking state (interfaces, DNS, routes,
# firewall) into $DEBUG_SCRIPT_DIR, plus a quick external-connectivity probe.
set -ux

ifconfig -a > $DEBUG_SCRIPT_DIR/ifconfig
cp -v /etc/resolv.conf $DEBUG_SCRIPT_DIR/resolv.conf
cp -v /etc/network/interfaces $DEBUG_SCRIPT_DIR/interfaces
netstat -planut > $DEBUG_SCRIPT_DIR/netstat
route -n > $DEBUG_SCRIPT_DIR/route
iptables-save > $DEBUG_SCRIPT_DIR/iptables-save
dig google.com > $DEBUG_SCRIPT_DIR/dig-google
# 2-second deadline, 0.1s interval: a quick reachability check, not a benchmark
ping -w 2 -i 0.1 google.com > $DEBUG_SCRIPT_DIR/ping-google
||||
|
|
@ -0,0 +1,7 @@
|
|||
#!/bin/sh
# Debug script: record installed packages (deb, snap, pip) into
# $DEBUG_SCRIPT_DIR. No `set -e`, so a missing tool (e.g. pip2 on newer
# systems) does not abort the remaining listings.
set -ux

dpkg --list > $DEBUG_SCRIPT_DIR/dpkg-list
snap list > $DEBUG_SCRIPT_DIR/snap-list
pip2 list > $DEBUG_SCRIPT_DIR/pip2-list
pip3 list > $DEBUG_SCRIPT_DIR/pip3-list
|
||||
|
|
@ -0,0 +1,4 @@
|
|||
#!/bin/sh
# Debug script: dump all kernel parameters into $DEBUG_SCRIPT_DIR.
set -ux

sysctl -a > $DEBUG_SCRIPT_DIR/sysctl
|
||||
|
|
@ -0,0 +1,9 @@
|
|||
#!/bin/sh
# Debug script: capture systemd unit state, the journal, and boot-time
# analysis into $DEBUG_SCRIPT_DIR.
set -ux

systemctl --all > $DEBUG_SCRIPT_DIR/systemctl
journalctl > $DEBUG_SCRIPT_DIR/journalctl
systemd-analyze time > $DEBUG_SCRIPT_DIR/systemd-analyze-time
systemd-analyze blame > $DEBUG_SCRIPT_DIR/systemd-analyze-blame
systemd-analyze critical-chain > $DEBUG_SCRIPT_DIR/systemd-analyze-critical-chain
systemd-analyze dump > $DEBUG_SCRIPT_DIR/systemd-analyze-dump
|
||||
|
|
@ -0,0 +1,91 @@
|
|||
<h1 id="charms.layer.status.WorkloadState">WorkloadState</h1>
|
||||
|
||||
```python
|
||||
WorkloadState(self, /, *args, **kwargs)
|
||||
```
|
||||
|
||||
Enum of the valid workload states.
|
||||
|
||||
Valid options are:
|
||||
|
||||
* `WorkloadState.MAINTENANCE`
|
||||
* `WorkloadState.BLOCKED`
|
||||
* `WorkloadState.WAITING`
|
||||
* `WorkloadState.ACTIVE`
|
||||
|
||||
<h1 id="charms.layer.status.maintenance">maintenance</h1>
|
||||
|
||||
```python
|
||||
maintenance(message)
|
||||
```
|
||||
|
||||
Set the status to the `MAINTENANCE` state with the given operator message.
|
||||
|
||||
__Parameters__
|
||||
|
||||
- __`message` (str)__: Message to convey to the operator.
|
||||
|
||||
<h1 id="charms.layer.status.maint">maint</h1>
|
||||
|
||||
```python
|
||||
maint(message)
|
||||
```
|
||||
|
||||
Shorthand alias for
|
||||
[maintenance](status.md#charms.layer.status.maintenance).
|
||||
|
||||
__Parameters__
|
||||
|
||||
- __`message` (str)__: Message to convey to the operator.
|
||||
|
||||
<h1 id="charms.layer.status.blocked">blocked</h1>
|
||||
|
||||
```python
|
||||
blocked(message)
|
||||
```
|
||||
|
||||
Set the status to the `BLOCKED` state with the given operator message.
|
||||
|
||||
__Parameters__
|
||||
|
||||
- __`message` (str)__: Message to convey to the operator.
|
||||
|
||||
<h1 id="charms.layer.status.waiting">waiting</h1>
|
||||
|
||||
```python
|
||||
waiting(message)
|
||||
```
|
||||
|
||||
Set the status to the `WAITING` state with the given operator message.
|
||||
|
||||
__Parameters__
|
||||
|
||||
- __`message` (str)__: Message to convey to the operator.
|
||||
|
||||
<h1 id="charms.layer.status.active">active</h1>
|
||||
|
||||
```python
|
||||
active(message)
|
||||
```
|
||||
|
||||
Set the status to the `ACTIVE` state with the given operator message.
|
||||
|
||||
__Parameters__
|
||||
|
||||
- __`message` (str)__: Message to convey to the operator.
|
||||
|
||||
<h1 id="charms.layer.status.status_set">status_set</h1>
|
||||
|
||||
```python
|
||||
status_set(workload_state, message)
|
||||
```
|
||||
|
||||
Set the status to the given workload state with a message.
|
||||
|
||||
__Parameters__
|
||||
|
||||
- __`workload_state` (WorkloadState or str)__: State of the workload. Should be
|
||||
a [WorkloadState](status.md#charms.layer.status.WorkloadState) enum
|
||||
member, or the string value of one of those members.
|
||||
- __`message` (str)__: Message to convey to the operator.
|
||||
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
#!/usr/bin/env python3
# Generated reactive-charm hook stub: bootstraps the charm's bundled Python
# dependencies, registers config-state setup/teardown, then dispatches to
# the reactive framework. Import order is significant: bootstrap must run
# before charmhelpers/charms.reactive are imported.

# Load modules from $JUJU_CHARM_DIR/lib
import sys
sys.path.append('lib')

from charms.layer import basic  # noqa
basic.bootstrap_charm_deps()

from charmhelpers.core import hookenv  # noqa
hookenv.atstart(basic.init_config_states)
hookenv.atexit(basic.clear_config_states)


# This will load and run the appropriate @hook and other decorated
# handlers from $JUJU_CHARM_DIR/reactive, $JUJU_CHARM_DIR/hooks/reactive,
# and $JUJU_CHARM_DIR/hooks/relations.
#
# See https://jujucharms.com/docs/stable/authors-charm-building
# for more information on this pattern.
from charms.reactive import main  # noqa
main()
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
#!/usr/bin/env python3
# Generated reactive-charm hook stub: bootstraps the charm's bundled Python
# dependencies, registers config-state setup/teardown, then dispatches to
# the reactive framework. Import order is significant: bootstrap must run
# before charmhelpers/charms.reactive are imported.

# Load modules from $JUJU_CHARM_DIR/lib
import sys
sys.path.append('lib')

from charms.layer import basic  # noqa
basic.bootstrap_charm_deps()

from charmhelpers.core import hookenv  # noqa
hookenv.atstart(basic.init_config_states)
hookenv.atexit(basic.clear_config_states)


# This will load and run the appropriate @hook and other decorated
# handlers from $JUJU_CHARM_DIR/reactive, $JUJU_CHARM_DIR/hooks/reactive,
# and $JUJU_CHARM_DIR/hooks/relations.
#
# See https://jujucharms.com/docs/stable/authors-charm-building
# for more information on this pattern.
from charms.reactive import main  # noqa
main()
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
#!/usr/bin/env python3
# Generated reactive-charm hook stub: bootstraps the charm's bundled Python
# dependencies, registers config-state setup/teardown, then dispatches to
# the reactive framework. Import order is significant: bootstrap must run
# before charmhelpers/charms.reactive are imported.

# Load modules from $JUJU_CHARM_DIR/lib
import sys
sys.path.append('lib')

from charms.layer import basic  # noqa
basic.bootstrap_charm_deps()

from charmhelpers.core import hookenv  # noqa
hookenv.atstart(basic.init_config_states)
hookenv.atexit(basic.clear_config_states)


# This will load and run the appropriate @hook and other decorated
# handlers from $JUJU_CHARM_DIR/reactive, $JUJU_CHARM_DIR/hooks/reactive,
# and $JUJU_CHARM_DIR/hooks/relations.
#
# See https://jujucharms.com/docs/stable/authors-charm-building
# for more information on this pattern.
from charms.reactive import main  # noqa
main()
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
#!/usr/bin/env python3
# Generated reactive-charm hook stub: bootstraps the charm's bundled Python
# dependencies, registers config-state setup/teardown, then dispatches to
# the reactive framework. Import order is significant: bootstrap must run
# before charmhelpers/charms.reactive are imported.

# Load modules from $JUJU_CHARM_DIR/lib
import sys
sys.path.append('lib')

from charms.layer import basic  # noqa
basic.bootstrap_charm_deps()

from charmhelpers.core import hookenv  # noqa
hookenv.atstart(basic.init_config_states)
hookenv.atexit(basic.clear_config_states)


# This will load and run the appropriate @hook and other decorated
# handlers from $JUJU_CHARM_DIR/reactive, $JUJU_CHARM_DIR/hooks/reactive,
# and $JUJU_CHARM_DIR/hooks/relations.
#
# See https://jujucharms.com/docs/stable/authors-charm-building
# for more information on this pattern.
from charms.reactive import main  # noqa
main()
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
#!/usr/bin/env python3
# Generated reactive-charm hook stub: bootstraps the charm's bundled Python
# dependencies, registers config-state setup/teardown, then dispatches to
# the reactive framework. Import order is significant: bootstrap must run
# before charmhelpers/charms.reactive are imported.

# Load modules from $JUJU_CHARM_DIR/lib
import sys
sys.path.append('lib')

from charms.layer import basic  # noqa
basic.bootstrap_charm_deps()

from charmhelpers.core import hookenv  # noqa
hookenv.atstart(basic.init_config_states)
hookenv.atexit(basic.clear_config_states)


# This will load and run the appropriate @hook and other decorated
# handlers from $JUJU_CHARM_DIR/reactive, $JUJU_CHARM_DIR/hooks/reactive,
# and $JUJU_CHARM_DIR/hooks/relations.
#
# See https://jujucharms.com/docs/stable/authors-charm-building
# for more information on this pattern.
from charms.reactive import main  # noqa
main()
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
#!/usr/bin/env python3
# Generated reactive-charm hook stub: bootstraps the charm's bundled Python
# dependencies, registers config-state setup/teardown, then dispatches to
# the reactive framework. Import order is significant: bootstrap must run
# before charmhelpers/charms.reactive are imported.

# Load modules from $JUJU_CHARM_DIR/lib
import sys
sys.path.append('lib')

from charms.layer import basic  # noqa
basic.bootstrap_charm_deps()

from charmhelpers.core import hookenv  # noqa
hookenv.atstart(basic.init_config_states)
hookenv.atexit(basic.clear_config_states)


# This will load and run the appropriate @hook and other decorated
# handlers from $JUJU_CHARM_DIR/reactive, $JUJU_CHARM_DIR/hooks/reactive,
# and $JUJU_CHARM_DIR/hooks/relations.
#
# See https://jujucharms.com/docs/stable/authors-charm-building
# for more information on this pattern.
from charms.reactive import main  # noqa
main()
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
#!/usr/bin/env python3
# Generated reactive-charm hook stub: bootstraps the charm's bundled Python
# dependencies, registers config-state setup/teardown, then dispatches to
# the reactive framework. Import order is significant: bootstrap must run
# before charmhelpers/charms.reactive are imported.

# Load modules from $JUJU_CHARM_DIR/lib
import sys
sys.path.append('lib')

from charms.layer import basic  # noqa
basic.bootstrap_charm_deps()

from charmhelpers.core import hookenv  # noqa
hookenv.atstart(basic.init_config_states)
hookenv.atexit(basic.clear_config_states)


# This will load and run the appropriate @hook and other decorated
# handlers from $JUJU_CHARM_DIR/reactive, $JUJU_CHARM_DIR/hooks/reactive,
# and $JUJU_CHARM_DIR/hooks/relations.
#
# See https://jujucharms.com/docs/stable/authors-charm-building
# for more information on this pattern.
from charms.reactive import main  # noqa
main()
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
#!/usr/bin/env python3
# Generated reactive-charm hook stub: bootstraps the charm's bundled Python
# dependencies, registers config-state setup/teardown, then dispatches to
# the reactive framework. Import order is significant: bootstrap must run
# before charmhelpers/charms.reactive are imported.

# Load modules from $JUJU_CHARM_DIR/lib
import sys
sys.path.append('lib')

from charms.layer import basic  # noqa
basic.bootstrap_charm_deps()

from charmhelpers.core import hookenv  # noqa
hookenv.atstart(basic.init_config_states)
hookenv.atexit(basic.clear_config_states)


# This will load and run the appropriate @hook and other decorated
# handlers from $JUJU_CHARM_DIR/reactive, $JUJU_CHARM_DIR/hooks/reactive,
# and $JUJU_CHARM_DIR/hooks/relations.
#
# See https://jujucharms.com/docs/stable/authors-charm-building
# for more information on this pattern.
from charms.reactive import main  # noqa
main()
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
#!/usr/bin/env python3
# Generated reactive-charm hook stub: bootstraps the charm's bundled Python
# dependencies, registers config-state setup/teardown, then dispatches to
# the reactive framework. Import order is significant: bootstrap must run
# before charmhelpers/charms.reactive are imported.

# Load modules from $JUJU_CHARM_DIR/lib
import sys
sys.path.append('lib')

from charms.layer import basic  # noqa
basic.bootstrap_charm_deps()

from charmhelpers.core import hookenv  # noqa
hookenv.atstart(basic.init_config_states)
hookenv.atexit(basic.clear_config_states)


# This will load and run the appropriate @hook and other decorated
# handlers from $JUJU_CHARM_DIR/reactive, $JUJU_CHARM_DIR/hooks/reactive,
# and $JUJU_CHARM_DIR/hooks/relations.
#
# See https://jujucharms.com/docs/stable/authors-charm-building
# for more information on this pattern.
from charms.reactive import main  # noqa
main()
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
#!/usr/bin/env python3
# Generated reactive-charm hook stub: bootstraps the charm's bundled Python
# dependencies, registers config-state setup/teardown, then dispatches to
# the reactive framework. Import order is significant: bootstrap must run
# before charmhelpers/charms.reactive are imported.

# Load modules from $JUJU_CHARM_DIR/lib
import sys
sys.path.append('lib')

from charms.layer import basic  # noqa
basic.bootstrap_charm_deps()

from charmhelpers.core import hookenv  # noqa
hookenv.atstart(basic.init_config_states)
hookenv.atexit(basic.clear_config_states)


# This will load and run the appropriate @hook and other decorated
# handlers from $JUJU_CHARM_DIR/reactive, $JUJU_CHARM_DIR/hooks/reactive,
# and $JUJU_CHARM_DIR/hooks/relations.
#
# See https://jujucharms.com/docs/stable/authors-charm-building
# for more information on this pattern.
from charms.reactive import main  # noqa
main()
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
#!/usr/bin/env python3
# Generated reactive-charm hook stub: bootstraps the charm's bundled Python
# dependencies, registers config-state setup/teardown, then dispatches to
# the reactive framework. Import order is significant: bootstrap must run
# before charmhelpers/charms.reactive are imported.

# Load modules from $JUJU_CHARM_DIR/lib
import sys
sys.path.append('lib')

from charms.layer import basic  # noqa
basic.bootstrap_charm_deps()

from charmhelpers.core import hookenv  # noqa
hookenv.atstart(basic.init_config_states)
hookenv.atexit(basic.clear_config_states)


# This will load and run the appropriate @hook and other decorated
# handlers from $JUJU_CHARM_DIR/reactive, $JUJU_CHARM_DIR/hooks/reactive,
# and $JUJU_CHARM_DIR/hooks/relations.
#
# See https://jujucharms.com/docs/stable/authors-charm-building
# for more information on this pattern.
from charms.reactive import main  # noqa
main()
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
#!/usr/bin/env python3
# Generated reactive-charm hook stub: bootstraps the charm's bundled Python
# dependencies, registers config-state setup/teardown, then dispatches to
# the reactive framework. Import order is significant: bootstrap must run
# before charmhelpers/charms.reactive are imported.

# Load modules from $JUJU_CHARM_DIR/lib
import sys
sys.path.append('lib')

from charms.layer import basic  # noqa
basic.bootstrap_charm_deps()

from charmhelpers.core import hookenv  # noqa
hookenv.atstart(basic.init_config_states)
hookenv.atexit(basic.clear_config_states)


# This will load and run the appropriate @hook and other decorated
# handlers from $JUJU_CHARM_DIR/reactive, $JUJU_CHARM_DIR/hooks/reactive,
# and $JUJU_CHARM_DIR/hooks/relations.
#
# See https://jujucharms.com/docs/stable/authors-charm-building
# for more information on this pattern.
from charms.reactive import main  # noqa
main()
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
#!/usr/bin/env python3
# Generated reactive-charm hook stub: bootstraps the charm's bundled Python
# dependencies, registers config-state setup/teardown, then dispatches to
# the reactive framework. Import order is significant: bootstrap must run
# before charmhelpers/charms.reactive are imported.

# Load modules from $JUJU_CHARM_DIR/lib
import sys
sys.path.append('lib')

from charms.layer import basic  # noqa
basic.bootstrap_charm_deps()

from charmhelpers.core import hookenv  # noqa
hookenv.atstart(basic.init_config_states)
hookenv.atexit(basic.clear_config_states)


# This will load and run the appropriate @hook and other decorated
# handlers from $JUJU_CHARM_DIR/reactive, $JUJU_CHARM_DIR/hooks/reactive,
# and $JUJU_CHARM_DIR/hooks/relations.
#
# See https://jujucharms.com/docs/stable/authors-charm-building
# for more information on this pattern.
from charms.reactive import main  # noqa
main()
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
#!/usr/bin/env python3
# Generated reactive-charm hook stub: bootstraps the charm's bundled Python
# dependencies, registers config-state setup/teardown, then dispatches to
# the reactive framework. Import order is significant: bootstrap must run
# before charmhelpers/charms.reactive are imported.

# Load modules from $JUJU_CHARM_DIR/lib
import sys
sys.path.append('lib')

from charms.layer import basic  # noqa
basic.bootstrap_charm_deps()

from charmhelpers.core import hookenv  # noqa
hookenv.atstart(basic.init_config_states)
hookenv.atexit(basic.clear_config_states)


# This will load and run the appropriate @hook and other decorated
# handlers from $JUJU_CHARM_DIR/reactive, $JUJU_CHARM_DIR/hooks/reactive,
# and $JUJU_CHARM_DIR/hooks/relations.
#
# See https://jujucharms.com/docs/stable/authors-charm-building
# for more information on this pattern.
from charms.reactive import main  # noqa
main()
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
#!/usr/bin/env python3
# Generated reactive-charm hook stub: bootstraps the charm's bundled Python
# dependencies, registers config-state setup/teardown, then dispatches to
# the reactive framework. Import order is significant: bootstrap must run
# before charmhelpers/charms.reactive are imported.

# Load modules from $JUJU_CHARM_DIR/lib
import sys
sys.path.append('lib')

from charms.layer import basic  # noqa
basic.bootstrap_charm_deps()

from charmhelpers.core import hookenv  # noqa
hookenv.atstart(basic.init_config_states)
hookenv.atexit(basic.clear_config_states)


# This will load and run the appropriate @hook and other decorated
# handlers from $JUJU_CHARM_DIR/reactive, $JUJU_CHARM_DIR/hooks/reactive,
# and $JUJU_CHARM_DIR/hooks/relations.
#
# See https://jujucharms.com/docs/stable/authors-charm-building
# for more information on this pattern.
from charms.reactive import main  # noqa
main()
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
#!/usr/bin/env python3
# Generated reactive-charm hook stub: bootstraps the charm's bundled Python
# dependencies, registers config-state setup/teardown, then dispatches to
# the reactive framework. Import order is significant: bootstrap must run
# before charmhelpers/charms.reactive are imported.

# Load modules from $JUJU_CHARM_DIR/lib
import sys
sys.path.append('lib')

from charms.layer import basic  # noqa
basic.bootstrap_charm_deps()

from charmhelpers.core import hookenv  # noqa
hookenv.atstart(basic.init_config_states)
hookenv.atexit(basic.clear_config_states)


# This will load and run the appropriate @hook and other decorated
# handlers from $JUJU_CHARM_DIR/reactive, $JUJU_CHARM_DIR/hooks/reactive,
# and $JUJU_CHARM_DIR/hooks/relations.
#
# See https://jujucharms.com/docs/stable/authors-charm-building
# for more information on this pattern.
from charms.reactive import main  # noqa
main()
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
#!/usr/bin/env python3
# Generated reactive-charm hook stub: bootstraps the charm's bundled Python
# dependencies, registers config-state setup/teardown, then dispatches to
# the reactive framework. Import order is significant: bootstrap must run
# before charmhelpers/charms.reactive are imported.

# Load modules from $JUJU_CHARM_DIR/lib
import sys
sys.path.append('lib')

from charms.layer import basic  # noqa
basic.bootstrap_charm_deps()

from charmhelpers.core import hookenv  # noqa
hookenv.atstart(basic.init_config_states)
hookenv.atexit(basic.clear_config_states)


# This will load and run the appropriate @hook and other decorated
# handlers from $JUJU_CHARM_DIR/reactive, $JUJU_CHARM_DIR/hooks/reactive,
# and $JUJU_CHARM_DIR/hooks/relations.
#
# See https://jujucharms.com/docs/stable/authors-charm-building
# for more information on this pattern.
from charms.reactive import main  # noqa
main()
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
#!/usr/bin/env python3
# Generated reactive-charm hook stub: bootstraps the charm's bundled Python
# dependencies, registers config-state setup/teardown, then dispatches to
# the reactive framework. Import order is significant: bootstrap must run
# before charmhelpers/charms.reactive are imported.

# Load modules from $JUJU_CHARM_DIR/lib
import sys
sys.path.append('lib')

from charms.layer import basic  # noqa
basic.bootstrap_charm_deps()

from charmhelpers.core import hookenv  # noqa
hookenv.atstart(basic.init_config_states)
hookenv.atexit(basic.clear_config_states)


# This will load and run the appropriate @hook and other decorated
# handlers from $JUJU_CHARM_DIR/reactive, $JUJU_CHARM_DIR/hooks/reactive,
# and $JUJU_CHARM_DIR/hooks/relations.
#
# See https://jujucharms.com/docs/stable/authors-charm-building
# for more information on this pattern.
from charms.reactive import main  # noqa
main()
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
#!/usr/bin/env python3
# Generated reactive-charm hook stub: bootstraps the charm's bundled Python
# dependencies, registers config-state setup/teardown, then dispatches to
# the reactive framework. Import order is significant: bootstrap must run
# before charmhelpers/charms.reactive are imported.

# Load modules from $JUJU_CHARM_DIR/lib
import sys
sys.path.append('lib')

from charms.layer import basic  # noqa
basic.bootstrap_charm_deps()

from charmhelpers.core import hookenv  # noqa
hookenv.atstart(basic.init_config_states)
hookenv.atexit(basic.clear_config_states)


# This will load and run the appropriate @hook and other decorated
# handlers from $JUJU_CHARM_DIR/reactive, $JUJU_CHARM_DIR/hooks/reactive,
# and $JUJU_CHARM_DIR/hooks/relations.
#
# See https://jujucharms.com/docs/stable/authors-charm-building
# for more information on this pattern.
from charms.reactive import main  # noqa
main()
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
#!/usr/bin/env python3
# Generated reactive-charm hook stub: bootstraps the charm's bundled Python
# dependencies, registers config-state setup/teardown, then dispatches to
# the reactive framework. Import order is significant: bootstrap must run
# before charmhelpers/charms.reactive are imported.

# Load modules from $JUJU_CHARM_DIR/lib
import sys
sys.path.append('lib')

from charms.layer import basic  # noqa
basic.bootstrap_charm_deps()

from charmhelpers.core import hookenv  # noqa
hookenv.atstart(basic.init_config_states)
hookenv.atexit(basic.clear_config_states)


# This will load and run the appropriate @hook and other decorated
# handlers from $JUJU_CHARM_DIR/reactive, $JUJU_CHARM_DIR/hooks/reactive,
# and $JUJU_CHARM_DIR/hooks/relations.
#
# See https://jujucharms.com/docs/stable/authors-charm-building
# for more information on this pattern.
from charms.reactive import main  # noqa
main()
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
#!/usr/bin/env python3
# Generated reactive-charm hook stub: bootstraps the charm's bundled Python
# dependencies, registers config-state setup/teardown, then dispatches to
# the reactive framework. Import order is significant: bootstrap must run
# before charmhelpers/charms.reactive are imported.

# Load modules from $JUJU_CHARM_DIR/lib
import sys
sys.path.append('lib')

from charms.layer import basic  # noqa
basic.bootstrap_charm_deps()

from charmhelpers.core import hookenv  # noqa
hookenv.atstart(basic.init_config_states)
hookenv.atexit(basic.clear_config_states)


# This will load and run the appropriate @hook and other decorated
# handlers from $JUJU_CHARM_DIR/reactive, $JUJU_CHARM_DIR/hooks/reactive,
# and $JUJU_CHARM_DIR/hooks/relations.
#
# See https://jujucharms.com/docs/stable/authors-charm-building
# for more information on this pattern.
from charms.reactive import main  # noqa
main()
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
#!/usr/bin/env python3
# Generated reactive-charm hook stub: bootstraps the charm's bundled Python
# dependencies, registers config-state setup/teardown, then dispatches to
# the reactive framework. Import order is significant: bootstrap must run
# before charmhelpers/charms.reactive are imported.

# Load modules from $JUJU_CHARM_DIR/lib
import sys
sys.path.append('lib')

from charms.layer import basic  # noqa
basic.bootstrap_charm_deps()

from charmhelpers.core import hookenv  # noqa
hookenv.atstart(basic.init_config_states)
hookenv.atexit(basic.clear_config_states)


# This will load and run the appropriate @hook and other decorated
# handlers from $JUJU_CHARM_DIR/reactive, $JUJU_CHARM_DIR/hooks/reactive,
# and $JUJU_CHARM_DIR/hooks/relations.
#
# See https://jujucharms.com/docs/stable/authors-charm-building
# for more information on this pattern.
from charms.reactive import main  # noqa
main()
|
||||
|
|
@ -0,0 +1 @@
|
|||
.DS_Store
|
||||
|
|
@ -0,0 +1,89 @@
|
|||
# Overview
|
||||
|
||||
This interface layer handles the communication with Etcd via the `etcd`
|
||||
interface.
|
||||
|
||||
# Usage
|
||||
|
||||
## Requires
|
||||
|
||||
This interface layer will set the following states, as appropriate:
|
||||
|
||||
* `{relation_name}.connected` The relation is established, but Etcd may not
|
||||
yet have provided any connection or service information.
|
||||
|
||||
* `{relation_name}.available` Etcd has provided its connection string
|
||||
information, and is ready to serve as a KV store.
|
||||
The provided information can be accessed via the following methods:
|
||||
* `etcd.get_connection_string()`
|
||||
* `etcd.get_version()`
|
||||
* `{relation_name}.tls.available` Etcd has provided the connection string
|
||||
information, and the tls client credentials to communicate with it.
|
||||
The client credentials can be accessed via:
|
||||
* `{relation_name}.get_client_credentials()` returning a dictionary of
|
||||
the client certificate, key and CA.
|
||||
* `{relation_name}.save_client_credentials(key, cert, ca)` is a convenience
|
||||
method to save the client certificate, key and CA to files of your
|
||||
choosing.
|
||||
|
||||
|
||||
For example, a common application for this is configuring an applications
|
||||
backend key/value storage, like Docker.
|
||||
|
||||
```python
|
||||
@when('etcd.available', 'docker.available')
|
||||
def swarm_etcd_cluster_setup(etcd):
|
||||
con_string = etcd.connection_string().replace('http', 'etcd')
|
||||
opts = {}
|
||||
opts['connection_string'] = con_string
|
||||
render('docker-compose.yml', 'files/swarm/docker-compose.yml', opts)
|
||||
|
||||
```
|
||||
|
||||
|
||||
## Provides
|
||||
|
||||
A charm providing this interface is providing the etcd REST API service.
|
||||
|
||||
This interface layer will set the following states, as appropriate:
|
||||
|
||||
* `{relation_name}.connected` One or more clients of any type have
|
||||
been related. The charm should call the following methods to provide the
|
||||
appropriate information to the clients:
|
||||
|
||||
* `{relation_name}.set_connection_string(string, version)`
|
||||
* `{relation_name}.set_client_credentials(key, cert, ca)`
|
||||
|
||||
Example:
|
||||
|
||||
```python
|
||||
@when('db.connected')
|
||||
def send_connection_details(db):
|
||||
cert = leader_get('client_certificate')
|
||||
key = leader_get('client_key')
|
||||
ca = leader_get('certificate_authority')
|
||||
# Set the key, cert, and ca on the db relation
|
||||
db.set_client_credentials(key, cert, ca)
|
||||
|
||||
port = hookenv.config().get('port')
|
||||
# Get all the peers participating in the cluster relation.
|
||||
addresses = cluster.get_peer_addresses()
|
||||
connections = []
|
||||
for address in addresses:
|
||||
connections.append('http://{0}:{1}'.format(address, port))
|
||||
# Set the connection string on the db relation.
|
||||
db.set_connection_string(','.join(connections))
|
||||
```
|
||||
|
||||
|
||||
# Contact Information
|
||||
|
||||
### Maintainer
|
||||
- Charles Butler <charles.butler@canonical.com>
|
||||
|
||||
|
||||
# Etcd
|
||||
|
||||
- [Etcd](https://coreos.com/etcd/) home page
|
||||
- [Etcd bug trackers](https://github.com/coreos/etcd/issues)
|
||||
- [Etcd Juju Charm](http://jujucharms.com/?text=etcd)
|
||||
|
|
@ -0,0 +1,4 @@
|
|||
name: etcd
|
||||
summary: Interface for relating to ETCD
|
||||
version: 2
|
||||
maintainer: "Charles Butler <charles.butler@canonical.com>"
|
||||
|
|
@ -0,0 +1,70 @@
|
|||
#!/usr/bin/python
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from charms.reactive import RelationBase
|
||||
from charms.reactive import hook
|
||||
from charms.reactive import scopes
|
||||
|
||||
|
||||
class EtcdPeer(RelationBase):
    '''Track etcd peer units via reactive states so the charm can tell
    which peers have joined and which are departing.'''

    scope = scopes.UNIT

    @hook('{peers:etcd}-relation-joined')
    def peer_joined(self):
        '''Flag the joining unit so its later departure can be tracked.'''
        self.conversation().set_state('{relation_name}.joined')

    @hook('{peers:etcd}-relation-departed')
    def peers_going_away(self):
        '''Swap the joined flag for a departing flag on the leaving unit.

        The reactive layer uses the two flags together to work out which
        unit must be unregistered from the etcd cluster.
        '''
        conversation = self.conversation()
        conversation.remove_state('{relation_name}.joined')
        conversation.set_state('{relation_name}.departing')

    def dismiss(self):
        '''Clear the departing flag on every unit so normal operation
        can resume.'''
        for conversation in self.conversations():
            conversation.remove_state('{relation_name}.departing')

    def get_peers(self):
        '''Return the scope names of all peers participating in this
        conversation scope.'''
        return [conversation.scope for conversation in self.conversations()]

    def set_db_ingress_address(self, address):
        '''Set the ingress address belonging to the db relation.'''
        for conversation in self.conversations():
            conversation.set_remote('db-ingress-address', address)

    def get_db_ingress_addresses(self):
        '''Return a list of db ingress addresses'''
        # Keep only peers that have actually published an address.
        published = (conversation.get_remote('db-ingress-address')
                     for conversation in self.conversations())
        return [address for address in published if address]
|
||||
|
|
@ -0,0 +1,47 @@
|
|||
#!/usr/bin/python
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from charms.reactive import RelationBase
|
||||
from charms.reactive import hook
|
||||
from charms.reactive import scopes
|
||||
|
||||
|
||||
class EtcdProvider(RelationBase):
    '''Provides side of the etcd interface: publishes connection details
    and TLS client credentials to related client charms.'''

    scope = scopes.GLOBAL

    @hook('{provides:etcd}-relation-{joined,changed}')
    def joined_or_changed(self):
        ''' Set the connected state from the provides side of the relation. '''
        self.set_state('{relation_name}.connected')

    @hook('{provides:etcd}-relation-{broken,departed}')
    def broken_or_departed(self):
        '''Remove connected state from the provides side of the relation. '''
        conversation = self.conversation()
        # Only drop the state when the last remote unit is going away.
        if len(conversation.units) == 1:
            conversation.remove_state('{relation_name}.connected')

    def set_client_credentials(self, key, cert, ca):
        ''' Set the client credentials on the global conversation for this
        relation. '''
        for name, value in (('client_key', key),
                            ('client_ca', ca),
                            ('client_cert', cert)):
            self.set_remote(name, value)

    def set_connection_string(self, connection_string, version=''):
        ''' Set the connection string on the global conversation for this
        relation.

        Version was added as a late dependency for the 2 => 3 migration;
        when no version is given, consumers should presume etcd 2.x.
        '''
        self.set_remote('connection_string', connection_string)
        self.set_remote('version', version)
|
||||
|
|
@ -0,0 +1,80 @@
|
|||
#!/usr/bin/python
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
|
||||
from charms.reactive import RelationBase
|
||||
from charms.reactive import hook
|
||||
from charms.reactive import scopes
|
||||
|
||||
|
||||
class EtcdClient(RelationBase):
    '''Requires side of the etcd interface: tracks the connected /
    available / tls.available states and exposes the connection string
    and TLS client credentials published by the etcd charm.'''

    scope = scopes.GLOBAL

    @hook('{requires:etcd}-relation-{joined,changed}')
    def changed(self):
        ''' Indicate the relation is connected, and if the relation data is
        set it is also available. '''
        self.set_state('{relation_name}.connected')

        if self.get_connection_string():
            self.set_state('{relation_name}.available')
            # Get the ca, key, cert from the relation data.
            cert = self.get_client_credentials()
            # The tls state depends on the existence of the ca, key and cert.
            if cert['client_cert'] and cert['client_key'] and cert['client_ca']:  # noqa
                self.set_state('{relation_name}.tls.available')

    # Fix: the pattern previously read '{broken, departed}' — the stray
    # space meant the departed variant never matched a real hook name, so
    # the states were not cleared on departure. Matches the provides side.
    @hook('{requires:etcd}-relation-{broken,departed}')
    def broken(self):
        ''' Indicate the relation is no longer available and not connected. '''
        self.remove_state('{relation_name}.available')
        self.remove_state('{relation_name}.connected')
        self.remove_state('{relation_name}.tls.available')

    def connection_string(self):
        ''' This method is deprecated but ensures backward compatibility
        @see get_connection_string(self). '''
        return self.get_connection_string()

    def get_connection_string(self):
        ''' Return the connection string, if available, or None. '''
        return self.get_remote('connection_string')

    def get_version(self):
        ''' Return the version of the etcd protocol being used, or None. '''
        return self.get_remote('version')

    def get_client_credentials(self):
        ''' Return a dict with the client certificate, ca and key to
        communicate with etcd using tls. '''
        return {'client_cert': self.get_remote('client_cert'),
                'client_key': self.get_remote('client_key'),
                'client_ca': self.get_remote('client_ca')}

    def save_client_credentials(self, key, cert, ca):
        ''' Save all the client certificates for etcd to local files.

        Each argument is the destination *path* for that credential. '''
        self._save_remote_data('client_cert', cert)
        self._save_remote_data('client_key', key)
        self._save_remote_data('client_ca', ca)

    def _save_remote_data(self, key, path):
        ''' Save the remote data to a file indicated by path creating the
        parent directory if needed.'''
        value = self.get_remote(key)
        if value:
            parent = os.path.dirname(path)
            # Guard against a bare filename: dirname('') would make
            # os.makedirs('') raise FileNotFoundError.
            if parent and not os.path.isdir(parent):
                os.makedirs(parent)
            with open(path, 'w') as stream:
                stream.write(value)
|
||||
|
|
@ -0,0 +1,24 @@
|
|||
name: Test Suite for K8s Service Interface
|
||||
|
||||
on:
|
||||
- pull_request
|
||||
|
||||
jobs:
|
||||
lint-and-unit-tests:
|
||||
name: Lint & Unit tests
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
python: [3.6, 3.7, 3.8, 3.9]
|
||||
steps:
|
||||
- name: Check out code
|
||||
uses: actions/checkout@v2
|
||||
- name: Setup Python
|
||||
uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: ${{ matrix.python }}
|
||||
- name: Install Tox
|
||||
run: pip install tox
|
||||
- name: Run lint & unit tests
|
||||
run: tox
|
||||
|
||||
|
|
@ -0,0 +1,4 @@
|
|||
.DS_Store
|
||||
.tox
|
||||
__pycache__
|
||||
*.pyc
|
||||
|
|
@ -0,0 +1,6 @@
|
|||
name: kubernetes-cni
|
||||
summary: Interface for relating various CNI implementations
|
||||
version: 0
|
||||
maintainer: "George Kraft <george.kraft@canonical.com>"
|
||||
ignore:
|
||||
- tests
|
||||
|
|
@ -0,0 +1,89 @@
|
|||
#!/usr/bin/python
|
||||
|
||||
from charmhelpers.core import hookenv
|
||||
from charmhelpers.core.host import file_hash
|
||||
from charms.layer.kubernetes_common import kubeclientconfig_path
|
||||
from charms.reactive import Endpoint
|
||||
from charms.reactive import toggle_flag, is_flag_set, clear_flag, set_flag
|
||||
|
||||
|
||||
class CNIPluginProvider(Endpoint):
    """Provider side of the kubernetes-cni interface.

    Used by the kubernetes charms to exchange CNI configuration
    (pod CIDR, CNI conf file name) with CNI plugin charms such as
    flannel or calico.
    """

    def manage_flags(self):
        """Mirror relation state into reactive flags.

        Sets <endpoint>.connected while any unit is joined, and
        <endpoint>.available once every related application has
        published a complete CNI config.
        """
        toggle_flag(self.expand_name("{endpoint_name}.connected"), self.is_joined)
        toggle_flag(
            self.expand_name("{endpoint_name}.available"), self.config_available()
        )
        # Relation data changed: drop the 'configured' flag so the charm
        # re-runs its configuration handlers, then ack the change.
        if is_flag_set(self.expand_name("endpoint.{endpoint_name}.changed")):
            clear_flag(self.expand_name("{endpoint_name}.configured"))
            clear_flag(self.expand_name("endpoint.{endpoint_name}.changed"))

    def set_config(self, is_master):
        """Relays a dict of kubernetes configuration information."""
        # NOTE(review): to_publish_raw carries string values; is_master is
        # presumably serialized as "True"/"False" — confirm with callers.
        for relation in self.relations:
            relation.to_publish_raw.update({"is_master": is_master})
        set_flag(self.expand_name("{endpoint_name}.configured"))

    def config_available(self):
        """Ensures all config from the CNI plugin is available."""
        goal_state = hookenv.goal_state()
        # Keep application entries only; unit entries contain a '/'.
        related_apps = [
            app
            for app in goal_state.get("relations", {}).get(self.endpoint_name, "")
            if "/" not in app
        ]
        if not related_apps:
            return False
        configs = self.get_configs()
        # Every app named in goal-state must have published both keys.
        return all(
            "cidr" in config and "cni-conf-file" in config
            for config in [configs.get(related_app, {}) for related_app in related_apps]
        )

    def get_config(self, default=None):
        """Get CNI config for one related application.

        If default is specified, and there is a related application with a
        matching name, then that application is chosen. Otherwise, the
        application is chosen alphabetically.

        Whichever application is chosen, that application's CNI config is
        returned.
        """
        configs = self.get_configs()
        if not configs:
            return {}
        elif default and default not in configs:
            # Requested default has no relation; warn and fall back to
            # the alphabetical choice via a defaultless recursive call.
            msg = "relation not found for default CNI %s, ignoring" % default
            hookenv.log(msg, level="WARN")
            return self.get_config()
        elif default:
            return configs.get(default, {})
        else:
            return configs.get(sorted(configs)[0], {})

    def get_configs(self):
        """Get CNI configs for all related applications.

        This returns a mapping of application names to CNI configs. Here's an
        example return value:
        {
            'flannel': {
                'cidr': '10.1.0.0/16',
                'cni-conf-file': '10-flannel.conflist'
            },
            'calico': {
                'cidr': '192.168.0.0/16',
                'cni-conf-file': '10-calico.conflist'
            }
        }
        """
        return {
            relation.application_name: relation.joined_units.received_raw
            for relation in self.relations
            if relation.application_name
        }

    def notify_kubeconfig_changed(self):
        """Publish the current kubeconfig file hash on every relation.

        Lets the CNI plugin charms detect when the kubeconfig they should
        use has changed.
        """
        kubeconfig_hash = file_hash(kubeclientconfig_path)
        for relation in self.relations:
            relation.to_publish_raw.update({"kubeconfig-hash": kubeconfig_hash})
|
||||
|
|
@ -0,0 +1,54 @@
|
|||
#!/usr/bin/python
|
||||
|
||||
from charmhelpers.core import unitdata
|
||||
from charms.reactive import Endpoint
|
||||
from charms.reactive import when_any, when_not
|
||||
from charms.reactive import set_state, remove_state
|
||||
|
||||
db = unitdata.kv()
|
||||
|
||||
|
||||
class CNIPluginClient(Endpoint):
    """Requirer side of the kubernetes-cni interface.

    Implemented by CNI plugin charms (e.g. flannel) to receive the
    kubernetes role ("is_master") and publish CNI details back.
    """

    def manage_flags(self):
        """Track the kubeconfig hash published by kubernetes.

        Raises the kubeconfig.available / kubeconfig.changed states so
        the plugin can react when the kubeconfig file changes.
        """
        kubeconfig_hash = self.get_config().get("kubeconfig-hash")
        kubeconfig_hash_key = self.expand_name("{endpoint_name}.kubeconfig-hash")
        if kubeconfig_hash:
            set_state(self.expand_name("{endpoint_name}.kubeconfig.available"))
            # Compare against the last hash we saw in local unitdata.
            if kubeconfig_hash != db.get(kubeconfig_hash_key):
                set_state(self.expand_name("{endpoint_name}.kubeconfig.changed"))
            db.set(kubeconfig_hash_key, kubeconfig_hash)

    @when_any("endpoint.{endpoint_name}.joined", "endpoint.{endpoint_name}.changed")
    def changed(self):
        """Indicate the relation is connected, and if the relation data is
        set it is also available."""
        set_state(self.expand_name("{endpoint_name}.connected"))
        config = self.get_config()
        # Relation data is string-valued and 'is_master' may be absent
        # until the remote side publishes it; use .get() instead of
        # indexing so a half-established relation cannot KeyError and
        # crash the hook — it falls into the unconfigured branch instead.
        is_master = config.get("is_master")
        if is_master == "True":
            set_state(self.expand_name("{endpoint_name}.is-master"))
            set_state(self.expand_name("{endpoint_name}.configured"))
        elif is_master == "False":
            set_state(self.expand_name("{endpoint_name}.is-worker"))
            set_state(self.expand_name("{endpoint_name}.configured"))
        else:
            remove_state(self.expand_name("{endpoint_name}.configured"))
        remove_state(self.expand_name("endpoint.{endpoint_name}.changed"))

    @when_not("endpoint.{endpoint_name}.joined")
    def broken(self):
        """Indicate the relation is no longer available and not connected."""
        remove_state(self.expand_name("{endpoint_name}.connected"))
        remove_state(self.expand_name("{endpoint_name}.is-master"))
        remove_state(self.expand_name("{endpoint_name}.is-worker"))
        remove_state(self.expand_name("{endpoint_name}.configured"))

    def get_config(self):
        """Get the kubernetes configuration information."""
        return self.all_joined_units.received_raw

    def set_config(self, cidr, cni_conf_file):
        """Sets the CNI configuration information."""
        for relation in self.relations:
            relation.to_publish_raw.update(
                {"cidr": cidr, "cni-conf-file": cni_conf_file}
            )
|
||||
|
|
@ -0,0 +1,27 @@
|
|||
[tox]
|
||||
skipsdist = True
|
||||
envlist = lint,py3
|
||||
|
||||
[testenv]
|
||||
basepython = python3
|
||||
setenv =
|
||||
PYTHONPATH={toxinidir}:{toxinidir}/lib
|
||||
PYTHONBREAKPOINT=ipdb.set_trace
|
||||
deps =
|
||||
pyyaml
|
||||
pytest
|
||||
flake8
|
||||
black
|
||||
ipdb
|
||||
charms.unit_test
|
||||
commands = pytest --tb native -s {posargs}
|
||||
|
||||
[testenv:lint]
|
||||
envdir = {toxworkdir}/py3
|
||||
commands =
|
||||
flake8 {toxinidir}
|
||||
black --check {toxinidir}
|
||||
|
||||
[flake8]
|
||||
exclude=.tox
|
||||
max-line-length = 88
|
||||
|
|
@ -0,0 +1,66 @@
|
|||
# nrpe-external-master interface
|
||||
|
||||
Use this interface to register nagios checks in your charm layers.
|
||||
|
||||
## Purpose
|
||||
|
||||
This interface is designed to interoperate with the
|
||||
[nrpe-external-master](https://jujucharms.com/nrpe-external-master) subordinate charm.
|
||||
|
||||
## How to use in your layers
|
||||
|
||||
The event handler for `nrpe-external-master.available` is called with an object
|
||||
through which you can register your own custom nagios checks, when a relation
|
||||
is established with `nrpe-external-master:nrpe-external-master`.
|
||||
|
||||
This object provides a method,
|
||||
|
||||
_add_check_(args, name=_check_name_, description=_description_, context=_context_, unit=_unit_)
|
||||
|
||||
which is called to register a nagios plugin check for your service.
|
||||
|
||||
All arguments are required.
|
||||
|
||||
*args* is a list of nagios plugin command line arguments, starting with the path to the plugin executable.
|
||||
|
||||
*name* is the name of the check registered in nagios
|
||||
|
||||
*description* is some text that describes what the check is for and what it does
|
||||
|
||||
*context* is the nagios context name, something that identifies your application
|
||||
|
||||
*unit* is `hookenv.local_unit()`
|
||||
|
||||
The nrpe subordinate installs `check_http`, so you can use it like this:
|
||||
|
||||
```
|
||||
@when('nrpe-external-master.available')
|
||||
def setup_nagios(nagios):
|
||||
config = hookenv.config()
|
||||
unit_name = hookenv.local_unit()
|
||||
nagios.add_check(['/usr/lib/nagios/plugins/check_http',
|
||||
'-I', '127.0.0.1', '-p', str(config['port']),
|
||||
'-e', " 200 OK", '-u', '/publickey'],
|
||||
name="check_http",
|
||||
description="Verify my awesome service is responding",
|
||||
context=config["nagios_context"],
|
||||
unit=unit_name,
|
||||
)
|
||||
```
|
||||
If your `nagios.add_check` defines a custom plugin, you will also need to restart the `nagios-nrpe-server` service.
|
||||
|
||||
Consult the nagios documentation for more information on [how to write your own
|
||||
plugins](https://assets.nagios.com/downloads/nagioscore/docs/nagioscore/4/en/pluginapi.html)
|
||||
or [find one](https://www.nagios.org/projects/nagios-plugins/) that does what you need.
|
||||
|
||||
## Example deployment
|
||||
|
||||
```
|
||||
$ juju deploy your-awesome-charm
|
||||
$ juju deploy nrpe-external-master --config site-nagios.yaml
|
||||
$ juju add-relation your-awesome-charm nrpe-external-master
|
||||
```
|
||||
|
||||
where `site-nagios.yaml` has the necessary configuration settings for the
|
||||
subordinate to connect to nagios.
|
||||
|
||||
|
|
@ -0,0 +1,3 @@
|
|||
name: nrpe-external-master
|
||||
summary: Nagios interface
|
||||
version: 1
|
||||
|
|
@ -0,0 +1,91 @@
|
|||
import datetime
|
||||
import os
|
||||
|
||||
from charmhelpers.core import hookenv
|
||||
|
||||
from charms.reactive import hook
|
||||
from charms.reactive import RelationBase
|
||||
from charms.reactive import scopes
|
||||
|
||||
|
||||
class NrpeExternalMasterProvides(RelationBase):
    """Provides side of the nrpe-external-master interface.

    Lets a charm register nagios checks with the nrpe-external-master
    subordinate by writing NRPE command and nagios service definition
    files on disk.
    """

    # One relation state shared across all remote units.
    scope = scopes.GLOBAL

    @hook('{provides:nrpe-external-master}-relation-{joined,changed}')
    def changed_nrpe(self):
        # Relation established or data updated: expose availability.
        self.set_state('{relation_name}.available')

    @hook('{provides:nrpe-external-master}-relation-{broken,departed}')
    def broken_nrpe(self):
        self.remove_state('{relation_name}.available')

    def add_check(self, args, name=None, description=None, context=None,
                  servicegroups=None, unit=None):
        """Register a nagios plugin check for this service.

        args: list of nagios plugin command line arguments, starting with
            the path to the plugin executable.
        name: name of the check as registered in nagios.
        description: human-readable description of the check.
        context: nagios context; overridden by the remote
            'nagios_host_context' value when the subordinate provides one.
        servicegroups: nagios servicegroups; defaults to the context.
        unit: unit name; defaults to hookenv.local_unit().

        Writes /etc/nagios/nrpe.d/check_<name>.cfg and a service export
        file, and records both paths locally so removed() can clean up.
        """
        # Paths of files written so far, persisted in relation-local data.
        nagios_files = self.get_local('nagios.check.files', [])

        if not unit:
            unit = hookenv.local_unit()
        # Nagios object names cannot contain '/'.
        unit = unit.replace('/', '-')
        context = self.get_remote('nagios_host_context', context)
        host_name = self.get_remote('nagios_hostname',
                                    '%s-%s' % (context, unit))

        check_tmpl = """
#---------------------------------------------------
# This file is Juju managed
#---------------------------------------------------
command[%(check_name)s]=%(check_args)s
"""
        service_tmpl = """
#---------------------------------------------------
# This file is Juju managed
#---------------------------------------------------
define service {
    use active-service
    host_name %(host_name)s
    service_description %(description)s
    check_command check_nrpe!%(check_name)s
    servicegroups %(servicegroups)s
}
"""
        # NRPE command definition consumed by the nrpe daemon.
        check_filename = "/etc/nagios/nrpe.d/check_%s.cfg" % (name)
        with open(check_filename, "w") as fh:
            fh.write(check_tmpl % {
                'check_args': ' '.join(args),
                'check_name': name,
            })
        nagios_files.append(check_filename)

        # Service definition exported for the nagios master to pick up.
        # NOTE(review): 'context' and 'unit_name' are passed to the
        # template but service_tmpl does not reference them — presumably
        # harmless extras; confirm before pruning.
        service_filename = "/var/lib/nagios/export/service__%s_%s.cfg" % (
            unit, name)
        with open(service_filename, "w") as fh:
            fh.write(service_tmpl % {
                'servicegroups': servicegroups or context,
                'context': context,
                'description': description,
                'check_name': name,
                'host_name': host_name,
                'unit_name': unit,
            })
        nagios_files.append(service_filename)

        self.set_local('nagios.check.files', nagios_files)

    def removed(self):
        """Best-effort removal of every file add_check() created."""
        files = self.get_local('nagios.check.files', [])
        for f in files:
            try:
                os.unlink(f)
            except Exception as e:
                # Deliberate best-effort: log and keep deleting the rest.
                hookenv.log("failed to remove %s: %s" % (f, e))
        self.set_local('nagios.check.files', [])
        self.remove_state('{relation_name}.removed')

    def added(self):
        # New relation behaves the same as an update.
        self.updated()

    def updated(self):
        """Publish a fresh timestamp so the remote side sees a change."""
        relation_info = {
            # NOTE(review): naive local time, not UTC — confirm the
            # consumer only uses this as a change marker.
            'timestamp': datetime.datetime.now().isoformat(),
        }
        self.set_remote(**relation_info)
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
#!/usr/bin/env python3
# Standard reactive charm hook stub: bootstrap the charm's Python
# dependencies, then hand control to the charms.reactive dispatcher.

# Load modules from $JUJU_CHARM_DIR/lib
import sys
sys.path.append('lib')

from charms.layer import basic  # noqa
# Install/activate the charm's bundled dependencies before any other
# charm imports happen.
basic.bootstrap_charm_deps()

from charmhelpers.core import hookenv  # noqa
# Maintain config.changed-style states around this hook invocation.
hookenv.atstart(basic.init_config_states)
hookenv.atexit(basic.clear_config_states)


# This will load and run the appropriate @hook and other decorated
# handlers from $JUJU_CHARM_DIR/reactive, $JUJU_CHARM_DIR/hooks/reactive,
# and $JUJU_CHARM_DIR/hooks/relations.
#
# See https://jujucharms.com/docs/stable/authors-charm-building
# for more information on this pattern.
from charms.reactive import main  # noqa
main()
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
#!/usr/bin/env python3
# Standard reactive charm hook stub: bootstrap the charm's Python
# dependencies, then hand control to the charms.reactive dispatcher.

# Load modules from $JUJU_CHARM_DIR/lib
import sys
sys.path.append('lib')

from charms.layer import basic  # noqa
# Install/activate the charm's bundled dependencies before any other
# charm imports happen.
basic.bootstrap_charm_deps()

from charmhelpers.core import hookenv  # noqa
# Maintain config.changed-style states around this hook invocation.
hookenv.atstart(basic.init_config_states)
hookenv.atexit(basic.clear_config_states)


# This will load and run the appropriate @hook and other decorated
# handlers from $JUJU_CHARM_DIR/reactive, $JUJU_CHARM_DIR/hooks/reactive,
# and $JUJU_CHARM_DIR/hooks/relations.
#
# See https://jujucharms.com/docs/stable/authors-charm-building
# for more information on this pattern.
from charms.reactive import main  # noqa
main()
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
#!/usr/bin/env python3
# Standard reactive charm hook stub: bootstrap the charm's Python
# dependencies, then hand control to the charms.reactive dispatcher.

# Load modules from $JUJU_CHARM_DIR/lib
import sys
sys.path.append('lib')

from charms.layer import basic  # noqa
# Install/activate the charm's bundled dependencies before any other
# charm imports happen.
basic.bootstrap_charm_deps()

from charmhelpers.core import hookenv  # noqa
# Maintain config.changed-style states around this hook invocation.
hookenv.atstart(basic.init_config_states)
hookenv.atexit(basic.clear_config_states)


# This will load and run the appropriate @hook and other decorated
# handlers from $JUJU_CHARM_DIR/reactive, $JUJU_CHARM_DIR/hooks/reactive,
# and $JUJU_CHARM_DIR/hooks/relations.
#
# See https://jujucharms.com/docs/stable/authors-charm-building
# for more information on this pattern.
from charms.reactive import main  # noqa
main()
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
#!/usr/bin/env python3
# Standard reactive charm hook stub: bootstrap the charm's Python
# dependencies, then hand control to the charms.reactive dispatcher.

# Load modules from $JUJU_CHARM_DIR/lib
import sys
sys.path.append('lib')

from charms.layer import basic  # noqa
# Install/activate the charm's bundled dependencies before any other
# charm imports happen.
basic.bootstrap_charm_deps()

from charmhelpers.core import hookenv  # noqa
# Maintain config.changed-style states around this hook invocation.
hookenv.atstart(basic.init_config_states)
hookenv.atexit(basic.clear_config_states)


# This will load and run the appropriate @hook and other decorated
# handlers from $JUJU_CHARM_DIR/reactive, $JUJU_CHARM_DIR/hooks/reactive,
# and $JUJU_CHARM_DIR/hooks/relations.
#
# See https://jujucharms.com/docs/stable/authors-charm-building
# for more information on this pattern.
from charms.reactive import main  # noqa
main()
|
||||
|
|
@ -0,0 +1,357 @@
|
|||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<!-- Created with Inkscape (http://www.inkscape.org/) -->
|
||||
|
||||
<svg
|
||||
xmlns:dc="http://purl.org/dc/elements/1.1/"
|
||||
xmlns:cc="http://creativecommons.org/ns#"
|
||||
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
|
||||
xmlns:svg="http://www.w3.org/2000/svg"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:xlink="http://www.w3.org/1999/xlink"
|
||||
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||
width="96"
|
||||
height="96"
|
||||
id="svg6517"
|
||||
version="1.1"
|
||||
inkscape:version="0.91 r13725"
|
||||
sodipodi:docname="flannel_circle.svg"
|
||||
viewBox="0 0 96 96">
|
||||
<defs
|
||||
id="defs6519">
|
||||
<linearGradient
|
||||
id="Background">
|
||||
<stop
|
||||
id="stop4178"
|
||||
offset="0"
|
||||
style="stop-color:#22779e;stop-opacity:1" />
|
||||
<stop
|
||||
id="stop4180"
|
||||
offset="1"
|
||||
style="stop-color:#2991c0;stop-opacity:1" />
|
||||
</linearGradient>
|
||||
<filter
|
||||
style="color-interpolation-filters:sRGB"
|
||||
inkscape:label="Inner Shadow"
|
||||
id="filter1121">
|
||||
<feFlood
|
||||
flood-opacity="0.59999999999999998"
|
||||
flood-color="rgb(0,0,0)"
|
||||
result="flood"
|
||||
id="feFlood1123" />
|
||||
<feComposite
|
||||
in="flood"
|
||||
in2="SourceGraphic"
|
||||
operator="out"
|
||||
result="composite1"
|
||||
id="feComposite1125" />
|
||||
<feGaussianBlur
|
||||
in="composite1"
|
||||
stdDeviation="1"
|
||||
result="blur"
|
||||
id="feGaussianBlur1127" />
|
||||
<feOffset
|
||||
dx="0"
|
||||
dy="2"
|
||||
result="offset"
|
||||
id="feOffset1129" />
|
||||
<feComposite
|
||||
in="offset"
|
||||
in2="SourceGraphic"
|
||||
operator="atop"
|
||||
result="composite2"
|
||||
id="feComposite1131" />
|
||||
</filter>
|
||||
<filter
|
||||
style="color-interpolation-filters:sRGB"
|
||||
inkscape:label="Drop Shadow"
|
||||
id="filter950">
|
||||
<feFlood
|
||||
flood-opacity="0.25"
|
||||
flood-color="rgb(0,0,0)"
|
||||
result="flood"
|
||||
id="feFlood952" />
|
||||
<feComposite
|
||||
in="flood"
|
||||
in2="SourceGraphic"
|
||||
operator="in"
|
||||
result="composite1"
|
||||
id="feComposite954" />
|
||||
<feGaussianBlur
|
||||
in="composite1"
|
||||
stdDeviation="1"
|
||||
result="blur"
|
||||
id="feGaussianBlur956" />
|
||||
<feOffset
|
||||
dx="0"
|
||||
dy="1"
|
||||
result="offset"
|
||||
id="feOffset958" />
|
||||
<feComposite
|
||||
in="SourceGraphic"
|
||||
in2="offset"
|
||||
operator="over"
|
||||
result="composite2"
|
||||
id="feComposite960" />
|
||||
</filter>
|
||||
<clipPath
|
||||
clipPathUnits="userSpaceOnUse"
|
||||
id="clipPath873">
|
||||
<g
|
||||
transform="matrix(0,-0.66666667,0.66604479,0,-258.25992,677.00001)"
|
||||
id="g875"
|
||||
inkscape:label="Layer 1"
|
||||
style="display:inline;fill:#ff00ff;fill-opacity:1;stroke:none">
|
||||
<path
|
||||
style="display:inline;fill:#ff00ff;fill-opacity:1;stroke:none"
|
||||
d="M 46.702703,898.22775 H 97.297297 C 138.16216,898.22775 144,904.06497 144,944.92583 v 50.73846 c 0,40.86071 -5.83784,46.69791 -46.702703,46.69791 H 46.702703 C 5.8378378,1042.3622 0,1036.525 0,995.66429 v -50.73846 c 0,-40.86086 5.8378378,-46.69808 46.702703,-46.69808 z"
|
||||
id="path877"
|
||||
inkscape:connector-curvature="0"
|
||||
sodipodi:nodetypes="sssssssss" />
|
||||
</g>
|
||||
</clipPath>
|
||||
<style
|
||||
id="style867"
|
||||
type="text/css"><![CDATA[
|
||||
.fil0 {fill:#1F1A17}
|
||||
]]></style>
|
||||
<clipPath
|
||||
id="clipPath16">
|
||||
<path
|
||||
id="path18"
|
||||
d="M -9,-9 H 605 V 222 H -9 Z"
|
||||
inkscape:connector-curvature="0" />
|
||||
</clipPath>
|
||||
<clipPath
|
||||
id="clipPath116">
|
||||
<path
|
||||
id="path118"
|
||||
d="m 91.7368,146.3253 -9.7039,-1.577 -8.8548,-3.8814 -7.5206,-4.7308 -7.1566,-8.7335 -4.0431,-4.282 -3.9093,-1.4409 -1.034,2.5271 1.8079,2.6096 0.4062,3.6802 1.211,-0.0488 1.3232,-1.2069 -0.3569,3.7488 -1.4667,0.9839 0.0445,1.4286 -3.4744,-1.9655 -3.1462,-3.712 -0.6559,-3.3176 1.3453,-2.6567 1.2549,-4.5133 2.5521,-1.2084 2.6847,0.1318 2.5455,1.4791 -1.698,-8.6122 1.698,-9.5825 -1.8692,-4.4246 -6.1223,-6.5965 1.0885,-3.941 2.9002,-4.5669 5.4688,-3.8486 2.9007,-0.3969 3.225,-0.1094 -2.012,-8.2601 7.3993,-3.0326 9.2188,-1.2129 3.1535,2.0619 0.2427,5.5797 3.5178,5.8224 0.2426,4.6094 8.4909,-0.6066 7.8843,0.7279 -7.8843,-4.7307 1.3343,-5.701 4.9731,-7.763 4.8521,-2.0622 3.8814,1.5769 1.577,3.1538 8.1269,6.1861 1.5769,-1.3343 12.7363,-0.485 2.5473,2.0619 0.2426,3.6391 -0.849,1.5767 -0.6066,9.8251 -4.2454,8.4909 0.7276,3.7605 2.5475,-1.3343 7.1566,-6.6716 3.5175,-0.2424 3.8815,1.5769 3.8818,2.9109 1.9406,6.3077 11.4021,-0.7277 6.914,2.6686 5.5797,5.2157 4.0028,7.5206 0.9706,8.8546 -0.8493,10.3105 -2.1832,9.2185 -2.1836,2.9112 -3.0322,0.9706 -5.3373,-5.8224 -4.8518,-1.6982 -4.2455,7.0353 -4.2454,3.8815 -2.3049,1.4556 -9.2185,7.6419 -7.3993,4.0028 -7.3993,0.6066 -8.6119,-1.4556 -7.5206,-2.7899 -5.2158,-4.2454 -4.1241,-4.9734 -4.2454,-1.2129"
|
||||
inkscape:connector-curvature="0" />
|
||||
</clipPath>
|
||||
<clipPath
|
||||
id="clipPath128">
|
||||
<path
|
||||
id="path130"
|
||||
d="m 91.7368,146.3253 -9.7039,-1.577 -8.8548,-3.8814 -7.5206,-4.7308 -7.1566,-8.7335 -4.0431,-4.282 -3.9093,-1.4409 -1.034,2.5271 1.8079,2.6096 0.4062,3.6802 1.211,-0.0488 1.3232,-1.2069 -0.3569,3.7488 -1.4667,0.9839 0.0445,1.4286 -3.4744,-1.9655 -3.1462,-3.712 -0.6559,-3.3176 1.3453,-2.6567 1.2549,-4.5133 2.5521,-1.2084 2.6847,0.1318 2.5455,1.4791 -1.698,-8.6122 1.698,-9.5825 -1.8692,-4.4246 -6.1223,-6.5965 1.0885,-3.941 2.9002,-4.5669 5.4688,-3.8486 2.9007,-0.3969 3.225,-0.1094 -2.012,-8.2601 7.3993,-3.0326 9.2188,-1.2129 3.1535,2.0619 0.2427,5.5797 3.5178,5.8224 0.2426,4.6094 8.4909,-0.6066 7.8843,0.7279 -7.8843,-4.7307 1.3343,-5.701 4.9731,-7.763 4.8521,-2.0622 3.8814,1.5769 1.577,3.1538 8.1269,6.1861 1.5769,-1.3343 12.7363,-0.485 2.5473,2.0619 0.2426,3.6391 -0.849,1.5767 -0.6066,9.8251 -4.2454,8.4909 0.7276,3.7605 2.5475,-1.3343 7.1566,-6.6716 3.5175,-0.2424 3.8815,1.5769 3.8818,2.9109 1.9406,6.3077 11.4021,-0.7277 6.914,2.6686 5.5797,5.2157 4.0028,7.5206 0.9706,8.8546 -0.8493,10.3105 -2.1832,9.2185 -2.1836,2.9112 -3.0322,0.9706 -5.3373,-5.8224 -4.8518,-1.6982 -4.2455,7.0353 -4.2454,3.8815 -2.3049,1.4556 -9.2185,7.6419 -7.3993,4.0028 -7.3993,0.6066 -8.6119,-1.4556 -7.5206,-2.7899 -5.2158,-4.2454 -4.1241,-4.9734 -4.2454,-1.2129"
|
||||
inkscape:connector-curvature="0" />
|
||||
</clipPath>
|
||||
<linearGradient
|
||||
id="linearGradient3850"
|
||||
inkscape:collect="always">
|
||||
<stop
|
||||
id="stop3852"
|
||||
offset="0"
|
||||
style="stop-color:#000000;stop-opacity:1;" />
|
||||
<stop
|
||||
id="stop3854"
|
||||
offset="1"
|
||||
style="stop-color:#000000;stop-opacity:0;" />
|
||||
</linearGradient>
|
||||
<clipPath
|
||||
clipPathUnits="userSpaceOnUse"
|
||||
id="clipPath3095">
|
||||
<path
|
||||
d="M 976.648,389.551 H 134.246 V 1229.55 H 976.648 V 389.551"
|
||||
id="path3097"
|
||||
inkscape:connector-curvature="0" />
|
||||
</clipPath>
|
||||
<clipPath
|
||||
clipPathUnits="userSpaceOnUse"
|
||||
id="clipPath3195">
|
||||
<path
|
||||
d="m 611.836,756.738 -106.34,105.207 c -8.473,8.289 -13.617,20.102 -13.598,33.379 L 598.301,790.207 c -0.031,-13.418 5.094,-25.031 13.535,-33.469"
|
||||
id="path3197"
|
||||
inkscape:connector-curvature="0" />
|
||||
</clipPath>
|
||||
<clipPath
|
||||
clipPathUnits="userSpaceOnUse"
|
||||
id="clipPath3235">
|
||||
<path
|
||||
d="m 1095.64,1501.81 c 35.46,-35.07 70.89,-70.11 106.35,-105.17 4.4,-4.38 7.11,-10.53 7.11,-17.55 l -106.37,105.21 c 0,7 -2.71,13.11 -7.09,17.51"
|
||||
id="path3237"
|
||||
inkscape:connector-curvature="0" />
|
||||
</clipPath>
|
||||
<clipPath
|
||||
id="clipPath4591"
|
||||
clipPathUnits="userSpaceOnUse">
|
||||
<path
|
||||
inkscape:connector-curvature="0"
|
||||
d="m 1106.6009,730.43734 -0.036,21.648 c -0.01,3.50825 -2.8675,6.61375 -6.4037,6.92525 l -83.6503,7.33162 c -3.5205,0.30763 -6.3812,-2.29987 -6.3671,-5.8145 l 0.036,-21.6475 20.1171,-1.76662 -0.011,4.63775 c 0,1.83937 1.4844,3.19925 3.3262,3.0395 l 49.5274,-4.33975 c 1.8425,-0.166 3.3425,-1.78125 3.3538,-3.626 l 0.01,-4.63025 20.1,-1.7575"
|
||||
style="fill:#ff00ff;fill-opacity:1;fill-rule:nonzero;stroke:none"
|
||||
id="path4593" />
|
||||
</clipPath>
|
||||
<radialGradient
|
||||
gradientUnits="userSpaceOnUse"
|
||||
gradientTransform="matrix(-1.4333926,-2.2742838,1.1731823,-0.73941125,-174.08025,98.374394)"
|
||||
r="20.40658"
|
||||
fy="93.399292"
|
||||
fx="-26.508606"
|
||||
cy="93.399292"
|
||||
cx="-26.508606"
|
||||
id="radialGradient3856"
|
||||
xlink:href="#linearGradient3850"
|
||||
inkscape:collect="always" />
|
||||
<linearGradient
|
||||
gradientTransform="translate(-318.48033,212.32022)"
|
||||
gradientUnits="userSpaceOnUse"
|
||||
y2="993.19702"
|
||||
x2="-51.879555"
|
||||
y1="593.11615"
|
||||
x1="348.20132"
|
||||
id="linearGradient3895"
|
||||
xlink:href="#linearGradient3850"
|
||||
inkscape:collect="always" />
|
||||
<clipPath
|
||||
id="clipPath3906"
|
||||
clipPathUnits="userSpaceOnUse">
|
||||
<rect
|
||||
transform="scale(1,-1)"
|
||||
style="color:#000000;display:inline;overflow:visible;visibility:visible;opacity:0.8;fill:#ff00ff;stroke:none;stroke-width:4;marker:none;enable-background:accumulate"
|
||||
id="rect3908"
|
||||
width="1019.1371"
|
||||
height="1019.1371"
|
||||
x="357.9816"
|
||||
y="-1725.8152" />
|
||||
</clipPath>
|
||||
</defs>
|
||||
<sodipodi:namedview
|
||||
id="base"
|
||||
pagecolor="#ffffff"
|
||||
bordercolor="#666666"
|
||||
borderopacity="1.0"
|
||||
inkscape:pageopacity="0.0"
|
||||
inkscape:pageshadow="2"
|
||||
inkscape:zoom="4.074536"
|
||||
inkscape:cx="-140.43595"
|
||||
inkscape:cy="32.11876"
|
||||
inkscape:document-units="px"
|
||||
inkscape:current-layer="layer1"
|
||||
showgrid="true"
|
||||
fit-margin-top="0"
|
||||
fit-margin-left="0"
|
||||
fit-margin-right="0"
|
||||
fit-margin-bottom="0"
|
||||
inkscape:window-width="1920"
|
||||
inkscape:window-height="1029"
|
||||
inkscape:window-x="0"
|
||||
inkscape:window-y="24"
|
||||
inkscape:window-maximized="1"
|
||||
showborder="true"
|
||||
showguides="false"
|
||||
inkscape:guide-bbox="true"
|
||||
inkscape:showpageshadow="false"
|
||||
inkscape:snap-global="false"
|
||||
inkscape:snap-bbox="true"
|
||||
inkscape:bbox-paths="true"
|
||||
inkscape:bbox-nodes="true"
|
||||
inkscape:snap-bbox-edge-midpoints="true"
|
||||
inkscape:snap-bbox-midpoints="true"
|
||||
inkscape:object-paths="true"
|
||||
inkscape:snap-intersection-paths="true"
|
||||
inkscape:object-nodes="true"
|
||||
inkscape:snap-smooth-nodes="true"
|
||||
inkscape:snap-midpoints="true"
|
||||
inkscape:snap-object-midpoints="true"
|
||||
inkscape:snap-center="true"
|
||||
inkscape:snap-nodes="true"
|
||||
inkscape:snap-others="true"
|
||||
inkscape:snap-page="true">
|
||||
<inkscape:grid
|
||||
type="xygrid"
|
||||
id="grid821" />
|
||||
<sodipodi:guide
|
||||
orientation="1,0"
|
||||
position="16,48"
|
||||
id="guide823"
|
||||
inkscape:locked="false" />
|
||||
<sodipodi:guide
|
||||
orientation="0,1"
|
||||
position="64,80"
|
||||
id="guide825"
|
||||
inkscape:locked="false" />
|
||||
<sodipodi:guide
|
||||
orientation="1,0"
|
||||
position="80,40"
|
||||
id="guide827"
|
||||
inkscape:locked="false" />
|
||||
<sodipodi:guide
|
||||
orientation="0,1"
|
||||
position="64,16"
|
||||
id="guide829"
|
||||
inkscape:locked="false" />
|
||||
</sodipodi:namedview>
|
||||
<metadata
|
||||
id="metadata6522">
|
||||
<rdf:RDF>
|
||||
<cc:Work
|
||||
rdf:about="">
|
||||
<dc:format>image/svg+xml</dc:format>
|
||||
<dc:type
|
||||
rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
|
||||
<dc:title />
|
||||
</cc:Work>
|
||||
</rdf:RDF>
|
||||
</metadata>
|
||||
<g
|
||||
inkscape:label="BACKGROUND"
|
||||
inkscape:groupmode="layer"
|
||||
id="layer1"
|
||||
transform="translate(268,-635.29076)"
|
||||
style="display:inline">
|
||||
<path
|
||||
style="display:inline;fill:#ffffff;fill-opacity:1;stroke:none"
|
||||
d="M 48 0 A 48 48 0 0 0 0 48 A 48 48 0 0 0 48 96 A 48 48 0 0 0 96 48 A 48 48 0 0 0 48 0 z "
|
||||
id="path6455"
|
||||
transform="translate(-268,635.29076)" />
|
||||
<path
|
||||
inkscape:connector-curvature="0"
|
||||
style="display:inline;fill:#53a2da;fill-opacity:1;stroke:none"
|
||||
d="m -220,635.29076 a 48,48 0 0 0 -48,48 48,48 0 0 0 48,48 48,48 0 0 0 48,-48 48,48 0 0 0 -48,-48 z"
|
||||
id="path6455-3" />
|
||||
<path
|
||||
style="color:#000000;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:medium;line-height:normal;font-family:sans-serif;text-indent:0;text-align:start;text-decoration:none;text-decoration-line:none;text-decoration-style:solid;text-decoration-color:#000000;letter-spacing:normal;word-spacing:normal;text-transform:none;direction:ltr;block-progression:tb;writing-mode:lr-tb;baseline-shift:baseline;text-anchor:start;white-space:normal;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:1;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;fill:#ffffff;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:19.79999924;stroke-linecap:round;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;marker:none;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate"
|
||||
d="m -210.73109,647.92331 c -12.45421,0 -22.59379,10.13959 -22.59379,22.59379 l 0,7.0975 0,3.7238 a 0.6425535,0.6425535 0 0 0 0.6425,0.64249 l 6.36089,0 a 0.6425535,0.6425535 0 0 0 0.64248,-0.64249 l 0,-3.7238 0,-7.0975 c 0,-8.35215 6.59577,-14.94791 14.94792,-14.94791 l 4.27719,0 a 0.6425535,0.6425535 0 0 0 0.6425,-0.64249 l 0,-6.36089 a 0.6425535,0.6425535 0 0 0 -0.6425,-0.6425 l -4.27719,0 z m 0.34885,10.87777 c -6.4289,0 -11.73385,5.15219 -11.98518,11.52529 a 0.6425535,0.6425535 0 0 0 0.64186,0.66757 l 6.37094,0 a 0.6425535,0.6425535 0 0 0 0.63997,-0.58915 c 0.18997,-2.28704 1.97407,-3.95846 4.33241,-3.95846 l 3.92834,0 a 0.6425535,0.6425535 0 0 0 0.6425,-0.64249 l 0,-6.36027 a 0.6425535,0.6425535 0 0 0 -0.6425,-0.64249 l -3.92834,0 z m -29.65426,13.29841 a 0.6425535,0.6425535 0 0 0 -0.63309,0.53142 l -1.12436,6.4249 a 0.6425535,0.6425535 0 0 0 0.63309,0.75354 l 6.09674,0 a 0.6425535,0.6425535 0 0 0 0.64249,-0.64249 l 0,-6.42488 a 0.6425535,0.6425535 0 0 0 -0.64249,-0.64249 l -4.97238,0 z m 16.17454,0 a 0.6425535,0.6425535 0 0 0 -0.6425,0.64249 l 0,6.42488 a 0.6425535,0.6425535 0 0 0 0.6425,0.64249 l 17.82593,0 a 0.6425535,0.6425535 0 0 0 0.63309,-0.53144 l 1.12434,-6.42488 a 0.6425535,0.6425535 0 0 0 -0.63307,-0.75354 l -18.95029,0 z m 2.1251,8.81164 a 0.6425535,0.6425535 0 0 0 -0.64248,0.64248 l 0,24.03939 c 0,4.13118 -1.61958,7.82464 -4.26277,10.51198 a 0.6425535,0.6425535 0 0 0 0.004,0.90475 l 4.4604,4.46041 a 0.6425535,0.6425535 0 0 0 0.91103,-0.003 c 4.03527,-4.08214 6.53344,-9.69461 6.53344,-15.87463 l 0,-24.0394 a 0.6425535,0.6425535 0 0 0 -0.6425,-0.64248 l -6.36089,0 z m -20.22711,2.14266 a 0.6425535,0.6425535 0 0 0 -0.63309,0.53144 l -1.12434,6.4249 a 0.6425535,0.6425535 0 0 0 0.63307,0.75355 l 18.94653,0 a 0.6425535,0.6425535 0 0 0 0.64249,-0.6425 l 0,-6.42489 a 0.6425535,0.6425535 0 0 0 -0.64249,-0.6425 l -17.82217,0 z m 29.02432,0 a 0.6425535,0.6425535 0 0 0 -0.64248,0.6425 l 0,6.42489 a 0.6425535,0.6425535 0 0 0 0.64248,0.6425 l 4.97615,0 a 
0.6425535,0.6425535 0 0 0 0.63309,-0.53144 l 1.12436,-6.4249 a 0.6425535,0.6425535 0 0 0 -0.63309,-0.75355 l -6.10051,0 z m -19.74273,8.84365 a 0.6425535,0.6425535 0 0 0 -0.6425,0.64249 l 0,12.77198 c 0,1.18698 -0.43422,2.21835 -1.14883,2.9872 a 0.6425535,0.6425535 0 0 0 0.0163,0.89157 l 4.46355,4.46355 a 0.6425535,0.6425535 0 0 0 0.91542,-0.007 c 2.09711,-2.162 3.39942,-5.10779 3.39942,-8.33542 l 0,-12.77198 a 0.6425535,0.6425535 0 0 0 -0.64249,-0.64249 l -6.36089,0 z"
|
||||
id="rect4219"
|
||||
inkscape:connector-curvature="0" />
|
||||
</g>
|
||||
<g
|
||||
inkscape:groupmode="layer"
|
||||
id="layer3"
|
||||
inkscape:label="PLACE YOUR PICTOGRAM HERE"
|
||||
style="display:inline">
|
||||
<g
|
||||
id="g4185" />
|
||||
</g>
|
||||
<style
|
||||
id="style4217"
|
||||
type="text/css">
|
||||
.st0{fill:#419EDA;}
|
||||
</style>
|
||||
<style
|
||||
id="style4285"
|
||||
type="text/css">
|
||||
.st0{clip-path:url(#SVGID_2_);fill:#EFBF1B;}
|
||||
.st1{clip-path:url(#SVGID_2_);fill:#40BEB0;}
|
||||
.st2{clip-path:url(#SVGID_2_);fill:#0AA5DE;}
|
||||
.st3{clip-path:url(#SVGID_2_);fill:#231F20;}
|
||||
.st4{fill:#D7A229;}
|
||||
.st5{fill:#009B8F;}
|
||||
</style>
|
||||
<style
|
||||
id="style4240"
|
||||
type="text/css">
|
||||
.st0{fill:#E8478B;}
|
||||
.st1{fill:#40BEB0;}
|
||||
.st2{fill:#37A595;}
|
||||
.st3{fill:#231F20;}
|
||||
</style>
|
||||
<style
|
||||
id="style4812"
|
||||
type="text/css">
|
||||
.st0{fill:#0AA5DE;}
|
||||
.st1{fill:#40BEB0;}
|
||||
.st2{opacity:0.26;fill:#353535;}
|
||||
.st3{fill:#231F20;}
|
||||
</style>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 16 KiB |
|
|
@ -0,0 +1,26 @@
|
|||
"includes":
|
||||
- "layer:options"
|
||||
- "layer:basic"
|
||||
- "interface:nrpe-external-master"
|
||||
- "interface:etcd"
|
||||
- "interface:kubernetes-cni"
|
||||
- "layer:debug"
|
||||
- "layer:nagios"
|
||||
- "layer:status"
|
||||
- "layer:kubernetes-common"
|
||||
"exclude": [".travis.yml", "tests", "tox.ini", "test-requirements.txt", "unit_tests"]
|
||||
"options":
|
||||
"basic":
|
||||
"use_venv": !!bool "true"
|
||||
"packages":
|
||||
- "net-tools"
|
||||
"python_packages": []
|
||||
"include_system_packages": !!bool "false"
|
||||
"debug": {}
|
||||
"nagios": {}
|
||||
"status":
|
||||
"patch-hookenv": !!bool "true"
|
||||
"kubernetes-common": {}
|
||||
"flannel": {}
|
||||
"repo": "https://github.com/juju-solutions/charm-flannel.git"
|
||||
"is": "flannel"
|
||||
|
|
@ -0,0 +1,30 @@
|
|||
from time import sleep
|
||||
|
||||
|
||||
def retry(times, delay_secs):
    """Decorator that retries a call until it returns a truthy value.

    Args:
        times: Maximum number of retries before giving up.
        delay_secs: Seconds to sleep between attempts.

    Returns: A decorator whose wrapped function returns the last call
        outcome.
    """

    def retry_decorator(func):
        """Wrap ``func`` so it is re-invoked while it returns falsy.

        Args:
            func: Provided function should return either True or False.

        Returns: A callable that would return the last call outcome.
        """
        def _wrapped(*args, **kwargs):
            outcome = func(*args, **kwargs)
            for _ in range(times):
                if outcome:
                    break
                sleep(delay_secs)
                outcome = func(*args, **kwargs)
            return outcome
        return _wrapped

    return retry_decorator
|
||||
|
|
@ -0,0 +1,60 @@
|
|||
import sys
|
||||
from importlib import import_module
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def import_layer_libs():
    """
    Ensure that all layer libraries are imported.

    This makes it possible to do the following:

        from charms import layer

        layer.foo.do_foo_thing()

    Note: This function must be called after bootstrap.
    """
    skipped = ('__init__', 'basic', 'execd')
    for entry in Path('lib/charms/layer').glob('*'):
        name = entry.stem
        if name in skipped:
            continue
        # Only python source files and packages count as layer libs.
        if not (entry.is_dir() or entry.suffix == '.py'):
            continue
        import_module('charms.layer.{}'.format(name))
|
||||
|
||||
|
||||
# Terrible hack to support the old terrible interface.
# Try to get people to call layer.options.get() instead so
# that we can remove this garbage.
# Cribbed from https://stackoverflow.com/a/48100440/4941864
|
||||
class OptionsBackwardsCompatibilityHack(sys.modules[__name__].__class__):
    """Module subclass that keeps the deprecated ``options(...)`` call
    style working by forwarding calls to ``options.get()``.

    Installed over the options module by patch_options_interface().
    """
    def __call__(self, section=None, layer_file=None):
        # The old interface accepted a plain string path; normalize it
        # to a Path before delegating to the modern get() API.
        if layer_file is None:
            return self.get(section=section)
        else:
            return self.get(section=section,
                            layer_file=Path(layer_file))
|
||||
|
||||
|
||||
def patch_options_interface():
    """Make the ``charms.layer.options`` module itself callable.

    On Python 3.5+ this is done by swapping the module's ``__class__``;
    on 3.4 a replacement module object is registered in ``sys.modules``.

    Raises:
        ImportError: if ``charms.layer.options`` (which needs pyyaml)
            cannot be imported yet.
    """
    from charms.layer import options
    if sys.version_info.minor >= 5:
        options.__class__ = OptionsBackwardsCompatibilityHack
    else:
        # Py 3.4 doesn't support changing the __class__, so we have to do it
        # another way. The last line is needed because we already have a
        # reference that doesn't get updated with sys.modules.
        name = options.__name__
        hack = OptionsBackwardsCompatibilityHack(name)
        hack.get = options.get
        sys.modules[name] = hack
        sys.modules[__name__].options = hack
|
||||
|
||||
|
||||
# Patch eagerly at import time so charms that still use the deprecated
# callable-module interface keep working.
try:
    patch_options_interface()
except ImportError:
    # This may fail if pyyaml hasn't been installed yet. But in that
    # case, the bootstrap logic will try it again once it has.
    pass
|
||||
|
|
@ -0,0 +1,501 @@
|
|||
import os
|
||||
import sys
|
||||
import re
|
||||
import shutil
|
||||
from distutils.version import LooseVersion
|
||||
from pkg_resources import Requirement
|
||||
from glob import glob
|
||||
from subprocess import check_call, check_output, CalledProcessError
|
||||
from time import sleep
|
||||
|
||||
from charms import layer
|
||||
from charms.layer.execd import execd_preinstall
|
||||
|
||||
|
||||
def _get_subprocess_env():
|
||||
env = os.environ.copy()
|
||||
env['LANG'] = env.get('LANG', 'C.UTF-8')
|
||||
return env
|
||||
|
||||
|
||||
def get_series():
    """
    Return the series (e.g. "bionic", "centos7") for a few known OSes.

    Detection sources, tried in order:
    * /etc/os-release     -- ubuntu and some centos images
    * /etc/redhat-release -- redhat enterprise systems
    * /etc/lsb-release    -- older ubuntu

    Tested as of 2019 november: centos6, centos7, rhel6, bionic.

    :returns: series string, or "unknown" when the OS can't be identified
    """
    series = ""

    # Looking for content in /etc/os-release
    # works for ubuntu + some centos
    if os.path.isfile('/etc/os-release'):
        d = {}
        with open('/etc/os-release', 'r') as rel:
            for line in rel:
                if not re.match(r'^\s*$', line):
                    # Split only on the first '=': values (e.g. quoted
                    # URLs) may themselves contain '=' characters.
                    k, v = line.split('=', 1)
                    d[k.strip()] = v.strip().replace('"', '')
        # Not every distro provides VERSION_ID (rolling releases omit
        # it), so fall back to empty rather than raising KeyError.
        series = "{}{}".format(d.get('ID', ''), d.get('VERSION_ID', ''))

    # Looking for content in /etc/redhat-release
    # works for redhat enterprise systems
    elif os.path.isfile('/etc/redhat-release'):
        with open('/etc/redhat-release', 'r') as redhatlsb:
            # CentOS Linux release 7.7.1908 (Core)
            line = redhatlsb.readline()
            release = int(line.split("release")[1].split()[0][0])
            series = "centos" + str(release)

    # Looking for content in /etc/lsb-release
    # works for ubuntu
    elif os.path.isfile('/etc/lsb-release'):
        d = {}
        with open('/etc/lsb-release', 'r') as lsb:
            for line in lsb:
                # skip malformed/blank lines instead of crashing
                if '=' not in line:
                    continue
                k, v = line.split('=', 1)
                d[k.strip()] = v.strip()
        series = d['DISTRIB_CODENAME']

    # This is what happens if we cant figure out the OS.
    else:
        series = "unknown"
    return series
|
||||
|
||||
|
||||
def bootstrap_charm_deps():
    """
    Set up the base charm dependencies so that the reactive system can run.

    Installs the OS packages needed for pip/venv, builds (or reuses) the
    charm's virtualenv, installs the bundled wheelhouse, and finally
    re-execs the interpreter so the new libs are importable. Idempotent
    via the ``wheelhouse/.bootstrapped`` / ``wheelhouse/.upgraded``
    sentinel files.
    """
    # execd must happen first, before any attempt to install packages or
    # access the network, because sites use this hook to do bespoke
    # configuration and install secrets so the rest of this bootstrap
    # and the charm itself can actually succeed. This call does nothing
    # unless the operator has created and populated $JUJU_CHARM_DIR/exec.d.
    execd_preinstall()
    # ensure that $JUJU_CHARM_DIR/bin is on the path, for helper scripts

    series = get_series()

    # OMG?! is build-essentials needed?
    ubuntu_packages = ['python3-pip',
                       'python3-setuptools',
                       'python3-yaml',
                       'python3-dev',
                       'python3-wheel',
                       'build-essential']

    # I'm not going to "yum group info "Development Tools"
    # omitting above madness
    centos_packages = ['python3-pip',
                       'python3-setuptools',
                       'python3-devel',
                       'python3-wheel']

    packages_needed = []
    if 'centos' in series:
        packages_needed = centos_packages
    else:
        packages_needed = ubuntu_packages

    charm_dir = os.environ['JUJU_CHARM_DIR']
    os.environ['PATH'] += ':%s' % os.path.join(charm_dir, 'bin')
    venv = os.path.abspath('../.venv')
    vbin = os.path.join(venv, 'bin')
    vpip = os.path.join(vbin, 'pip')
    vpy = os.path.join(vbin, 'python')
    # the hook name tells us whether this invocation is an upgrade
    hook_name = os.path.basename(sys.argv[0])
    is_bootstrapped = os.path.exists('wheelhouse/.bootstrapped')
    is_charm_upgrade = hook_name == 'upgrade-charm'
    is_series_upgrade = hook_name == 'post-series-upgrade'
    is_post_upgrade = os.path.exists('wheelhouse/.upgraded')
    is_upgrade = (not is_post_upgrade and
                  (is_charm_upgrade or is_series_upgrade))
    if is_bootstrapped and not is_upgrade:
        # older subordinates might have downgraded charm-env, so we should
        # restore it if necessary
        install_or_update_charm_env()
        activate_venv()
        # the .upgrade file prevents us from getting stuck in a loop
        # when re-execing to activate the venv; at this point, we've
        # activated the venv, so it's safe to clear it
        if is_post_upgrade:
            os.unlink('wheelhouse/.upgraded')
        return
    if os.path.exists(venv):
        try:
            # focal installs or upgrades prior to PR 160 could leave the venv
            # in a broken state which would prevent subsequent charm upgrades
            _load_installed_versions(vpip)
        except CalledProcessError:
            is_broken_venv = True
        else:
            is_broken_venv = False
        if is_upgrade or is_broken_venv:
            # All upgrades should do a full clear of the venv, rather than
            # just updating it, to bring in updates to Python itself
            shutil.rmtree(venv)
    if is_upgrade:
        if os.path.exists('wheelhouse/.bootstrapped'):
            os.unlink('wheelhouse/.bootstrapped')
    # bootstrap wheelhouse
    if os.path.exists('wheelhouse'):
        # eoan+ pip/easy_install rejects the allow_hosts option
        pre_eoan = series in ('ubuntu12.04', 'precise',
                              'ubuntu14.04', 'trusty',
                              'ubuntu16.04', 'xenial',
                              'ubuntu18.04', 'bionic')
        pydistutils_lines = [
            "[easy_install]\n",
            "find_links = file://{}/wheelhouse/\n".format(charm_dir),
            "no_index=True\n",
            "index_url=\n",  # deliberately nothing here; disables it.
        ]
        if pre_eoan:
            pydistutils_lines.append("allow_hosts = ''\n")
        with open('/root/.pydistutils.cfg', 'w') as fp:
            # make sure that easy_install also only uses the wheelhouse
            # (see https://github.com/pypa/pip/issues/410)
            fp.writelines(pydistutils_lines)
        if 'centos' in series:
            yum_install(packages_needed)
        else:
            apt_install(packages_needed)
        # options can only be imported after the OS packages above
        # provide pyyaml
        from charms.layer import options
        cfg = options.get('basic')
        # include packages defined in layer.yaml
        if 'centos' in series:
            yum_install(cfg.get('packages', []))
        else:
            apt_install(cfg.get('packages', []))
        # if we're using a venv, set it up
        if cfg.get('use_venv'):
            if not os.path.exists(venv):
                series = get_series()
                if series in ('ubuntu12.04', 'precise',
                              'ubuntu14.04', 'trusty'):
                    apt_install(['python-virtualenv'])
                elif 'centos' in series:
                    yum_install(['python-virtualenv'])
                else:
                    apt_install(['virtualenv'])
                cmd = ['virtualenv', '-ppython3', '--never-download', venv]
                if cfg.get('include_system_packages'):
                    cmd.append('--system-site-packages')
                check_call(cmd, env=_get_subprocess_env())
            os.environ['PATH'] = ':'.join([vbin, os.environ['PATH']])
            pip = vpip
        else:
            pip = 'pip3'
            # save a copy of system pip to prevent `pip3 install -U pip`
            # from changing it
            if os.path.exists('/usr/bin/pip'):
                shutil.copy2('/usr/bin/pip', '/usr/bin/pip.save')
        pre_install_pkgs = ['pip', 'setuptools', 'setuptools-scm']
        # we bundle these packages to work around bugs in older versions (such
        # as https://github.com/pypa/pip/issues/56), but if the system already
        # provided a newer version, downgrading it can cause other problems
        _update_if_newer(pip, pre_install_pkgs)
        # install the rest of the wheelhouse deps (extract the pkg names into
        # a set so that we can ignore the pre-install packages and let pip
        # choose the best version in case there are multiple from layer
        # conflicts)
        _versions = _load_wheelhouse_versions()
        _pkgs = _versions.keys() - set(pre_install_pkgs)
        # add back the versions such that each package in pkgs is
        # <package_name>==<version>.
        # This ensures that pip 20.3.4+ will install the packages from the
        # wheelhouse without (erroneously) flagging an error.
        pkgs = _add_back_versions(_pkgs, _versions)
        reinstall_flag = '--force-reinstall'
        if not cfg.get('use_venv', True) and pre_eoan:
            reinstall_flag = '--ignore-installed'
        check_call([pip, 'install', '-U', reinstall_flag, '--no-index',
                    '--no-cache-dir', '-f', 'wheelhouse'] + list(pkgs),
                   env=_get_subprocess_env())
        # re-enable installation from pypi
        os.remove('/root/.pydistutils.cfg')

        # install pyyaml for centos7, since, unlike the ubuntu image, the
        # default image for centos doesn't include pyyaml; see the discussion:
        # https://discourse.jujucharms.com/t/charms-for-centos-lets-begin
        if 'centos' in series:
            check_call([pip, 'install', '-U', 'pyyaml'],
                       env=_get_subprocess_env())

        # install python packages from layer options
        if cfg.get('python_packages'):
            check_call([pip, 'install', '-U'] + cfg.get('python_packages'),
                       env=_get_subprocess_env())
        if not cfg.get('use_venv'):
            # restore system pip to prevent `pip3 install -U pip`
            # from changing it
            if os.path.exists('/usr/bin/pip.save'):
                shutil.copy2('/usr/bin/pip.save', '/usr/bin/pip')
                os.remove('/usr/bin/pip.save')
        # setup wrappers to ensure envs are used for scripts
        install_or_update_charm_env()
        for wrapper in ('charms.reactive', 'charms.reactive.sh',
                        'chlp', 'layer_option'):
            src = os.path.join('/usr/local/sbin', 'charm-env')
            dst = os.path.join('/usr/local/sbin', wrapper)
            if not os.path.exists(dst):
                os.symlink(src, dst)
        if cfg.get('use_venv'):
            shutil.copy2('bin/layer_option', vbin)
        else:
            shutil.copy2('bin/layer_option', '/usr/local/bin/')
            # re-link the charm copy to the wrapper in case charms
            # call bin/layer_option directly (as was the old pattern)
            os.remove('bin/layer_option')
            os.symlink('/usr/local/sbin/layer_option', 'bin/layer_option')
        # flag us as having already bootstrapped so we don't do it again
        open('wheelhouse/.bootstrapped', 'w').close()
        if is_upgrade:
            # flag us as having already upgraded so we don't do it again
            open('wheelhouse/.upgraded', 'w').close()
        # Ensure that the newly bootstrapped libs are available.
        # Note: this only seems to be an issue with namespace packages.
        # Non-namespace-package libs (e.g., charmhelpers) are available
        # without having to reload the interpreter. :/
        reload_interpreter(vpy if cfg.get('use_venv') else sys.argv[0])
|
||||
|
||||
|
||||
def _load_installed_versions(pip):
    """Return {project_name: LooseVersion} for each '=='-pinned package
    reported by ``pip freeze``.

    Lines that are not parseable requirement specs (editable installs,
    comments) are silently skipped.
    """
    frozen = check_output([pip, 'freeze']).decode('utf8')
    versions = {}
    for line in frozen.splitlines():
        try:
            req = Requirement.parse(line)
        except ValueError:
            continue
        for op, ver in req.specs:
            if op == '==':
                versions[req.project_name] = LooseVersion(ver)
    return versions
|
||||
|
||||
|
||||
def _load_wheelhouse_versions():
    """Return {package_name: LooseVersion} for each archive bundled in
    the charm's ``wheelhouse`` directory.

    Filenames look like ``some_pkg-1.2.3.tar.gz``; underscores are
    normalized to dashes to match pip's canonical package names.
    """
    versions = {}
    for archive in glob('wheelhouse/*'):
        name, ver = os.path.basename(archive).rsplit('-', 1)
        # nb: LooseVersion ignores the file extension
        versions[name.replace('_', '-')] = LooseVersion(ver)
    return versions
|
||||
|
||||
|
||||
def _add_back_versions(pkgs, versions):
|
||||
"""Add back the version strings to each of the packages.
|
||||
|
||||
The versions are LooseVersion() from _load_wheelhouse_versions(). This
|
||||
function strips the ".zip" or ".tar.gz" from the end of the version string
|
||||
and adds it back to the package in the form of <package_name>==<version>
|
||||
|
||||
If a package name is not a key in the versions dictionary, then it is
|
||||
returned in the list unchanged.
|
||||
|
||||
:param pkgs: A list of package names
|
||||
:type pkgs: List[str]
|
||||
:param versions: A map of package to LooseVersion
|
||||
:type versions: Dict[str, LooseVersion]
|
||||
:returns: A list of (maybe) versioned packages
|
||||
:rtype: List[str]
|
||||
"""
|
||||
def _strip_ext(s):
|
||||
"""Strip an extension (if it exists) from the string
|
||||
|
||||
:param s: the string to strip an extension off if it exists
|
||||
:type s: str
|
||||
:returns: string without an extension of .zip or .tar.gz
|
||||
:rtype: str
|
||||
"""
|
||||
for ending in [".zip", ".tar.gz"]:
|
||||
if s.endswith(ending):
|
||||
return s[:-len(ending)]
|
||||
return s
|
||||
|
||||
def _maybe_add_version(pkg):
|
||||
"""Maybe add back the version number to a package if it exists.
|
||||
|
||||
Adds the version number, if the package exists in the lexically
|
||||
captured `versions` dictionary, in the form <pkg>==<version>. Strips
|
||||
the extension if it exists.
|
||||
|
||||
:param pkg: the package name to (maybe) add the version number to.
|
||||
:type pkg: str
|
||||
"""
|
||||
try:
|
||||
return "{}=={}".format(pkg, _strip_ext(str(versions[pkg])))
|
||||
except KeyError:
|
||||
pass
|
||||
return pkg
|
||||
|
||||
return [_maybe_add_version(pkg) for pkg in pkgs]
|
||||
|
||||
|
||||
def _update_if_newer(pip, pkgs):
    """Install the wheelhouse copy of each package only when it is newer
    than (or missing from) the currently installed version.

    We bundle these bootstrap packages to work around bugs in older
    versions, but downgrading a newer system-provided copy can cause
    other problems, so already-newer packages are left alone.

    :param pip: path to (or name of) the pip executable to invoke
    :param pkgs: package names to consider upgrading
    """
    installed = _load_installed_versions(pip)
    wheelhouse = _load_wheelhouse_versions()
    for pkg in pkgs:
        if pkg not in wheelhouse:
            # Nothing bundled for this package; previously this raised
            # KeyError when the package was installed but not bundled.
            continue
        if pkg not in installed or wheelhouse[pkg] > installed[pkg]:
            check_call([pip, 'install', '-U', '--no-index', '-f', 'wheelhouse',
                        pkg], env=_get_subprocess_env())
|
||||
|
||||
|
||||
def install_or_update_charm_env():
    """Install or refresh the system-wide ``charm-env`` wrapper.

    Copies the charm's bundled ``bin/charm-env`` to /usr/local/sbin only
    when the bundled copy reports a newer version, so older subordinate
    charms can't downgrade it.
    """
    # On Trusty python3-pkg-resources is not installed
    try:
        from pkg_resources import parse_version
    except ImportError:
        apt_install(['python3-pkg-resources'])
        from pkg_resources import parse_version

    # A missing binary or a failing --version both count as 0.0.0, so
    # the comparison below still does the right thing.
    try:
        installed_version = parse_version(
            check_output(['/usr/local/sbin/charm-env',
                          '--version']).decode('utf8'))
    except (CalledProcessError, FileNotFoundError):
        installed_version = parse_version('0.0.0')
    try:
        bundled_version = parse_version(
            check_output(['bin/charm-env',
                          '--version']).decode('utf8'))
    except (CalledProcessError, FileNotFoundError):
        bundled_version = parse_version('0.0.0')
    if installed_version < bundled_version:
        shutil.copy2('bin/charm-env', '/usr/local/sbin/')
|
||||
|
||||
|
||||
def activate_venv():
    """
    Activate the venv if enabled in ``layer.yaml``.

    This is handled automatically for normal hooks, but actions might
    need to invoke this manually, using something like:

        # Load modules from $JUJU_CHARM_DIR/lib
        import sys
        sys.path.append('lib')

        from charms.layer.basic import activate_venv
        activate_venv()

    This will ensure that modules installed in the charm's
    virtual environment are available to the action.

    Note: when the venv is enabled and not yet active, this re-execs
    the current process under the venv's python and does not return.
    """
    from charms.layer import options
    venv = os.path.abspath('../.venv')
    vbin = os.path.join(venv, 'bin')
    vpy = os.path.join(vbin, 'python')
    use_venv = options.get('basic', 'use_venv')
    # checking sys.executable avoids an endless re-exec loop once the
    # venv's interpreter is already running
    if use_venv and '.venv' not in sys.executable:
        # activate the venv
        os.environ['PATH'] = ':'.join([vbin, os.environ['PATH']])
        reload_interpreter(vpy)
    layer.patch_options_interface()
    layer.import_layer_libs()
|
||||
|
||||
|
||||
def reload_interpreter(python):
    """
    Reload the python interpreter to ensure that all deps are available.

    Newly installed modules in namespace packages sometimes seem to
    not be picked up by Python 3.

    :param python: path to the interpreter to exec.

    Note: this does not return; the current process image is replaced
    via ``os.execve`` with the same argv and environment.
    """
    os.execve(python, [python] + list(sys.argv), os.environ)
|
||||
|
||||
|
||||
def apt_install(packages):
    """
    Install apt packages.

    This ensures a consistent set of options that are often missed but
    should really be set.

    Up to three attempts are made; between failed attempts an
    ``apt-get update`` is run and we pause briefly, since transient
    index or dpkg-lock problems are common on fresh machines.

    :param packages: a package name or a list of package names.
    """
    if isinstance(packages, (str, bytes)):
        # accept a single package name as well as a list
        packages = [packages]

    env = _get_subprocess_env()

    if 'DEBIAN_FRONTEND' not in env:
        # never prompt; hooks run unattended
        env['DEBIAN_FRONTEND'] = 'noninteractive'

    cmd = ['apt-get',
           '--option=Dpkg::Options::=--force-confold',
           '--assume-yes',
           'install']
    for attempt in range(3):
        try:
            check_call(cmd + packages, env=env)
        except CalledProcessError:
            if attempt == 2:  # third attempt
                raise
            try:
                # sometimes apt-get update needs to be run
                check_call(['apt-get', 'update'], env=env)
            except CalledProcessError:
                # sometimes it's a dpkg lock issue
                pass
            sleep(5)
        else:
            break
|
||||
|
||||
|
||||
def yum_install(packages):
    """ Installs packages with yum.

    This function largely mimics the apt_install function for consistency:
    up to three attempts are made, with a ``yum update`` and a short
    sleep between failed attempts. A no-op when ``packages`` is empty.

    :param packages: list of package names to install.
    """
    if not packages:
        # nothing to install; avoid invoking yum with an empty list
        # (replaces the old dead `else: pass` branch)
        return
    env = os.environ.copy()
    cmd = ['yum', '-y', 'install']
    for attempt in range(3):
        try:
            check_call(cmd + packages, env=env)
        except CalledProcessError:
            if attempt == 2:  # third attempt
                raise
            try:
                # a stale metadata cache is a common transient failure
                check_call(['yum', 'update'], env=env)
            except CalledProcessError:
                # sometimes it's a lock issue; just retry after a pause
                pass
            sleep(5)
        else:
            break
|
||||
|
||||
|
||||
def init_config_states():
    """Set reactive states reflecting the current charm config.

    For every option declared in config.yaml this sets:
    * ``config.changed`` and ``config.changed.<opt>`` when it changed
      since the last hook,
    * ``config.set.<opt>`` toggled on when its value is truthy,
    * ``config.default.<opt>`` toggled on when it equals the declared
      default.
    """
    import yaml
    from charmhelpers.core import hookenv
    from charms.reactive import set_state
    from charms.reactive import toggle_state
    config = hookenv.config()
    config_defaults = {}
    config_defs = {}
    config_yaml = os.path.join(hookenv.charm_dir(), 'config.yaml')
    if os.path.exists(config_yaml):
        with open(config_yaml) as fp:
            config_defs = yaml.safe_load(fp).get('options', {})
            config_defaults = {key: value.get('default')
                               for key, value in config_defs.items()}
    for opt in config_defs.keys():
        if config.changed(opt):
            set_state('config.changed')
            set_state('config.changed.{}'.format(opt))
        toggle_state('config.set.{}'.format(opt), config.get(opt))
        toggle_state('config.default.{}'.format(opt),
                     config.get(opt) == config_defaults[opt])
|
||||
|
||||
|
||||
def clear_config_states():
    """Remove all config.* reactive states and flush the kv store.

    Clears ``config.changed`` plus the per-option
    ``config.changed.<opt>``, ``config.set.<opt>`` and
    ``config.default.<opt>`` states.
    """
    from charmhelpers.core import hookenv, unitdata
    from charms.reactive import remove_state
    config = hookenv.config()
    remove_state('config.changed')
    templates = ('config.changed.{}', 'config.set.{}', 'config.default.{}')
    for opt in config.keys():
        for template in templates:
            remove_state(template.format(opt))
    unitdata.kv().flush()
|
||||
|
|
@ -0,0 +1,114 @@
|
|||
# Copyright 2014-2016 Canonical Limited.
|
||||
#
|
||||
# This file is part of layer-basic, the reactive base layer for Juju.
|
||||
#
|
||||
# charm-helpers is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License version 3 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# charm-helpers is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with charm-helpers. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
# This module may only import from the Python standard library.
|
||||
import os
|
||||
import sys
|
||||
import subprocess
|
||||
import time
|
||||
|
||||
'''
|
||||
execd/preinstall
|
||||
|
||||
Read the layer-basic docs for more info on how to use this feature.
|
||||
https://charmsreactive.readthedocs.io/en/latest/layer-basic.html#exec-d-support
|
||||
'''
|
||||
|
||||
|
||||
def default_execd_dir():
    """Return the charm's default exec.d directory ($JUJU_CHARM_DIR/exec.d)."""
    charm_dir = os.environ['JUJU_CHARM_DIR']
    return os.path.join(charm_dir, 'exec.d')
|
||||
|
||||
|
||||
def execd_module_paths(execd_dir=None):
    """Generate full paths to module directories within execd_dir.

    Yields nothing when the directory does not exist; only immediate
    subdirectories count as modules.
    """
    base = execd_dir or default_execd_dir()
    if not os.path.exists(base):
        return
    for entry in os.listdir(base):
        candidate = os.path.join(base, entry)
        if os.path.isdir(candidate):
            yield candidate
|
||||
|
||||
|
||||
def execd_submodule_paths(command, execd_dir=None):
    """Generate full paths to the specified command within exec_dir.

    Only regular files with the execute bit set are yielded.
    """
    for module_dir in execd_module_paths(execd_dir):
        candidate = os.path.join(module_dir, command)
        if os.path.isfile(candidate) and os.access(candidate, os.X_OK):
            yield candidate
|
||||
|
||||
|
||||
def execd_sentinel_path(submodule_path):
    """Return the path of the 'done' sentinel for a submodule.

    For ``<execd_dir>/<module>/<command>`` the sentinel is
    ``<execd_dir>/.<module>_<command>.done``.
    """
    module_dir = os.path.dirname(submodule_path)
    base_dir = os.path.dirname(module_dir)
    sentinel_name = '.{}_{}.done'.format(os.path.basename(module_dir),
                                         os.path.basename(submodule_path))
    return os.path.join(base_dir, sentinel_name)
|
||||
|
||||
|
||||
def execd_run(command, execd_dir=None, stop_on_error=True, stderr=None):
    """Run command for each module within execd_dir which defines it.

    :param command: executable name to look for in each module dir.
    :param execd_dir: directory to scan; defaults to
        $JUJU_CHARM_DIR/exec.d.
    :param stop_on_error: when True, exit the process if a module fails
        (after setting the unit to blocked where possible).
    :param stderr: stream for subprocess/diagnostic output; defaults to
        sys.stdout.
    """
    if stderr is None:
        stderr = sys.stdout
    for submodule_path in execd_submodule_paths(command, execd_dir):
        # Only run each execd once. We cannot simply run them in the
        # install hook, as potentially storage hooks are run before that.
        # We cannot rely on them being idempotent.
        sentinel = execd_sentinel_path(submodule_path)
        if os.path.exists(sentinel):
            continue

        try:
            subprocess.check_call([submodule_path], stderr=stderr,
                                  universal_newlines=True)
            # record success so this submodule is skipped on later hooks
            with open(sentinel, 'w') as f:
                f.write('{} ran successfully {}\n'.format(submodule_path,
                                                          time.ctime()))
                f.write('Removing this file will cause it to be run again\n')
        except subprocess.CalledProcessError as e:
            # Logs get the details. We can't use juju-log, as the
            # output may be substantial and exceed command line
            # length limits.
            print("ERROR ({}) running {}".format(e.returncode, e.cmd),
                  file=stderr)
            print("STDOUT<<EOM", file=stderr)
            print(e.output, file=stderr)
            print("EOM", file=stderr)

            # Unit workload status gets a shorter fail message.
            short_path = os.path.relpath(submodule_path)
            block_msg = "Error ({}) running {}".format(e.returncode,
                                                       short_path)
            try:
                subprocess.check_call(['status-set', 'blocked', block_msg],
                                      universal_newlines=True)
                if stop_on_error:
                    sys.exit(0)  # Leave unit in blocked state.
            except Exception:
                pass  # We care about the exec.d/* failure, not status-set.

            if stop_on_error:
                sys.exit(e.returncode or 1)  # Error state for pre-1.24 Juju
|
||||
|
||||
def execd_preinstall(execd_dir=None):
    """Run charm-pre-install for each module within execd_dir.

    :param execd_dir: directory to scan; defaults to
        $JUJU_CHARM_DIR/exec.d.
    """
    execd_run('charm-pre-install', execd_dir=execd_dir)
|
||||
|
|
@ -0,0 +1,924 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
# Copyright 2015 The Kubernetes Authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import ipaddress
|
||||
import re
|
||||
import os
|
||||
import subprocess
|
||||
import hashlib
|
||||
import json
|
||||
import traceback
|
||||
import random
|
||||
import string
|
||||
import tempfile
|
||||
import yaml
|
||||
|
||||
from base64 import b64decode, b64encode
|
||||
from pathlib import Path
|
||||
from subprocess import check_output, check_call
|
||||
from socket import gethostname, getfqdn
|
||||
from shlex import split
|
||||
from subprocess import CalledProcessError
|
||||
from charmhelpers.core import hookenv, unitdata
|
||||
from charmhelpers.core import host
|
||||
from charmhelpers.core.templating import render
|
||||
from charms.reactive import endpoint_from_flag, is_state
|
||||
from time import sleep
|
||||
|
||||
# Namespace and secret type used for token-auth secrets in the cluster.
AUTH_SECRET_NS = "kube-system"
AUTH_SECRET_TYPE = "juju.is/token-auth"

# Charm-local key/value store.
db = unitdata.kv()
# Kubeconfig used by root-run kubectl invocations.
kubeclientconfig_path = "/root/.kube/config"
gcp_creds_env_key = "GOOGLE_APPLICATION_CREDENTIALS"
kubeproxyconfig_path = "/root/cdk/kubeproxyconfig"
# TLS material laid down under /root/cdk.
certs_dir = Path("/root/cdk")
ca_crt_path = certs_dir / "ca.crt"
server_crt_path = certs_dir / "server.crt"
server_key_path = certs_dir / "server.key"
client_crt_path = certs_dir / "client.crt"
client_key_path = certs_dir / "client.key"
|
||||
|
||||
|
||||
def get_version(bin_name):
    """Get the version of an installed Kubernetes binary.

    :param str bin_name: Name of binary
    :return: 3-tuple version (maj, min, patch)

    Example::

        >>> get_version('kubelet')
        (1, 6, 0)

    """
    cmd = "{} --version".format(bin_name).split()
    raw = subprocess.check_output(cmd).decode("utf-8")
    # take the first three integer groups found in the output
    numbers = re.findall("[0-9]+", raw)[:3]
    return tuple(int(part) for part in numbers)
|
||||
|
||||
|
||||
def retry(times, delay_secs):
    """Decorator for retrying a method call.

    Args:
        times: How many times should we retry before giving up
        delay_secs: Delay in secs

    Returns: A callable that would return the last call outcome
    """

    def retry_decorator(func):
        """Decorator to wrap the function provided.

        Args:
            func: Provided function should return either True or False

        Returns: A callable that would return the last call outcome

        """

        def _wrapped(*args, **kwargs):
            result = func(*args, **kwargs)
            retries_left = times
            while retries_left and not result:
                sleep(delay_secs)
                result = func(*args, **kwargs)
                retries_left -= 1
            return result

        return _wrapped

    return retry_decorator
|
||||
|
||||
|
||||
def calculate_resource_checksum(resource):
    """Calculate an md5 checksum for an attached charm resource.

    A missing resource hashes as zero bytes rather than failing.
    """
    digest = hashlib.md5()
    path = hookenv.resource_get(resource)
    if path:
        with open(path, "rb") as fh:
            digest.update(fh.read())
    return digest.hexdigest()
|
||||
|
||||
|
||||
def get_resource_checksum_db_key(checksum_prefix, resource):
    """Convert a resource name to a resource checksum database key."""
    return "{}{}".format(checksum_prefix, resource)
|
||||
|
||||
|
||||
def migrate_resource_checksums(checksum_prefix, snap_resources):
    """Migrate resource checksums from the old schema to the new one.

    :param checksum_prefix: db key prefix for the new schema.
    :param snap_resources: resource names to migrate.
    """
    for resource in snap_resources:
        new_key = get_resource_checksum_db_key(checksum_prefix, resource)
        # only migrate entries that haven't already been written
        if not db.get(new_key):
            path = hookenv.resource_get(resource)
            if path:
                # old key from charms.reactive.helpers.any_file_changed
                old_key = "reactive.files_changed." + path
                old_checksum = db.get(old_key)
                db.set(new_key, old_checksum)
            else:
                # No resource is attached. Previously, this meant no checksum
                # would be calculated and stored. But now we calculate it as if
                # it is a 0-byte resource, so let's go ahead and do that.
                zero_checksum = hashlib.md5().hexdigest()
                db.set(new_key, zero_checksum)
|
||||
|
||||
|
||||
def check_resources_for_upgrade_needed(checksum_prefix, snap_resources):
    """Return True if any snap resource's checksum differs from the stored one."""
    hookenv.status_set("maintenance", "Checking resources")
    return any(
        db.get(get_resource_checksum_db_key(checksum_prefix, resource))
        != calculate_resource_checksum(resource)
        for resource in snap_resources
    )
|
||||
|
||||
|
||||
def calculate_and_store_resource_checksums(checksum_prefix, snap_resources):
    """Compute and persist the current checksum of every snap resource."""
    for resource in snap_resources:
        db.set(
            get_resource_checksum_db_key(checksum_prefix, resource),
            calculate_resource_checksum(resource),
        )
|
||||
|
||||
|
||||
def get_ingress_address(endpoint_name, ignore_addresses=None):
    """Return the preferred ingress address for the given endpoint.

    Falls back to the unit's private-address on Juju versions without
    network-space support. Addresses in ignore_addresses are filtered out,
    and fan-network addresses (240.0.0.0/4) are sorted last.
    """
    try:
        network_info = hookenv.network_get(endpoint_name)
    except NotImplementedError:
        network_info = {}

    if not network_info or "ingress-addresses" not in network_info:
        # if they don't have ingress-addresses they are running a juju that
        # doesn't support spaces, so just return the private address
        return hookenv.unit_get("private-address")

    addresses = network_info["ingress-addresses"]

    if ignore_addresses:
        hookenv.log("ingress-addresses before filtering: {}".format(addresses))
        addresses = [addr for addr in addresses if addr not in ignore_addresses]
        hookenv.log("ingress-addresses after filtering: {}".format(addresses))

    # Need to prefer non-fan IP addresses due to various issues, e.g.
    # https://bugs.launchpad.net/charm-gcp-integrator/+bug/1822997
    # Fan typically likes to use IPs in the 240.0.0.0/4 block, so we'll
    # prioritize those last. Not technically correct, but good enough.
    try:
        addresses = sorted(
            addresses, key=lambda a: int(a.partition(".")[0]) >= 240
        )
    except Exception:
        # e.g. IPv6 addresses won't parse as int; keep the original order.
        hookenv.log(traceback.format_exc())

    return addresses[0]
|
||||
|
||||
|
||||
def get_ingress_address6(endpoint_name):
    """Return the first IPv6 ingress address for the endpoint, or None."""
    try:
        network_info = hookenv.network_get(endpoint_name)
    except NotImplementedError:
        network_info = {}

    if not network_info or "ingress-addresses" not in network_info:
        return None

    for addr in network_info["ingress-addresses"]:
        candidate = ipaddress.ip_interface(addr).ip
        if candidate.version == 6:
            return str(candidate)
    return None
|
||||
|
||||
|
||||
def service_restart(service_name):
    """Restart a host service, reporting progress via unit status."""
    message = "Restarting {0} service".format(service_name)
    hookenv.status_set("maintenance", message)
    host.service_restart(service_name)
|
||||
|
||||
|
||||
def service_start(service_name):
    """Start the named host service, logging the action.

    Fixes a copy/paste bug: this previously called host.service_stop(),
    stopping the very service it logged it was starting.
    """
    hookenv.log("Starting {0} service.".format(service_name))
    host.service_start(service_name)
|
||||
|
||||
|
||||
def service_stop(service_name):
    """Stop the named host service, logging the action first."""
    message = "Stopping {0} service.".format(service_name)
    hookenv.log(message)
    host.service_stop(service_name)
|
||||
|
||||
|
||||
def arch():
    """Return the package architecture as a string.

    Raise an exception if the architecture is not supported by kubernetes.
    """
    # Ask dpkg for this system's architecture, then decode the raw bytes.
    raw = check_output(["dpkg", "--print-architecture"])
    return raw.rstrip().decode("utf-8")
|
||||
|
||||
|
||||
def get_service_ip(service, namespace="kube-system", errors_fatal=True):
    """Return a Kubernetes service's clusterIP.

    When errors_fatal is False, kubectl failures yield None instead of
    raising CalledProcessError.
    """
    try:
        output = kubectl(
            "get", "service", "--namespace", namespace, service, "--output", "json"
        )
    except CalledProcessError:
        if not errors_fatal:
            return None
        raise
    svc = json.loads(output.decode())
    return svc["spec"]["clusterIP"]
|
||||
|
||||
|
||||
def kubectl(*args):
    """Run a kubectl cli command with a config file.

    Returns stdout and throws an error if the command fails.
    """
    command = ["kubectl", "--kubeconfig=" + kubeclientconfig_path, *args]
    hookenv.log("Executing {}".format(command))
    return check_output(command)
|
||||
|
||||
|
||||
def kubectl_success(*args):
    """Run kubectl with the given args; True on success, False otherwise."""
    try:
        kubectl(*args)
    except CalledProcessError:
        return False
    return True
|
||||
|
||||
|
||||
def kubectl_manifest(operation, manifest):
    """Wrap kubectl for operations on a filepath manifest.

    :param operation - one of get, create, delete, replace
    :param manifest - filepath to the manifest
    """
    if operation == "delete":
        # Deletions are a special case: ensure we immediately remove
        # requested resources with --now.
        return kubectl_success(operation, "-f", manifest, "--now")
    if operation == "create" and kubectl_success("get", "-f", manifest):
        # Guard against an error re-creating the same manifest multiple
        # times: if we already have the definition, it's probably safe to
        # assume creation was true.
        hookenv.log("Skipping definition for {}".format(manifest))
        return True
    # Execute the requested command that did not match any special case.
    return kubectl_success(operation, "-f", manifest)
|
||||
|
||||
|
||||
def get_node_name():
    """Return the node name: the FQDN on AWS, the hostname elsewhere (lowercased)."""
    kubelet_extra_args = parse_extra_args("kubelet-extra-args")
    cloud_provider = kubelet_extra_args.get("cloud-provider", "")
    # The active cloud endpoint (first match) overrides any operator-set value.
    endpoint_providers = (
        ("endpoint.aws.ready", "aws"),
        ("endpoint.gcp.ready", "gce"),
        ("endpoint.openstack.ready", "openstack"),
        ("endpoint.vsphere.ready", "vsphere"),
        ("endpoint.azure.ready", "azure"),
    )
    for flag, provider in endpoint_providers:
        if is_state(flag):
            cloud_provider = provider
            break
    if cloud_provider == "aws":
        return getfqdn().lower()
    return gethostname().lower()
|
||||
|
||||
|
||||
def create_kubeconfig(
    kubeconfig,
    server,
    ca,
    key=None,
    certificate=None,
    user="ubuntu",
    context="juju-context",
    cluster="juju-cluster",
    password=None,
    token=None,
    keystone=False,
    aws_iam_cluster_id=None,
):
    """Create a configuration for Kubernetes based on path using the supplied
    arguments for values of the Kubernetes server, CA, key, certificate, user
    context and cluster.

    The file is built as "<kubeconfig>.new" via a sequence of `kubectl config`
    invocations and only renamed over the existing file when the content
    actually changed.

    Raises:
        ValueError: if no auth mechanism is given, if only one of key/
            certificate is given, or if both token and password are given.
    """
    # Validate the auth inputs before touching the filesystem.
    if not key and not certificate and not password and not token:
        raise ValueError("Missing authentication mechanism.")
    elif key and not certificate:
        raise ValueError("Missing certificate.")
    elif not key and certificate:
        raise ValueError("Missing key.")
    elif token and password:
        # token and password are mutually exclusive. Error early if both are
        # present. The developer has requested an impossible situation.
        # see: kubectl config set-credentials --help
        raise ValueError("Token and Password are mutually exclusive.")

    old_kubeconfig = Path(kubeconfig)
    new_kubeconfig = Path(str(kubeconfig) + ".new")

    # Create the config file with the address of the master server.
    cmd = (
        "kubectl config --kubeconfig={0} set-cluster {1} "
        "--server={2} --certificate-authority={3} --embed-certs=true"
    )
    check_call(split(cmd.format(new_kubeconfig, cluster, server, ca)))
    # Delete old users
    cmd = "kubectl config --kubeconfig={0} unset users"
    check_call(split(cmd.format(new_kubeconfig)))
    # Create the credentials using the client flags.
    cmd = "kubectl config --kubeconfig={0} " "set-credentials {1} ".format(
        new_kubeconfig, user
    )

    if key and certificate:
        cmd = (
            "{0} --client-key={1} --client-certificate={2} "
            "--embed-certs=true".format(cmd, key, certificate)
        )
    if password:
        cmd = "{0} --username={1} --password={2}".format(cmd, user, password)
    # This is mutually exclusive from password. They will not work together.
    if token:
        cmd = "{0} --token={1}".format(cmd, token)
    check_call(split(cmd))
    # Create a default context with the cluster.
    cmd = "kubectl config --kubeconfig={0} set-context {1} " "--cluster={2} --user={3}"
    check_call(split(cmd.format(new_kubeconfig, context, cluster, user)))
    # Make the config use this new context.
    cmd = "kubectl config --kubeconfig={0} use-context {1}"
    check_call(split(cmd.format(new_kubeconfig, context)))
    if keystone:
        # create keystone user
        cmd = "kubectl config --kubeconfig={0} " "set-credentials keystone-user".format(
            new_kubeconfig
        )
        check_call(split(cmd))
        # create keystone context
        cmd = (
            "kubectl config --kubeconfig={0} "
            "set-context --cluster={1} "
            "--user=keystone-user keystone".format(new_kubeconfig, cluster)
        )
        check_call(split(cmd))
        # use keystone context
        cmd = "kubectl config --kubeconfig={0} " "use-context keystone".format(
            new_kubeconfig
        )
        check_call(split(cmd))
        # manually add exec command until kubectl can do it for us
        # NOTE: the literal "user: {}" below is the YAML that kubectl emits
        # for an attribute-less credential — it is NOT a format placeholder.
        with open(new_kubeconfig, "r") as f:
            content = f.read()
            content = content.replace(
                """- name: keystone-user
  user: {}""",
                """- name: keystone-user
  user:
    exec:
      command: "/snap/bin/client-keystone-auth"
      apiVersion: "client.authentication.k8s.io/v1beta1"
""",
            )
        with open(new_kubeconfig, "w") as f:
            f.write(content)
    if aws_iam_cluster_id:
        # create aws-iam context
        cmd = (
            "kubectl config --kubeconfig={0} "
            "set-context --cluster={1} "
            "--user=aws-iam-user aws-iam-authenticator"
        )
        check_call(split(cmd.format(new_kubeconfig, cluster)))

        # append a user for aws-iam
        cmd = (
            "kubectl --kubeconfig={0} config set-credentials "
            "aws-iam-user --exec-command=aws-iam-authenticator "
            '--exec-arg="token" --exec-arg="-i" --exec-arg="{1}" '
            '--exec-arg="-r" --exec-arg="<<insert_arn_here>>" '
            "--exec-api-version=client.authentication.k8s.io/v1alpha1"
        )
        check_call(split(cmd.format(new_kubeconfig, aws_iam_cluster_id)))

        # not going to use aws-iam context by default since we don't have
        # the desired arn. This will make the config not usable if copied.

        # cmd = 'kubectl config --kubeconfig={0} ' \
        #     'use-context aws-iam-authenticator'.format(new_kubeconfig)
        # check_call(split(cmd))
    # Only replace the live kubeconfig when the content actually changed,
    # so file mtimes (and anything watching them) stay stable.
    if old_kubeconfig.exists():
        changed = new_kubeconfig.read_text() != old_kubeconfig.read_text()
    else:
        changed = True
    if changed:
        new_kubeconfig.rename(old_kubeconfig)
|
||||
|
||||
|
||||
def parse_extra_args(config_key):
    """Parse a space-separated 'key=value'/'flag' config option into a dict.

    Bare flags (no '=') map to the string "true".
    """
    args = {}
    for element in hookenv.config().get(config_key, "").split():
        key, sep, value = element.partition("=")
        args[key] = value if sep else "true"
    return args
|
||||
|
||||
|
||||
def configure_kubernetes_service(key, service, base_args, extra_args_key):
    """Configure a Kubernetes snap service via 'snap set'.

    Merges base_args, operator-supplied extra args and CIS-injected kv args
    (highest precedence), then writes them as a single 'args' snap option.
    """
    db = unitdata.kv()

    prev_args_key = key + service
    prev_snap_args = db.get(prev_args_key) or {}

    merged = dict(base_args)
    merged.update(parse_extra_args(extra_args_key))

    # CIS benchmark action may inject kv config to pass failing tests. Merge
    # these after the func args as they should take precedence.
    cis_args = db.get("cis-" + service) or {}
    merged.update(cis_args)

    # Remove any args with 'None' values (all k8s args are 'k=v') and
    # construct an arg string for use by 'snap set'.
    arg_string = " ".join(
        '--{}="{}"'.format(k, v) for k, v in merged.items() if v is not None
    )

    # Null out any individually-set snap options from previous revisions,
    # then pass everything through the single "args" option.
    snap_opts = {prev_arg: "null" for prev_arg in prev_snap_args}
    snap_opts["args"] = arg_string
    opt_list = ["{}={}".format(k, v) for k, v in snap_opts.items()]

    check_call(["snap", "set", service] + opt_list)

    # Now that we've started doing snap configuration through the "args"
    # option, we should never need to clear previous args again.
    db.set(prev_args_key, {})
|
||||
|
||||
|
||||
def _snap_common_path(component):
|
||||
return Path("/var/snap/{}/common".format(component))
|
||||
|
||||
|
||||
def cloud_config_path(component):
    """Path of the component's cloud-config.conf under SNAP_COMMON."""
    common = _snap_common_path(component)
    return common / "cloud-config.conf"
|
||||
|
||||
|
||||
def _gcp_creds_path(component):
    """Path of the component's GCP credentials file under SNAP_COMMON."""
    common = _snap_common_path(component)
    return common / "gcp-creds.json"
|
||||
|
||||
|
||||
def _daemon_env_path(component):
    """Path of the component's daemon environment file under SNAP_COMMON."""
    common = _snap_common_path(component)
    return common / "environment"
|
||||
|
||||
|
||||
def _cloud_endpoint_ca_path(component):
    """Path of the component's cloud endpoint CA cert under SNAP_COMMON."""
    common = _snap_common_path(component)
    return common / "cloud-endpoint-ca.crt"
|
||||
|
||||
|
||||
def encryption_config_path():
    """Path of the kube-apiserver encryption-at-rest config file."""
    return (
        _snap_common_path("kube-apiserver") / "encryption" / "encryption_config.yaml"
    )
|
||||
|
||||
|
||||
def write_gcp_snap_config(component):
    """Write GCP credentials and cloud-config for a snap component.

    Stores the service-account credentials (mode 0600), writes a
    cloud-config disabling token-url, and exports the creds path in the
    daemon's environment file.
    """
    # gcp requires additional credentials setup
    gcp = endpoint_from_flag("endpoint.gcp.ready")
    creds_path = _gcp_creds_path(component)
    with creds_path.open("w") as fp:
        # Credentials are secrets: restrict to owner read/write.
        os.fchmod(fp.fileno(), 0o600)
        fp.write(gcp.credentials)

    # create a cloud-config file that sets token-url to nil to make the
    # services use the creds env var instead of the metadata server, as
    # well as making the cluster multizone
    cloud_config_path(component).write_text(
        "[Global]\ntoken-url = nil\nmultizone = true\n"
    )

    daemon_env_path = _daemon_env_path(component)
    if daemon_env_path.exists():
        daemon_env = daemon_env_path.read_text()
        if not daemon_env.endswith("\n"):
            daemon_env += "\n"
    else:
        daemon_env = ""
    if gcp_creds_env_key not in daemon_env:
        daemon_env += "{}={}\n".format(gcp_creds_env_key, creds_path)
        daemon_env_path.parent.mkdir(parents=True, exist_ok=True)
        daemon_env_path.write_text(daemon_env)
|
||||
|
||||
|
||||
def generate_openstack_cloud_config():
    """Render the OpenStack cloud-config file contents from relation data."""
    # openstack requires additional credentials setup
    openstack = endpoint_from_flag("endpoint.openstack.ready")

    cfg = [
        "[Global]",
        "auth-url = {}".format(openstack.auth_url),
        "region = {}".format(openstack.region),
        "username = {}".format(openstack.username),
        "password = {}".format(openstack.password),
        "tenant-name = {}".format(openstack.project_name),
        "domain-name = {}".format(openstack.user_domain_name),
        "tenant-domain-name = {}".format(openstack.project_domain_name),
    ]
    if openstack.endpoint_tls_ca:
        cfg.append("ca-file = /etc/config/endpoint-ca.cert")

    cfg += ["", "[LoadBalancer]"]

    if openstack.has_octavia in (True, None):
        # Newer integrator charm will detect whether underlying OpenStack has
        # Octavia enabled so we can set this intelligently. If we're still
        # related to an older integrator, though, default to assuming Octavia
        # is available.
        cfg.append("use-octavia = true")
    else:
        cfg.append("use-octavia = false")
        cfg.append("lb-provider = haproxy")
    if openstack.subnet_id:
        cfg.append("subnet-id = {}".format(openstack.subnet_id))
    if openstack.floating_network_id:
        cfg.append("floating-network-id = {}".format(openstack.floating_network_id))
    if openstack.lb_method:
        cfg.append("lb-method = {}".format(openstack.lb_method))
    if openstack.manage_security_groups:
        cfg.append(
            "manage-security-groups = {}".format(openstack.manage_security_groups)
        )

    block_storage_opts = [
        openstack.bs_version,
        openstack.trust_device_path,
        openstack.ignore_volume_az,
    ]
    if any(block_storage_opts):
        cfg.append("")
        cfg.append("[BlockStorage]")
    if openstack.bs_version is not None:
        cfg.append("bs-version = {}".format(openstack.bs_version))
    if openstack.trust_device_path is not None:
        cfg.append("trust-device-path = {}".format(openstack.trust_device_path))
    if openstack.ignore_volume_az is not None:
        cfg.append("ignore-volume-az = {}".format(openstack.ignore_volume_az))
    return "\n".join(cfg) + "\n"
|
||||
|
||||
|
||||
def write_azure_snap_config(component):
    """Write the Azure cloud-config JSON for a snap component."""
    azure = endpoint_from_flag("endpoint.azure.ready")
    cloud_config = {
        "useInstanceMetadata": True,
        "useManagedIdentityExtension": azure.managed_identity,
        "subscriptionId": azure.subscription_id,
        "resourceGroup": azure.resource_group,
        "location": azure.resource_group_location,
        "vnetName": azure.vnet_name,
        "vnetResourceGroup": azure.vnet_resource_group,
        "subnetName": azure.subnet_name,
        "securityGroupName": azure.security_group_name,
        "loadBalancerSku": "standard",
        "securityGroupResourceGroup": azure.security_group_resource_group,
        "aadClientId": azure.aad_client_id,
        "aadClientSecret": azure.aad_client_secret,
        "tenantId": azure.tenant_id,
    }
    cloud_config_path(component).write_text(json.dumps(cloud_config))
|
||||
|
||||
|
||||
def configure_kube_proxy(
    configure_prefix, api_servers, cluster_cidr, bind_address=None
):
    """Assemble kube-proxy options and apply them via snap configuration."""
    opts = {
        "cluster-cidr": cluster_cidr,
        "kubeconfig": kubeproxyconfig_path,
        "logtostderr": "true",
        "v": "0",
        # Spread proxies across API servers by unit number.
        "master": api_servers[get_unit_number() % len(api_servers)],
        "hostname-override": get_node_name(),
    }
    if bind_address:
        opts["bind-address"] = bind_address
    elif is_ipv6(cluster_cidr):
        opts["bind-address"] = "::"

    if host.is_container():
        opts["conntrack-max-per-core"] = "0"

    if is_dual_stack(cluster_cidr):
        opts["feature-gates"] = "IPv6DualStack=true"

    configure_kubernetes_service(
        configure_prefix, "kube-proxy", opts, "proxy-extra-args"
    )
|
||||
|
||||
|
||||
def get_unit_number():
    """Return this unit's numeric index (the N in 'app/N')."""
    _, _, unit_number = hookenv.local_unit().partition("/")
    return int(unit_number)
|
||||
|
||||
|
||||
def cluster_cidr():
    """Return the cluster CIDR provided by the CNI, or None if unavailable."""
    cni = endpoint_from_flag("cni.available")
    if not cni:
        return None
    charm_config = hookenv.config()
    if "default-cni" in charm_config:
        # master: the default CNI is a charm config option
        default_cni = charm_config["default-cni"]
    else:
        # worker: learn the default CNI over the kube-control relation
        kube_control = endpoint_from_flag("kube-control.dns.available")
        if not kube_control:
            return None
        default_cni = kube_control.get_default_cni()
    cni_config = cni.get_config(default=default_cni)
    return cni_config["cidr"]
|
||||
|
||||
|
||||
def is_dual_stack(cidrs):
    """True when the CIDR list contains both an IPv4 and an IPv6 network."""
    versions = {net.version for net in get_networks(cidrs)}
    return versions == {4, 6}
|
||||
|
||||
|
||||
def is_ipv4(cidrs):
    """Detect IPv4 from CIDRs"""
    return get_ipv4_network(cidrs) is not None
|
||||
|
||||
|
||||
def is_ipv6(cidrs):
    """True when the comma-separated CIDRs include an IPv6 network."""
    ipv6_net = get_ipv6_network(cidrs)
    return ipv6_net is not None
|
||||
|
||||
|
||||
def is_ipv6_preferred(cidrs):
    """Detect if IPv6 is preferred from CIDRs.

    True when the first listed CIDR is an IPv6 network. Note: raises
    IndexError for empty/falsy cidrs, since get_networks returns [] then.
    """
    return get_networks(cidrs)[0].version == 6
|
||||
|
||||
|
||||
def get_networks(cidrs):
    """Convert a comma-separated list of CIDRs to a list of networks.

    Empty or None input yields an empty list.
    """
    parts = cidrs.split(",") if cidrs else []
    return [ipaddress.ip_interface(part).network for part in parts]
|
||||
|
||||
|
||||
def get_ipv4_network(cidrs):
    """Get the IPv4 network from the given CIDRs or None"""
    by_version = {net.version: net for net in get_networks(cidrs)}
    return by_version.get(4)
|
||||
|
||||
|
||||
def get_ipv6_network(cidrs):
    """Get the IPv6 network from the given CIDRs or None"""
    by_version = {net.version: net for net in get_networks(cidrs)}
    return by_version.get(6)
|
||||
|
||||
|
||||
def enable_ipv6_forwarding():
    """Enable net.ipv6.conf.all.forwarding in sysctl if it is not already.

    Raises CalledProcessError if sysctl fails (e.g. insufficient privileges).
    """
    # sysctl setting the key is idempotent, so no need to read it first.
    check_call(["sysctl", "net.ipv6.conf.all.forwarding=1"])
|
||||
|
||||
|
||||
def get_bind_addrs(ipv4=True, ipv6=True):
    """Get all global-scoped addresses that we might bind to.

    Skips interfaces that are not UP and virtual/overlay interfaces
    (lxdbr*, flannel*, cni*, virbr*, docker*).
    """
    try:
        output = check_output(["ip", "-br", "addr", "show", "scope", "global"])
    except CalledProcessError:
        # stderr will have any details, and go to the log
        hookenv.log("Unable to determine global addresses", hookenv.ERROR)
        return []

    ignored_prefixes = ("lxdbr", "flannel", "cni", "virbr", "docker")
    wanted_versions = {ver for ver, wanted in ((4, ipv4), (6, ipv6)) if wanted}

    found = []
    for line in output.decode("utf8").splitlines():
        interface, state, *cidrs = line.split()
        if state != "UP" or interface.startswith(ignored_prefixes):
            continue
        for cidr in cidrs:
            candidate = ipaddress.ip_interface(cidr).ip
            if candidate.version in wanted_versions:
                found.append(str(candidate))
    return found
|
||||
|
||||
|
||||
class InvalidVMwareHost(Exception):
    """Raised when the DMI product_serial does not contain a VMware serial."""

    pass
|
||||
|
||||
|
||||
def _get_vmware_uuid():
    """Derive the VM's UUID from the DMI product serial exposed in sysfs.

    Returns "UNKNOWN" if the serial file cannot be read. Raises
    InvalidVMwareHost if the serial is not a VMware serial.
    """
    serial_id_file = "/sys/class/dmi/id/product_serial"
    # The serial id from VMWare VMs comes in following format:
    # VMware-42 28 13 f5 d4 20 71 61-5d b0 7b 96 44 0c cf 54
    try:
        with open(serial_id_file, "r") as f:
            serial_string = f.read().strip()
        if "VMware-" not in serial_string:
            hookenv.log(
                "Unable to find VMware ID in "
                "product_serial: {}".format(serial_string)
            )
            raise InvalidVMwareHost
        # Strip the prefix and separators, then regroup as 8-4-4-4-12 hex.
        hex_digits = (
            serial_string.split("VMware-")[1].replace(" ", "").replace("-", "")
        )
        uuid = "-".join(
            (
                hex_digits[0:8],
                hex_digits[8:12],
                hex_digits[12:16],
                hex_digits[16:20],
                hex_digits[20:32],
            )
        )
    except IOError as err:
        hookenv.log("Unable to read UUID from sysfs: {}".format(err))
        uuid = "UNKNOWN"

    return uuid
|
||||
|
||||
|
||||
def token_generator(length=32):
    """Generate a random token for use in account tokens.

    param: length - the length of the token to generate
    """
    rng = random.SystemRandom()
    alphabet = string.ascii_letters + string.digits
    return "".join(rng.choice(alphabet) for _ in range(length))
|
||||
|
||||
|
||||
def get_secret_names():
    """Return a dict of 'username: secret_id' for Charmed Kubernetes users."""
    try:
        output = kubectl(
            "get",
            "secrets",
            "-n",
            AUTH_SECRET_NS,
            "--field-selector",
            "type={}".format(AUTH_SECRET_TYPE),
            "-o",
            "json",
        ).decode("UTF-8")
    except (CalledProcessError, FileNotFoundError):
        # The api server may not be up, or we may be trying to run kubelet
        # before the snap is installed. Send back an empty dict.
        hookenv.log("Unable to get existing secrets", level=hookenv.WARNING)
        return {}

    secrets = json.loads(output)
    secret_names = {}
    for secret in secrets["items"] if "items" in secrets else []:
        try:
            secret_id = secret["metadata"]["name"]
            username_b64 = secret["data"]["username"].encode("UTF-8")
        except (KeyError, TypeError):
            # CK secrets will have populated 'data', but not all secrets do
            continue
        secret_names[b64decode(username_b64).decode("UTF-8")] = secret_id
    return secret_names
|
||||
|
||||
|
||||
def generate_rfc1123(length=10):
    """Generate a random string compliant with RFC 1123.

    https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#dns-subdomain-names

    param: length - the length of the string to generate
    """
    # DNS subdomain names are capped at 253 characters.
    length = min(length, 253)
    alphabet = string.ascii_lowercase + string.digits
    rng = random.SystemRandom()
    return "".join(rng.choice(alphabet) for _ in range(length))
|
||||
|
||||
|
||||
def create_secret(token, username, user, groups=None):
    """Create or update the CK auth secret for a user; True on success."""
    existing = get_secret_names()
    if username in existing:
        # Use existing secret ID if one exists for our username
        secret_id = existing[username]
    else:
        # secret IDs must be unique and rfc1123 compliant
        sani_name = re.sub("[^0-9a-z.-]+", "-", user.lower())
        secret_id = "auth-{}-{}".format(sani_name, generate_rfc1123(10))

    # The authenticator expects tokens to be in the form user::token
    if "::" not in token:
        token = "{}::{}".format(user, token)

    def _b64(value):
        # Secret data fields must be base64-encoded strings.
        return b64encode(value.encode("UTF-8")).decode("utf-8")

    context = {
        "type": AUTH_SECRET_TYPE,
        "secret_name": secret_id,
        "secret_namespace": AUTH_SECRET_NS,
        "user": _b64(user),
        "username": _b64(username),
        "password": _b64(token),
        "groups": _b64(groups) if groups else "",
    }
    with tempfile.NamedTemporaryFile() as tmp_manifest:
        render("cdk.auth-webhook-secret.yaml", tmp_manifest.name, context=context)

        if kubectl_manifest("apply", tmp_manifest.name):
            hookenv.log("Created secret for {}".format(username))
            return True
        hookenv.log("WARN: Unable to create secret for {}".format(username))
        return False
|
||||
|
||||
|
||||
def get_secret_password(username):
    """Get the password for the given user from the secret that CK created.

    Args:
        username: name of the user whose stored token/password to return.

    Returns:
        The password string, or None when it cannot be determined: the
        apiserver is down and no local admin kubeconfig fallback applies,
        kubectl is not yet installed, or no matching secret exists.
    """
    try:
        output = kubectl(
            "get",
            "secrets",
            "-n",
            AUTH_SECRET_NS,
            "--field-selector",
            "type={}".format(AUTH_SECRET_TYPE),
            "-o",
            "json",
        ).decode("UTF-8")
    except CalledProcessError:
        # NB: apiserver probably isn't up. This can happen on boostrap or upgrade
        # while trying to build kubeconfig files. If we need the 'admin' token during
        # this time, pull it directly out of the kubeconfig file if possible.
        token = None
        if username == "admin":
            admin_kubeconfig = Path("/root/.kube/config")
            if admin_kubeconfig.exists():
                data = yaml.safe_load(admin_kubeconfig.read_text())
                try:
                    token = data["users"][0]["user"]["token"]
                except (KeyError, IndexError, TypeError):
                    # Malformed/unexpected kubeconfig layout; fall through.
                    pass
        return token
    except FileNotFoundError:
        # New deployments may ask for a token before the kubectl snap is installed.
        # Give them nothing!
        return None

    secrets = json.loads(output)
    if "items" in secrets:
        for secret in secrets["items"]:
            try:
                data_b64 = secret["data"]
                password_b64 = data_b64["password"].encode("UTF-8")
                username_b64 = data_b64["username"].encode("UTF-8")
            except (KeyError, TypeError):
                # CK authn secrets will have populated 'data', but not all secrets do
                continue

            # Secret fields are base64-encoded; decode before comparing.
            password = b64decode(password_b64).decode("UTF-8")
            secret_user = b64decode(username_b64).decode("UTF-8")
            if username == secret_user:
                return password
    return None
|
||||
|
|
@ -0,0 +1,60 @@
|
|||
from pathlib import Path
|
||||
|
||||
NAGIOS_PLUGINS_DIR = '/usr/lib/nagios/plugins'
|
||||
|
||||
|
||||
def install_nagios_plugin_from_text(text, plugin_name):
    """ Install a nagios plugin from its source text.

    Args:
        text: Plugin source code (str)
        plugin_name: Name of the plugin in nagios

    Returns: Full path to installed plugin
    """
    dest_path = Path(NAGIOS_PLUGINS_DIR) / plugin_name
    # Idempotency is a big deal in Juju: being called repeatedly with the
    # same content is fine and is a no-op. Differing content for the same
    # filename (multiple layers sharing a check name, or changed options)
    # gets written over the existing file.
    if dest_path.exists() and dest_path.read_text() == text:
        return dest_path

    dest_path.write_text(text)
    dest_path.chmod(0o755)
    return dest_path
|
||||
|
||||
|
||||
def install_nagios_plugin_from_file(source_file_path, plugin_name):
    """ Install a nagios plugin from a source file.

    Args:
        source_file_path: Path to plugin source file
        plugin_name: Name of the plugin in nagios

    Returns: Full path to installed plugin
    """
    plugin_text = Path(source_file_path).read_text()
    return install_nagios_plugin_from_text(plugin_text, plugin_name)
|
||||
|
||||
|
||||
def remove_nagios_plugin(plugin_name):
    """ Remove an installed nagios plugin, if present.

    Args:
        plugin_name: Name of the plugin in nagios

    Returns: None
    """
    plugin_path = Path(NAGIOS_PLUGINS_DIR) / plugin_name
    if plugin_path.exists():
        plugin_path.unlink()
|
||||
|
|
@ -0,0 +1,26 @@
|
|||
import os
|
||||
from pathlib import Path
|
||||
|
||||
import yaml
|
||||
|
||||
|
||||
_CHARM_PATH = Path(os.environ.get('JUJU_CHARM_DIR', '.'))
|
||||
_DEFAULT_FILE = _CHARM_PATH / 'layer.yaml'
|
||||
_CACHE = {}
|
||||
|
||||
|
||||
def get(section=None, option=None, layer_file=_DEFAULT_FILE):
    """Read layer options from layer.yaml, caching per resolved file.

    Args:
        section: optional top-level options section to return.
        option: optional key within section (requires section).
        layer_file: layer.yaml path, resolved relative to the charm dir.

    Raises:
        ValueError: if option is given without section.
    """
    if option and not section:
        raise ValueError('Cannot specify option without section')

    resolved = (_CHARM_PATH / layer_file).resolve()
    if resolved not in _CACHE:
        with resolved.open() as fp:
            _CACHE[resolved] = yaml.safe_load(fp.read())

    data = _CACHE[resolved].get('options', {})
    if section:
        data = data.get(section, {})
    if option:
        data = data.get(option)
    return data
|
||||
|
|
@ -0,0 +1,189 @@
|
|||
import inspect
|
||||
import errno
|
||||
import subprocess
|
||||
import yaml
|
||||
from enum import Enum
|
||||
from functools import wraps
|
||||
from pathlib import Path
|
||||
|
||||
from charmhelpers.core import hookenv
|
||||
from charms import layer
|
||||
|
||||
|
||||
_orig_call = subprocess.call
|
||||
_statuses = {'_initialized': False,
|
||||
'_finalized': False}
|
||||
|
||||
|
||||
class WorkloadState(Enum):
    """
    Enum of the valid workload states.

    Valid options are:

    * `WorkloadState.MAINTENANCE`
    * `WorkloadState.BLOCKED`
    * `WorkloadState.WAITING`
    * `WorkloadState.ACTIVE`

    Members are declared in decreasing order of precedence: MAINTENANCE
    outranks BLOCKED, which outranks WAITING, which outranks ACTIVE.
    """
    # note: order here determines precedence of state
    MAINTENANCE = 'maintenance'
    BLOCKED = 'blocked'
    WAITING = 'waiting'
    ACTIVE = 'active'
|
||||
|
||||
|
||||
def maintenance(message):
    """
    Set the status to the `MAINTENANCE` state with the given operator message.

    Note: maintenance statuses are reported to Juju immediately rather
    than queued (see `status_set`).

    # Parameters
    `message` (str): Message to convey to the operator.
    """
    status_set(WorkloadState.MAINTENANCE, message)
|
||||
|
||||
|
||||
def maint(message):
    """
    Shorthand alias for
    [maintenance](status.md#charms.layer.status.maintenance).

    # Parameters
    `message` (str): Message to convey to the operator.
    """
    maintenance(message)
|
||||
|
||||
|
||||
def blocked(message):
    """
    Set the status to the `BLOCKED` state with the given operator message.

    The status is queued and resolved against other layers' statuses
    when the hook exits (see `status_set`).

    # Parameters
    `message` (str): Message to convey to the operator.
    """
    status_set(WorkloadState.BLOCKED, message)
|
||||
|
||||
|
||||
def waiting(message):
    """
    Set the status to the `WAITING` state with the given operator message.

    The status is queued and resolved against other layers' statuses
    when the hook exits (see `status_set`).

    # Parameters
    `message` (str): Message to convey to the operator.
    """
    status_set(WorkloadState.WAITING, message)
|
||||
|
||||
|
||||
def active(message):
    """
    Set the status to the `ACTIVE` state with the given operator message.

    The status is queued and resolved against other layers' statuses
    when the hook exits (see `status_set`).

    # Parameters
    `message` (str): Message to convey to the operator.
    """
    status_set(WorkloadState.ACTIVE, message)
|
||||
|
||||
|
||||
def status_set(workload_state, message):
    """
    Set the status to the given workload state with a message.

    # Parameters
    `workload_state` (WorkloadState or str): State of the workload. Should be
        a [WorkloadState](status.md#charms.layer.status.WorkloadState) enum
        member, or the string value of one of those members.
    `message` (str): Message to convey to the operator.
    """
    # Accept plain strings for convenience; WorkloadState() raises
    # ValueError for anything that isn't a valid member value.
    if not isinstance(workload_state, WorkloadState):
        workload_state = WorkloadState(workload_state)
    # Maintenance is transient progress info, so it bypasses the queue
    # and is reported immediately.
    if workload_state is WorkloadState.MAINTENANCE:
        _status_set_immediate(workload_state, message)
        return
    # Queue the message under its state, tagged with the layer that set
    # it; _finalize resolves the queue into a single status at hook exit.
    layer = _find_calling_layer()
    _statuses.setdefault(workload_state, []).append((layer, message))
    if not _statuses['_initialized'] or _statuses['_finalized']:
        # We either aren't initialized, so the finalizer may never be run,
        # or the finalizer has already run, so it won't run again. In either
        # case, we need to manually invoke it to ensure the status gets set.
        _finalize()
|
||||
|
||||
|
||||
def _find_calling_layer():
|
||||
for frame in inspect.stack():
|
||||
# switch to .filename when trusty (Python 3.4) is EOL
|
||||
fn = Path(frame[1])
|
||||
if fn.parent.stem not in ('reactive', 'layer', 'charms'):
|
||||
continue
|
||||
layer_name = fn.stem
|
||||
if layer_name == 'status':
|
||||
continue # skip our own frames
|
||||
return layer_name
|
||||
return None
|
||||
|
||||
|
||||
def _initialize():
    """Hook the status machinery into the charm, at most once per process."""
    if _statuses['_initialized']:
        return
    # Patching is opt-out via the layer's 'patch-hookenv' option.
    if layer.options.get('status', 'patch-hookenv'):
        _patch_hookenv()
    # Resolve the queued statuses when the hook finishes.
    hookenv.atexit(_finalize)
    _statuses['_initialized'] = True
|
||||
|
||||
|
||||
def _finalize():
    """Resolve all queued statuses into a single status-set call.

    The highest-precedence WorkloadState (declaration order) that has
    queued messages wins; within that state, the message from the layer
    latest in the charm's include order takes priority.
    """
    if _statuses['_initialized']:
        # If we haven't been initialized, we can't truly be finalized.
        # This makes things more efficient if an action sets a status
        # but subsequently starts the reactive bus.
        _statuses['_finalized'] = True
    charm_name = hookenv.charm_name()
    charm_dir = Path(hookenv.charm_dir())
    # Layer precedence: base layers first, the charm's own layer last
    # (i.e. highest priority).
    with charm_dir.joinpath('layer.yaml').open() as fp:
        includes = yaml.safe_load(fp.read()).get('includes', [])
    layer_order = includes + [charm_name]

    for workload_state in WorkloadState:
        if workload_state not in _statuses:
            continue
        if not _statuses[workload_state]:
            continue

        def _get_key(record):
            # Sort key: position of the reporting layer in the include
            # order; unknown layers sort first (lowest priority).
            layer_name, message = record
            if layer_name in layer_order:
                return layer_order.index(layer_name)
            else:
                return 0

        # sorted() is stable, so ties keep their queue order; the last
        # entry is the highest-priority message for this state.
        sorted_statuses = sorted(_statuses[workload_state], key=_get_key)
        layer_name, message = sorted_statuses[-1]
        _status_set_immediate(workload_state, message)
        break
|
||||
|
||||
|
||||
def _status_set_immediate(workload_state, message):
    """Invoke the status-set hook tool right now, bypassing the queue.

    Absence of the tool (older controllers) is tolerated silently; any
    other OSError propagates.
    """
    state_value = workload_state.value
    try:
        hookenv.log('status-set: {}: {}'.format(state_value, message),
                    hookenv.INFO)
        ret = _orig_call(['status-set', state_value, message])
        if ret == 0:
            return
    except OSError as e:
        # ignore status-set not available on older controllers
        if e.errno != errno.ENOENT:
            raise
|
||||
|
||||
|
||||
def _patch_hookenv():
    """Route hookenv.status_set calls through this layer's queueing logic."""
    # we can't patch hookenv.status_set directly because other layers may have
    # already imported it into their namespace, so we have to patch sp.call
    subprocess.call = _patched_call
|
||||
|
||||
|
||||
@wraps(_orig_call)
def _patched_call(cmd, *args, **kwargs):
    """Intercept status-set invocations; pass all other calls through."""
    is_status_set = isinstance(cmd, list) and cmd and cmd[0] == 'status-set'
    if not is_status_set:
        return _orig_call(cmd, *args, **kwargs)
    _, workload_state, message = cmd
    status_set(workload_state, message)
    return 0  # make hookenv.status_set not emit spurious failure logs
|
||||
|
|
@ -0,0 +1,8 @@
|
|||
import os
|
||||
|
||||
# Directory containing the debug scripts; the harness that runs these
# scripts is expected to export DEBUG_SCRIPT_DIR (KeyError otherwise).
# NOTE(review): `dir` shadows the builtin; kept as-is since other debug
# scripts may import it by this name.
dir = os.environ["DEBUG_SCRIPT_DIR"]


def open_file(path, *args, **kwargs):
    """ Open a file within the debug script dir """
    return open(os.path.join(dir, path), *args, **kwargs)
|
||||
|
|
@ -0,0 +1,20 @@
|
|||
#!.tox/py3/bin/python
"""Build the API docs with pydoc-markdown, then clean up build artifacts."""

import os
import sys
from shutil import rmtree
from unittest.mock import patch

import pydocmd.__main__


# Mock out hookenv.metadata so importing the charm modules doesn't
# require a real charm environment.
with patch('charmhelpers.core.hookenv.metadata') as metadata:
    # Make the charm's lib/ and reactive/ trees importable for pydocmd.
    sys.path.insert(0, 'lib')
    sys.path.insert(1, 'reactive')
    print(sys.argv)
    # Default to the 'build' subcommand when none was given.
    if len(sys.argv) == 1:
        sys.argv.extend(['build'])
    pydocmd.__main__.main()
    # Remove pydocmd's intermediate output and any unit state created
    # as an import side effect of the charm code.
    rmtree('_build')
    if os.path.exists('.unit-state.db'):
        os.remove('.unit-state.db')
|
||||
|
|
@ -0,0 +1,42 @@
|
|||
"name": "flannel"
|
||||
"summary": "A charm that provides a robust Software Defined Network"
|
||||
"maintainers":
|
||||
- "Tim Van Steenburgh <tim.van.steenburgh@canonical.com>"
|
||||
- "George Kraft <george.kraft@canonical.com>"
|
||||
- "Rye Terrell <rye.terrell@canonical.com>"
|
||||
- "Konstantinos Tsakalozos <kos.tsakalozos@canonical.com>"
|
||||
- "Charles Butler <Chuck@dasroot.net>"
|
||||
"description": |
|
||||
it is a generic overlay network that can be used as a simple alternative
|
||||
to existing software defined networking solutions
|
||||
"tags":
|
||||
- "misc"
|
||||
- "networking"
|
||||
"series":
|
||||
- "focal"
|
||||
- "bionic"
|
||||
- "xenial"
|
||||
"requires":
|
||||
"etcd":
|
||||
"interface": "etcd"
|
||||
"cni":
|
||||
"interface": "kubernetes-cni"
|
||||
"scope": "container"
|
||||
"provides":
|
||||
"nrpe-external-master":
|
||||
"interface": "nrpe-external-master"
|
||||
"scope": "container"
|
||||
"resources":
|
||||
"flannel-amd64":
|
||||
"type": "file"
|
||||
"filename": "flannel.tar.gz"
|
||||
"description": "A tarball packaged release of flannel for amd64"
|
||||
"flannel-arm64":
|
||||
"type": "file"
|
||||
"filename": "flannel.tar.gz"
|
||||
"description": "A tarball packaged release of flannel for arm64"
|
||||
"flannel-s390x":
|
||||
"type": "file"
|
||||
"filename": "flannel.tar.gz"
|
||||
"description": "A tarball packaged release of flannel for s390x"
|
||||
"subordinate": !!bool "true"
|
||||
|
|
@ -0,0 +1,16 @@
|
|||
site_name: 'Status Management Layer'
|
||||
|
||||
generate:
|
||||
- status.md:
|
||||
- charms.layer.status.WorkloadState
|
||||
- charms.layer.status.maintenance
|
||||
- charms.layer.status.maint
|
||||
- charms.layer.status.blocked
|
||||
- charms.layer.status.waiting
|
||||
- charms.layer.status.active
|
||||
- charms.layer.status.status_set
|
||||
|
||||
pages:
|
||||
- Status Management Layer: status.md
|
||||
|
||||
gens_dir: docs
|
||||
|
|
@ -0,0 +1,359 @@
|
|||
import os
|
||||
import json
|
||||
from shlex import split
|
||||
from subprocess import check_output, check_call, CalledProcessError, STDOUT
|
||||
|
||||
from charms.flannel.common import retry
|
||||
|
||||
from charms.reactive import set_state, remove_state, when, when_not, hook
|
||||
from charms.reactive import when_any
|
||||
from charms.templating.jinja2 import render
|
||||
from charmhelpers.core.host import service_start, service_stop, service_restart
|
||||
from charmhelpers.core.host import service_running, service
|
||||
from charmhelpers.core.hookenv import log, resource_get
|
||||
from charmhelpers.core.hookenv import config, application_version_set
|
||||
from charmhelpers.core.hookenv import network_get
|
||||
from charmhelpers.contrib.charmsupport import nrpe
|
||||
from charms.reactive.helpers import data_changed
|
||||
|
||||
from charms.layer import status
|
||||
|
||||
|
||||
# Location of the etcd client TLS material used by flanneld/etcdctl.
ETCD_PATH = '/etc/ssl/flannel'
ETCD_KEY_PATH = os.path.join(ETCD_PATH, 'client-key.pem')
ETCD_CERT_PATH = os.path.join(ETCD_PATH, 'client-cert.pem')
ETCD_CA_PATH = os.path.join(ETCD_PATH, 'client-ca.pem')
|
||||
|
||||
|
||||
@when_not('flannel.binaries.installed')
def install_flannel_binaries():
    ''' Unpack the Flannel binaries. '''
    # Fetch the per-architecture resource attached to the charm; any
    # failure blocks the unit rather than crashing the hook.
    try:
        resource_name = 'flannel-{}'.format(arch())
        archive = resource_get(resource_name)
    except Exception:
        message = 'Error fetching the flannel resource.'
        log(message)
        status.blocked(message)
        return
    if not archive:
        message = 'Missing flannel resource.'
        log(message)
        status.blocked(message)
        return
    # A placeholder/zero-byte resource is common before the operator
    # attaches the real one; a real flannel tarball is well over 1MB.
    filesize = os.stat(archive).st_size
    if filesize < 1000000:
        message = 'Incomplete flannel resource'
        log(message)
        status.blocked(message)
        return
    status.maintenance('Unpacking flannel resource.')
    charm_dir = os.getenv('CHARM_DIR')
    unpack_path = os.path.join(charm_dir, 'files', 'flannel')
    os.makedirs(unpack_path, exist_ok=True)
    cmd = ['tar', 'xfz', archive, '-C', unpack_path]
    log(cmd)
    check_call(cmd)
    # Install the unpacked binaries onto the system PATH.
    apps = [
        {'name': 'flanneld', 'path': '/usr/local/bin'},
        {'name': 'etcdctl', 'path': '/usr/local/bin'}
    ]
    for app in apps:
        unpacked = os.path.join(unpack_path, app['name'])
        app_path = os.path.join(app['path'], app['name'])
        install = ['install', '-v', '-D', unpacked, app_path]
        check_call(install)
    set_state('flannel.binaries.installed')
|
||||
|
||||
|
||||
@when('cni.is-worker')
@when_not('flannel.cni.configured')
def configure_cni(cni):
    ''' Set up the flannel cni configuration file. '''
    # The conflist template takes no variables, hence the empty context.
    render('10-flannel.conflist', '/etc/cni/net.d/10-flannel.conflist', {})
    set_state('flannel.cni.configured')
|
||||
|
||||
|
||||
@when('etcd.tls.available')
@when_not('flannel.etcd.credentials.installed')
def install_etcd_credentials(etcd):
    ''' Install the etcd credential files. '''
    # Writes the client key/cert/CA to the ETCD_*_PATH locations so
    # flanneld and etcdctl can authenticate to etcd.
    etcd.save_client_credentials(ETCD_KEY_PATH, ETCD_CERT_PATH, ETCD_CA_PATH)
    set_state('flannel.etcd.credentials.installed')
|
||||
|
||||
|
||||
def default_route_interface():
    ''' Returns the network interface of the system's default route '''
    # Scan `route` output; if several default routes are listed, the
    # last one wins. Returns None when no default route exists.
    iface = None
    for line in check_output(['route']).decode('utf8').split('\n'):
        if 'default' in line:
            iface = line.split(' ')[-1]
    return iface
|
||||
|
||||
|
||||
def get_bind_address_interface():
    ''' Returns a non-fan bind-address interface for the cni endpoint.
    Falls back to default_route_interface() if bind-address is not available.
    '''
    try:
        data = network_get('cni')
    except NotImplementedError:
        # Juju < 2.1
        return default_route_interface()

    if 'bind-addresses' not in data:
        # Juju < 2.3
        return default_route_interface()

    # Prefer the first bind-address that is not a fan overlay interface.
    non_fan = [addr['interfacename'] for addr in data['bind-addresses']
               if not addr['interfacename'].startswith('fan-')]
    if non_fan:
        return non_fan[0]

    # If we made it here, we didn't find a non-fan CNI bind-address, which is
    # unexpected. Let's log a message and play it safe.
    log('Could not find a non-fan bind-address. Using fallback interface.')
    return default_route_interface()
|
||||
|
||||
|
||||
@when('flannel.binaries.installed', 'flannel.etcd.credentials.installed',
      'etcd.tls.available')
@when_not('flannel.service.installed')
def install_flannel_service(etcd):
    ''' Install the flannel service. '''
    status.maintenance('Installing flannel service.')
    # keep track of our etcd conn string and cert info so we can detect when it
    # changes later
    data_changed('flannel_etcd_connections', etcd.get_connection_string())
    data_changed('flannel_etcd_client_cert', etcd.get_client_credentials())
    # Operator-configured interface wins; otherwise pick one automatically.
    iface = config('iface') or get_bind_address_interface()
    context = {'iface': iface,
               'connection_string': etcd.get_connection_string(),
               'cert_path': ETCD_PATH}
    render('flannel.service', '/lib/systemd/system/flannel.service', context)
    service('enable', 'flannel')
    set_state('flannel.service.installed')
    # Force a (re)start with the freshly rendered unit file.
    remove_state('flannel.service.started')
|
||||
|
||||
|
||||
@when('config.changed.iface')
def reconfigure_flannel_service():
    ''' Handle interface configuration change. '''
    # Dropping the state re-triggers install_flannel_service, which
    # re-renders the systemd unit with the new interface.
    remove_state('flannel.service.installed')
|
||||
|
||||
|
||||
@when('etcd.available', 'flannel.service.installed')
def etcd_changed(etcd):
    ''' React to changes in the etcd relation data. '''
    # Re-render the service if the etcd connection string changed.
    if data_changed('flannel_etcd_connections', etcd.get_connection_string()):
        remove_state('flannel.service.installed')
    # Refresh on-disk client credentials if etcd's certs were rotated.
    if data_changed('flannel_etcd_client_cert', etcd.get_client_credentials()):
        etcd.save_client_credentials(ETCD_KEY_PATH,
                                     ETCD_CERT_PATH,
                                     ETCD_CA_PATH)
        remove_state('flannel.service.installed')
|
||||
|
||||
|
||||
@when('flannel.binaries.installed', 'flannel.etcd.credentials.installed',
      'etcd.available')
@when_not('flannel.network.configured')
def invoke_configure_network(etcd):
    ''' invoke network configuration and adjust states '''
    status.maintenance('Negotiating flannel network subnet.')
    if configure_network(etcd):
        set_state('flannel.network.configured')
        # Restart flannel so it picks up the (possibly new) subnet.
        remove_state('flannel.service.started')
    else:
        # configure_network already retried internally; wait for the
        # next hook invocation to try again.
        status.waiting('Waiting on etcd.')
|
||||
|
||||
|
||||
@retry(times=3, delay_secs=20)
def configure_network(etcd):
    ''' Store initial flannel data in etcd.

    Writes the network CIDR and vxlan backend settings (plus optional
    VNI and Port from charm config) to /coreos.com/network/config.

    Returns True if the operation completed successfully.

    '''
    flannel_config = {
        'Network': config('cidr'),
        'Backend': {
            'Type': 'vxlan'
        }
    }

    vni = config('vni')
    if vni:
        flannel_config['Backend']['VNI'] = vni

    port = config('port')
    if port:
        flannel_config['Backend']['Port'] = port

    data = json.dumps(flannel_config)
    # Build the argv list directly instead of a shell-quoted string fed
    # through shlex.split: the old approach broke if the JSON payload
    # (or connection string) ever contained a single-quote character.
    cmd = ['etcdctl',
           '--endpoint', etcd.get_connection_string(),
           '--cert-file', ETCD_CERT_PATH,
           '--key-file', ETCD_KEY_PATH,
           '--ca-file', ETCD_CA_PATH,
           'set', '/coreos.com/network/config', data]
    try:
        check_call(cmd)
        return True
    except CalledProcessError:
        log('Unexpected error configuring network. Assuming etcd not'
            ' ready. Will retry in 20s')
        return False
|
||||
|
||||
|
||||
@when_any('config.changed.cidr', 'config.changed.port', 'config.changed.vni')
def reconfigure_network():
    ''' Trigger the network configuration method. '''
    # Dropping the state re-runs invoke_configure_network with the new
    # cidr/port/vni config values.
    remove_state('flannel.network.configured')
|
||||
|
||||
|
||||
@when('flannel.binaries.installed', 'flannel.service.installed',
      'flannel.network.configured')
@when_not('flannel.service.started')
def start_flannel_service():
    ''' Start the flannel service. '''
    status.maintenance('Starting flannel service.')
    # Restart rather than start when already running, so a re-rendered
    # unit file / new subnet takes effect.
    action = service_restart if service_running('flannel') else service_start
    action('flannel')
    set_state('flannel.service.started')
|
||||
|
||||
|
||||
@when('cni.connected', 'flannel.service.started')
@when_any('flannel.cni.configured', 'cni.is-master')
@when_not('flannel.cni.available')
def set_available(cni):
    ''' Indicate to the CNI provider that we're ready. '''
    # Publish the pod CIDR and conf file name over the cni relation.
    cni.set_config(cidr=config('cidr'), cni_conf_file='10-flannel.conflist')
    set_state('flannel.cni.available')
|
||||
|
||||
|
||||
@when('flannel.binaries.installed')
@when_not('flannel.version.set')
def set_flannel_version():
    ''' Surface the currently deployed version of flannel to Juju '''
    cmd = 'flanneld -version'
    # flanneld writes its version string to stderr, hence stderr=STDOUT.
    version = check_output(split(cmd), stderr=STDOUT).decode('utf-8')
    if version:
        # Strip any leading "v" prefix and surrounding whitespace;
        # state stays unset on empty output so this handler retries.
        application_version_set(version.split('v')[-1].strip())
        set_state('flannel.version.set')
|
||||
|
||||
|
||||
@when('nrpe-external-master.available')
@when_not('nrpe-external-master.initial-config')
def initial_nrpe_config(nagios=None):
    ''' One-time nagios setup; later changes go via update_nrpe_config. '''
    set_state('nrpe-external-master.initial-config')
    update_nrpe_config(nagios)
|
||||
|
||||
|
||||
@when('flannel.service.started')
@when('nrpe-external-master.available')
@when_any('config.changed.nagios_context',
          'config.changed.nagios_servicegroups')
def update_nrpe_config(unused=None):
    ''' (Re)write the nagios NRPE checks for the flannel service. '''
    # List of systemd services that will be checked
    services = ('flannel',)

    # The current nrpe-external-master interface doesn't handle a lot of logic,
    # use the charm-helpers code for now.
    hostname = nrpe.get_nagios_hostname()
    current_unit = nrpe.get_nagios_unit_name()
    nrpe_setup = nrpe.NRPE(hostname=hostname, primary=False)
    nrpe.add_init_service_checks(nrpe_setup, services, current_unit)
    nrpe_setup.write()
|
||||
|
||||
|
||||
@when('flannel.service.started')
@when('flannel.cni.available')
def ready():
    ''' Indicate that flannel is active. '''
    try:
        status.active('Flannel subnet ' + get_flannel_subnet())
    except FlannelSubnetNotFound:
        # flanneld hasn't written /run/flannel/subnet.env yet.
        status.waiting('Waiting for Flannel')
|
||||
|
||||
|
||||
@when_not('etcd.connected')
def halt_execution():
    ''' send a clear message to the user that we are waiting on etcd '''
    status.blocked('Waiting for etcd relation.')
|
||||
|
||||
|
||||
@hook('upgrade-charm')
def reset_states_and_redeploy():
    ''' Remove state and redeploy '''
    # Clear every flannel state so the full install/configure/start
    # pipeline runs again with the upgraded charm.
    for state in ('flannel.cni.available',
                  'flannel.binaries.installed',
                  'flannel.service.started',
                  'flannel.version.set',
                  'flannel.network.configured',
                  'flannel.service.installed',
                  'flannel.cni.configured'):
        remove_state(state)
    try:
        # Older charm revisions wrote a .conf file instead of .conflist.
        log('Deleting /etc/cni/net.d/10-flannel.conf')
        os.remove('/etc/cni/net.d/10-flannel.conf')
    except FileNotFoundError as e:
        log(str(e))
|
||||
|
||||
|
||||
@hook('pre-series-upgrade')
def pre_series_upgrade():
    ''' Block the unit while the operator upgrades the machine series. '''
    status.blocked('Series upgrade in progress')
|
||||
|
||||
|
||||
@hook('stop')
def cleanup_deployment():
    ''' Terminate services, and remove the deployed bins '''
    service_stop('flannel')
    # Tear down the vxlan interface flanneld created; best-effort only.
    down = 'ip link set flannel.1 down'
    delete = 'ip link delete flannel.1'
    try:
        check_call(split(down))
        check_call(split(delete))
    except CalledProcessError:
        log('Unable to remove iface flannel.1')
        log('Potential indication that cleanup is not possible')
    # Everything this charm wrote to disk. (Fix: '/usr/local/bin/flanneld'
    # was listed twice; the duplicate entry has been removed.)
    files = ['/usr/local/bin/flanneld',
             '/usr/local/bin/etcdctl',
             '/lib/systemd/system/flannel',
             '/lib/systemd/system/flannel.service',
             '/run/flannel/subnet.env',
             '/etc/cni/net.d/10-flannel.conflist',
             ETCD_KEY_PATH,
             ETCD_CERT_PATH,
             ETCD_CA_PATH]
    for f in files:
        if os.path.exists(f):
            log('Removing {}'.format(f))
            os.remove(f)
|
||||
|
||||
|
||||
def get_flannel_subnet():
    ''' Returns the flannel subnet reserved for this unit.

    Raises FlannelSubnetNotFound if flanneld has not written
    /run/flannel/subnet.env yet.
    '''
    try:
        with open('/run/flannel/subnet.env') as f:
            # Split on the first '=' only: values may themselves contain
            # '=' characters, which would make a bare split() produce
            # 3+ items and crash the dict() constructor.
            raw_data = dict(line.strip().split('=', 1) for line in f)
            return raw_data['FLANNEL_SUBNET']
    except FileNotFoundError as e:
        raise FlannelSubnetNotFound() from e
|
||||
|
||||
|
||||
def arch():
    '''Return the package architecture as a string.'''
    # dpkg reports the architecture of the running system (e.g. amd64).
    raw = check_output(['dpkg', '--print-architecture'])
    # Decode the bytes output and drop the trailing newline.
    return raw.decode('utf-8').rstrip()
|
||||
|
||||
|
||||
class FlannelSubnetNotFound(Exception):
    ''' Raised when the flannel subnet file is not (yet) available. '''
    pass
|
||||
|
|
@ -0,0 +1,4 @@
|
|||
from charms import layer


# Hook the status layer into the charm as soon as the reactive bus
# loads this file (patches subprocess.call and registers the finalizer).
layer.status._initialize()
|
||||
|
|
@ -0,0 +1,3 @@
|
|||
mock
|
||||
flake8
|
||||
pytest
|
||||
|
|
@ -0,0 +1 @@
|
|||
0
|
||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue