Commit a393ebb2 authored by Tim Bleimehl

make python 3.7 compatible

parent 9134ad16
Pipeline #1473 failed in 1 minute and 37 seconds
@@ -66,7 +66,8 @@ class Backup:
             "path": self.path,
         }
         if meta_data:
-            data = data | {
+            data = {
+                **data,
                 "retention_type": self.retention_type,
                 "database_name": self.database_name,
             }
......
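For context on the change above: the dict union operator `|` used in the removed line was only introduced in Python 3.9 (PEP 584), while `{**a, **b}` unpacking works on Python 3.5+. A minimal sketch with made-up values (not CoDaBuddy code) showing the difference:

```python
data = {"name": "db1", "path": "/backups/db1"}
extra = {"retention_type": "daily", "database_name": "db1"}

# Python 3.9+ only (PEP 584); on 3.7 this raises
# TypeError: unsupported operand type(s) for |: 'dict' and 'dict'
# data = data | extra

# Works on Python 3.5+ and produces the same merged dict:
data = {**data, **extra}
print(data)
```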
@@ -55,17 +55,16 @@ class Container:
     @classmethod
     def from_kubernetes_get_dict(cls, kubectl_get_item_result: Dict):
         # todo validate correct item by "kind" field
+        lbls = kubectl_get_item_result["metadata"]["labels"]
+        if "annotations" in kubectl_get_item_result["metadata"]:
+            lbls = {**lbls, **kubectl_get_item_result["metadata"]["annotations"]}
         container = cls(
             mode="kubernetes",
             id=kubectl_get_item_result["metadata"]["uid"],
             name=kubectl_get_item_result["metadata"]["name"],
             backup_name=kubectl_get_item_result["metadata"]["name"],
             coda_labels=ValidLabels.valid_labels_from_dict(
-                kubectl_get_item_result["metadata"]["labels"]
-                | kubectl_get_item_result["metadata"]["annotations"]
-                if "annotations" in kubectl_get_item_result["metadata"]
-                else {},
+                lbls,
                 add_missing_default_labels=True,
             ),
             other_labels=ValidLabels.non_valid_labels_from_dict(
@@ -189,14 +188,14 @@ class ContainerHelper:
     def _attach_parent_workload_metadata_to_pod(
         cls, pod: Container, parent_workload: Dict
     ) -> Container:
+        lbls = parent_workload["metadata"]["labels"]
+        if "annotations" in parent_workload["metadata"]:
+            lbls = {**lbls, **parent_workload["metadata"]["annotations"]}
         workload_backup_config_labels = ValidLabels.valid_labels_from_dict(
-            parent_workload["metadata"]["labels"]
-            | parent_workload["metadata"]["annotations"]
-            if "annotations" in parent_workload["metadata"]
-            else {},
+            lbls,
             add_missing_default_labels=True,
         )
-        pod.coda_labels = pod.coda_labels | workload_backup_config_labels
+        pod.coda_labels = {**workload_backup_config_labels, **pod.coda_labels}
         if (
             ValidLabels.backup_name in pod.coda_labels
......
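On merge precedence: in both `a | b` (3.9+) and `{**a, **b}`, values from the right-hand dict win on duplicate keys, so the labels/annotations rewrites above keep the same behaviour. A small sketch with hypothetical label values (not from the repository):

```python
labels = {"backup": "enabled", "app": "postgres"}
annotations = {"backup": "disabled"}

# {**labels, **annotations} behaves like labels | annotations on 3.9+:
# the right-hand dict (annotations) wins on key collisions.
merged = {**labels, **annotations}
assert merged == {"backup": "disabled", "app": "postgres"}
```

Note that the last changed line in the hunk above also swaps the operand order, so on duplicate keys `pod.coda_labels` now takes precedence over `workload_backup_config_labels`, whereas the removed `|` form let the workload labels win.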
@@ -160,9 +160,14 @@ spec:
 ### Setup Databases
-We use the CoDaBuddy `auto-create`-feature to create our user and database
+We use the CoDaBuddy Docker container's `auto-create` feature to create our user and database
-`auto-create kubernetes --all-namespaces`
+`docker run --rm -it --network=host -v ~/.kube/config:/.kube/config registry-gl.connect.dzd-ev.de:443/dzdtools/codabuddy auto-create --debug kubernetes --all-namespaces`
+todo-note: this is not working yet due to missing role authorization; it results in `Error from server (Forbidden): pods is forbidden: User "system:serviceaccount:default:default" cannot list resource "pods" in API group "" at the cluster scope`
+`kubectl run codabuddy --restart=Never --rm -i --image=registry-gl.connect.dzd-ev.de:443/dzdtools/codabuddy -- auto-create --debug kubernetes --all-namespaces`
 ### Backup
......
@@ -5,7 +5,10 @@ USER 0
 COPY --from=docker:latest /usr/local/bin/docker /usr/local/bin/
 # RUN groupadd docker && useradd -u 1001 -g docker justaname
 # install python3
 RUN install_packages python3 python3-pip git
+#RUN install_packages python3.9 python3-pip git
+RUN apt update && apt search python3
+#RUN install_packages software-properties-common && add-apt-repository ppa:deadsnakes/ppa && install_packages python3-9 python3-pip git
 # install required python modules
 RUN pip3 install setuptools
......
@@ -15,7 +15,7 @@ setup(
     license="MIT",
     packages=["CoDaBuddy"],
     install_requires=["DZDConfigs", "Click", "tabulate", "humanize", "pyyaml"],
-    python_requires=">=3.7",
+    python_requires=">=3.9",
     zip_safe=False,
     include_package_data=True,
     use_scm_version={
......