diff --git a/.devcontainer/.env.example b/.devcontainer/.env.example new file mode 100755 index 00000000..453b0979 --- /dev/null +++ b/.devcontainer/.env.example @@ -0,0 +1,12 @@ +# Dev container user configuration +# Copy this file to .env and customize as needed +# These values will bind the container user to your host user for seamless file sharing + +# Your username (defaults to 'devuser' if not set) +USERNAME=${USER} + +# Your user ID (run 'id -u' on host to get this) +USER_UID=1000 + +# Your group ID (run 'id -g' on host to get this) +USER_GID=1000 diff --git a/.devcontainer/.gitignore b/.devcontainer/.gitignore new file mode 100755 index 00000000..0247178b --- /dev/null +++ b/.devcontainer/.gitignore @@ -0,0 +1 @@ +home \ No newline at end of file diff --git a/deployment-configuration/helm/templates/auto-database-postgres-operator.yaml b/deployment-configuration/helm/templates/auto-database-postgres-operator.yaml new file mode 100644 index 00000000..25d380b0 --- /dev/null +++ b/deployment-configuration/helm/templates/auto-database-postgres-operator.yaml @@ -0,0 +1,86 @@ +{{- define "deploy_utils.database.postgres.operator" }} +--- +apiVersion: v1 +kind: Secret +metadata: + name: {{ printf "%s-credentials" .app.harness.database.name | quote }} + namespace: {{ .root.Values.namespace }} + labels: + app: {{ .app.harness.database.name | quote }} +{{ include "deploy_utils.labels" .root | indent 4 }} +type: kubernetes.io/basic-auth +stringData: + username: {{ .app.harness.database.user | quote }} + password: {{ .app.harness.database.pass | quote }} +--- +apiVersion: postgresql.cnpg.io/v1 +kind: Cluster +metadata: + name: {{ .app.harness.database.name | quote }} + namespace: {{ .root.Values.namespace }} + labels: + app: {{ .app.harness.database.name | quote }} +{{ include "deploy_utils.labels" .root | indent 4 }} +spec: + instances: {{ .app.harness.database.postgres.instances | default 1 }} + + inheritedMetadata: + labels: + app: {{ .app.harness.database.name | 
quote }} + service: db + + bootstrap: + initdb: + database: {{ .app.harness.database.postgres.initialdb | quote }} + owner: {{ .app.harness.database.user | quote }} + secret: + name: {{ printf "%s-credentials" .app.harness.database.name | quote }} + + storage: + size: {{ .app.harness.database.size }} + + {{- with .app.harness.database.resources }} + resources: + {{- with .requests }} + requests: + {{- with .memory }} + memory: {{ . | quote }} + {{- end }} + {{- with .cpu }} + cpu: {{ . | quote }} + {{- end }} + {{- end }} + {{- with .limits }} + limits: + {{- with .memory }} + memory: {{ . | quote }} + {{- end }} + {{- end }} + {{- end }} + + {{- if .app.harness.database.image_ref }} + imageName: {{ index (index .app "task-images") .app.harness.database.image_ref | default ("Image ref not found!" | quote) }} + {{- else if .app.harness.database.postgres.image }} + imageName: {{ .app.harness.database.postgres.image | quote }} + {{- end }} +--- +apiVersion: v1 +kind: Service +metadata: + name: {{ .app.harness.database.name | quote }} + namespace: {{ .root.Values.namespace }} + labels: + app: {{ .app.harness.deployment.name | quote }} +{{ include "deploy_utils.labels" .root | indent 4 }} +spec: + type: {{ if .app.harness.database.expose }}LoadBalancer{{ else }}ClusterIP{{ end }} + selector: + app: {{ .app.harness.database.name | quote }} + cnpg.io/instanceRole: primary + ports: + {{- range $port := .app.harness.database.postgres.ports }} + - name: {{ $port.name }} + port: {{ $port.port }} + targetPort: 5432 + {{- end }} +{{- end }} diff --git a/deployment-configuration/helm/templates/auto-database.yaml b/deployment-configuration/helm/templates/auto-database.yaml index 212d763f..a23e97a6 100644 --- a/deployment-configuration/helm/templates/auto-database.yaml +++ b/deployment-configuration/helm/templates/auto-database.yaml @@ -14,6 +14,9 @@ spec: --- {{- end }} {{- define "deploy_utils.database" }} +{{- if and (eq .app.harness.database.type "postgres") 
.app.harness.database.postgres.operator }} +{{- include "deploy_utils.database.postgres.operator" . }} +{{- else }} --- kind: PersistentVolumeClaim apiVersion: v1 @@ -105,6 +108,7 @@ spec: - name: {{ $port.name }} port: {{ $port.port }} {{- end }} +{{- end }} --- {{- include "deploy_utils.database_network_policy" (dict "root" .root "app" .app) }} {{ end }} diff --git a/deployment-configuration/helm/templates/auto-network-policies.yaml b/deployment-configuration/helm/templates/auto-network-policies.yaml index 6dbf4eb0..63f5dc4c 100644 --- a/deployment-configuration/helm/templates/auto-network-policies.yaml +++ b/deployment-configuration/helm/templates/auto-network-policies.yaml @@ -117,6 +117,38 @@ spec: protocol: UDP - port: 53 protocol: TCP + {{- if and (eq .app.harness.database.type "postgres") .app.harness.database.postgres.operator }} + # Allow CNPG pods to reach the Kubernetes API server + {{- $apiCidrs := list }} + {{- $kubeSvc := (lookup "v1" "Service" "default" "kubernetes") }} + {{- $kubeEp := (lookup "v1" "Endpoints" "default" "kubernetes") }} + {{- if $kubeSvc }} + {{- if $kubeSvc.spec }} + {{- if $kubeSvc.spec.clusterIP }} + {{- $apiCidrs = append $apiCidrs (printf "%s/32" $kubeSvc.spec.clusterIP) }} + {{- end }} + {{- end }} + {{- end }} + {{- if and $kubeEp $kubeEp.subsets }} + {{- range $subset := $kubeEp.subsets }} + {{- range $addr := $subset.addresses }} + {{- $apiCidrs = append $apiCidrs (printf "%s/32" $addr.ip) }} + {{- end }} + {{- end }} + {{- end }} + {{- /* Fall back to configured CIDRs when lookup returns nothing (e.g. 
helm template) */ -}} + {{- if not $apiCidrs }} + {{- $apiCidrs = .app.harness.database.postgres.apiServerCidr }} + {{- end }} + {{- range $cidr := $apiCidrs }} + - to: + - ipBlock: + cidr: {{ $cidr }} + ports: + - port: 443 + protocol: TCP + {{- end }} + {{- end }} {{- range $ns := $allowedNamespaces }} # Allow traffic to whitelisted namespace - to: diff --git a/deployment-configuration/value-template.yaml b/deployment-configuration/value-template.yaml index fef6f5a8..fa34ca0a 100644 --- a/deployment-configuration/value-template.yaml +++ b/deployment-configuration/value-template.yaml @@ -95,8 +95,15 @@ harness: port: 27017 # -- settings for postgers database (for type==postgres) postgres: - image: postgres:13 + image: initialdb: cloudharness + # -- Use the CloudNative-PG operator instead of a plain Deployment. Requires the CNPG operator to be installed in the cluster. + operator: false + # -- Number of PostgreSQL instances managed by the CNPG operator (only used when operator is true) + instances: 1 + # -- CIDR(s) allowed for CNPG pods to reach the Kubernetes API server (port 443). + # -- Resolved automatically at deploy time via cluster lookup. Set explicitly only as a fallback for helm-template or air-gapped use. 
+ apiServerCidr: [] ports: - name: http port: 5432 diff --git a/deployment/codefresh-test.yaml b/deployment/codefresh-test.yaml index b4ad6b8d..03258450 100644 --- a/deployment/codefresh-test.yaml +++ b/deployment/codefresh-test.yaml @@ -1,76 +1,76 @@ version: '1.0' stages: -- prepare -- build -- unittest -- deploy -- qa +- 'prepare' +- 'build' +- 'unittest' +- 'deploy' +- 'qa' steps: main_clone: - title: Clone main repository - type: git-clone - stage: prepare + title: 'Clone main repository' + type: 'git-clone' + stage: 'prepare' repo: '${{CF_REPO_OWNER}}/${{CF_REPO_NAME}}' revision: '${{CF_BRANCH}}' - git: github + git: 'github' post_main_clone: - title: Post main clone - type: parallel - stage: prepare + title: 'Post main clone' + type: 'parallel' + stage: 'prepare' steps: clone_cloud_harness: - title: Cloning cloud-harness repository... - type: git-clone - stage: prepare - repo: https://github.com/MetaCell/cloud-harness.git + title: 'Cloning cloud-harness repository...' + type: 'git-clone' + stage: 'prepare' + repo: 'https://github.com/MetaCell/cloud-harness.git' revision: '${{CLOUDHARNESS_BRANCH}}' - working_directory: . - git: github + working_directory: '.' + git: 'github' prepare_deployment: - title: Prepare helm chart - image: python:3.12 - stage: prepare - working_directory: . + title: 'Prepare helm chart' + image: 'python:3.12' + stage: 'prepare' + working_directory: '.' 
commands: - - bash cloud-harness/install.sh - - export HELM_NAME_ARG="$( [ -n "${{CHART_NAME}}" ] && printf -- "--name %s" "${{CHART_NAME}}" - )" - - export HELM_CHART_VERSION_ARG="$( [ -n "${{CHART_VERSION}}" ] && printf -- "--chart-version - %s" "${{CHART_VERSION}}" )" - - export HELM_APP_VERSION_ARG="$( APP_VERSION_RESOLVED="${{APP_VERSION}}"; [ -z - "$APP_VERSION_RESOLVED" ] && APP_VERSION_RESOLVED="${{DEPLOYMENT_TAG}}"; [ -n - "$APP_VERSION_RESOLVED" ] && printf -- "--app-version %s" "$APP_VERSION_RESOLVED" - )" - - export HELM_META_ARGS="$HELM_NAME_ARG $HELM_CHART_VERSION_ARG $HELM_APP_VERSION_ARG" - - harness-deployment . -n test-${{NAMESPACE_BASENAME}} -d ${{DOMAIN}} -r ${{REGISTRY}} - -rsn ${{REGISTRY_SECRET}} -e test --write-env --cache-url '${{IMAGE_CACHE_URL}}' - -N $HELM_META_ARGS -i samples - - cat deployment/.env >> ${{CF_VOLUME_PATH}}/env_vars_to_export - - cat ${{CF_VOLUME_PATH}}/env_vars_to_export + - 'bash cloud-harness/install.sh' + - 'export HELM_NAME_ARG="$( [ -n "${{CHART_NAME}}" ] && printf -- "--name %s" + "${{CHART_NAME}}" )"' + - 'export HELM_CHART_VERSION_ARG="$( [ -n "${{CHART_VERSION}}" ] && printf -- + "--chart-version %s" "${{CHART_VERSION}}" )"' + - 'export HELM_APP_VERSION_ARG="$( APP_VERSION_RESOLVED="${{APP_VERSION}}"; [ + -z "$APP_VERSION_RESOLVED" ] && APP_VERSION_RESOLVED="${{DEPLOYMENT_TAG}}"; + [ -n "$APP_VERSION_RESOLVED" ] && printf -- "--app-version %s" "$APP_VERSION_RESOLVED" + )"' + - 'export HELM_META_ARGS="$HELM_NAME_ARG $HELM_CHART_VERSION_ARG $HELM_APP_VERSION_ARG"' + - 'harness-deployment . 
-n test-${{NAMESPACE_BASENAME}} -d ${{DOMAIN}} -r ${{REGISTRY}} + -rsn ${{REGISTRY_SECRET}} -e test --write-env --cache-url ''${{IMAGE_CACHE_URL}}'' + -N $HELM_META_ARGS -i samples' + - 'cat deployment/.env >> ${{CF_VOLUME_PATH}}/env_vars_to_export' + - 'cat ${{CF_VOLUME_PATH}}/env_vars_to_export' prepare_deployment_view: commands: - - helm template ./deployment/helm --debug -n test-${{NAMESPACE_BASENAME}}-${{CF_SHORT_REVISION}} + - 'helm template ./deployment/helm --debug -n test-${{NAMESPACE_BASENAME}}-${{CF_SHORT_REVISION}}' environment: - - ACTION=auth - - KUBE_CONTEXT=test-${{CF_BUILD_ID}} - image: codefresh/cfstep-helm:3.6.2 - stage: prepare - title: View helm chart + - 'ACTION=auth' + - 'KUBE_CONTEXT=test-${{CF_BUILD_ID}}' + image: 'codefresh/cfstep-helm:3.6.2' + stage: 'prepare' + title: 'View helm chart' build_application_images_0: - type: parallel - stage: build + type: 'parallel' + stage: 'build' steps: accounts: - type: build - stage: build - dockerfile: Dockerfile + type: 'build' + stage: 'build' + dockerfile: 'Dockerfile' registry: '${{CODEFRESH_REGISTRY}}' buildkit: true build_arguments: - - NOCACHE=${{CF_BUILD_ID}} - image_name: cloud-harness/accounts - title: Accounts - working_directory: ./applications/accounts + - 'NOCACHE=${{CF_BUILD_ID}}' + image_name: 'cloud-harness/accounts' + title: 'Accounts' + working_directory: './applications/accounts' tags: - '${{ACCOUNTS_TAG}}' - '${{DEPLOYMENT_PUBLISH_TAG}}-dev' @@ -78,21 +78,21 @@ steps: when: condition: any: - buildDoesNotExist: includes('${{ACCOUNTS_TAG_EXISTS}}', '{{ACCOUNTS_TAG_EXISTS}}') - == true - forceNoCache: includes('${{ACCOUNTS_TAG_FORCE_BUILD}}', '{{ACCOUNTS_TAG_FORCE_BUILD}}') - == false + buildDoesNotExist: 'includes(''${{ACCOUNTS_TAG_EXISTS}}'', ''{{ACCOUNTS_TAG_EXISTS}}'') + == true' + forceNoCache: 'includes(''${{ACCOUNTS_TAG_FORCE_BUILD}}'', ''{{ACCOUNTS_TAG_FORCE_BUILD}}'') + == false' cloudharness-base: - type: build - stage: build - dockerfile: 
infrastructure/base-images/cloudharness-base/Dockerfile + type: 'build' + stage: 'build' + dockerfile: 'infrastructure/base-images/cloudharness-base/Dockerfile' registry: '${{CODEFRESH_REGISTRY}}' buildkit: true build_arguments: - - NOCACHE=${{CF_BUILD_ID}} - image_name: cloud-harness/cloudharness-base - title: Cloudharness base - working_directory: ./. + - 'NOCACHE=${{CF_BUILD_ID}}' + image_name: 'cloud-harness/cloudharness-base' + title: 'Cloudharness base' + working_directory: './.' tags: - '${{CLOUDHARNESS_BASE_TAG}}' - '${{DEPLOYMENT_PUBLISH_TAG}}-dev' @@ -100,21 +100,21 @@ steps: when: condition: any: - buildDoesNotExist: includes('${{CLOUDHARNESS_BASE_TAG_EXISTS}}', '{{CLOUDHARNESS_BASE_TAG_EXISTS}}') - == true - forceNoCache: includes('${{CLOUDHARNESS_BASE_TAG_FORCE_BUILD}}', '{{CLOUDHARNESS_BASE_TAG_FORCE_BUILD}}') - == false + buildDoesNotExist: 'includes(''${{CLOUDHARNESS_BASE_TAG_EXISTS}}'', + ''{{CLOUDHARNESS_BASE_TAG_EXISTS}}'') == true' + forceNoCache: 'includes(''${{CLOUDHARNESS_BASE_TAG_FORCE_BUILD}}'', + ''{{CLOUDHARNESS_BASE_TAG_FORCE_BUILD}}'') == false' cloudharness-frontend-build: - type: build - stage: build - dockerfile: infrastructure/base-images/cloudharness-frontend-build/Dockerfile + type: 'build' + stage: 'build' + dockerfile: 'infrastructure/base-images/cloudharness-frontend-build/Dockerfile' registry: '${{CODEFRESH_REGISTRY}}' buildkit: true build_arguments: - - NOCACHE=${{CF_BUILD_ID}} - image_name: cloud-harness/cloudharness-frontend-build - title: Cloudharness frontend build - working_directory: ./. + - 'NOCACHE=${{CF_BUILD_ID}}' + image_name: 'cloud-harness/cloudharness-frontend-build' + title: 'Cloudharness frontend build' + working_directory: './.' 
tags: - '${{CLOUDHARNESS_FRONTEND_BUILD_TAG}}' - '${{DEPLOYMENT_PUBLISH_TAG}}-dev' @@ -122,50 +122,50 @@ steps: when: condition: any: - buildDoesNotExist: includes('${{CLOUDHARNESS_FRONTEND_BUILD_TAG_EXISTS}}', - '{{CLOUDHARNESS_FRONTEND_BUILD_TAG_EXISTS}}') == true - forceNoCache: includes('${{CLOUDHARNESS_FRONTEND_BUILD_TAG_FORCE_BUILD}}', - '{{CLOUDHARNESS_FRONTEND_BUILD_TAG_FORCE_BUILD}}') == false + buildDoesNotExist: 'includes(''${{CLOUDHARNESS_FRONTEND_BUILD_TAG_EXISTS}}'', + ''{{CLOUDHARNESS_FRONTEND_BUILD_TAG_EXISTS}}'') == true' + forceNoCache: 'includes(''${{CLOUDHARNESS_FRONTEND_BUILD_TAG_FORCE_BUILD}}'', + ''{{CLOUDHARNESS_FRONTEND_BUILD_TAG_FORCE_BUILD}}'') == false' test-e2e: - type: build - stage: build - dockerfile: Dockerfile + type: 'build' + stage: 'build' + dockerfile: 'Dockerfile' registry: '${{CODEFRESH_REGISTRY}}' buildkit: true build_arguments: - - NOCACHE=${{CF_BUILD_ID}} - image_name: cloud-harness/test-e2e - title: Test e2e - working_directory: ./test/test-e2e + - 'NOCACHE=${{CF_BUILD_ID}}' + image_name: 'cloud-harness/test-e2e' + title: 'Test e2e' + working_directory: './test/test-e2e' tags: - '${{TEST_E2E_TAG}}' - '${{DEPLOYMENT_PUBLISH_TAG}}-dev' - '${{CF_BRANCH_TAG_NORMALIZED_LOWER_CASE}}' - - latest + - 'latest' when: condition: any: - buildDoesNotExist: includes('${{TEST_E2E_TAG_EXISTS}}', '{{TEST_E2E_TAG_EXISTS}}') - == true - forceNoCache: includes('${{TEST_E2E_TAG_FORCE_BUILD}}', '{{TEST_E2E_TAG_FORCE_BUILD}}') - == false - title: Build parallel step 1 + buildDoesNotExist: 'includes(''${{TEST_E2E_TAG_EXISTS}}'', ''{{TEST_E2E_TAG_EXISTS}}'') + == true' + forceNoCache: 'includes(''${{TEST_E2E_TAG_FORCE_BUILD}}'', ''{{TEST_E2E_TAG_FORCE_BUILD}}'') + == false' + title: 'Build parallel step 1' build_application_images_1: - type: parallel - stage: build + type: 'parallel' + stage: 'build' steps: cloudharness-django: - type: build - stage: build - dockerfile: Dockerfile + type: 'build' + stage: 'build' + dockerfile: 'Dockerfile' 
registry: '${{CODEFRESH_REGISTRY}}' buildkit: true build_arguments: - - NOCACHE=${{CF_BUILD_ID}} - - CLOUDHARNESS_BASE=${{REGISTRY}}/cloud-harness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}} - image_name: cloud-harness/cloudharness-django - title: Cloudharness django - working_directory: ./infrastructure/common-images/cloudharness-django + - 'NOCACHE=${{CF_BUILD_ID}}' + - 'CLOUDHARNESS_BASE=${{REGISTRY}}/cloud-harness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}}' + image_name: 'cloud-harness/cloudharness-django' + title: 'Cloudharness django' + working_directory: './infrastructure/common-images/cloudharness-django' tags: - '${{CLOUDHARNESS_DJANGO_TAG}}' - '${{DEPLOYMENT_PUBLISH_TAG}}-dev' @@ -173,22 +173,22 @@ steps: when: condition: any: - buildDoesNotExist: includes('${{CLOUDHARNESS_DJANGO_TAG_EXISTS}}', '{{CLOUDHARNESS_DJANGO_TAG_EXISTS}}') - == true - forceNoCache: includes('${{CLOUDHARNESS_DJANGO_TAG_FORCE_BUILD}}', '{{CLOUDHARNESS_DJANGO_TAG_FORCE_BUILD}}') - == false + buildDoesNotExist: 'includes(''${{CLOUDHARNESS_DJANGO_TAG_EXISTS}}'', + ''{{CLOUDHARNESS_DJANGO_TAG_EXISTS}}'') == true' + forceNoCache: 'includes(''${{CLOUDHARNESS_DJANGO_TAG_FORCE_BUILD}}'', + ''{{CLOUDHARNESS_DJANGO_TAG_FORCE_BUILD}}'') == false' cloudharness-flask: - type: build - stage: build - dockerfile: Dockerfile + type: 'build' + stage: 'build' + dockerfile: 'Dockerfile' registry: '${{CODEFRESH_REGISTRY}}' buildkit: true build_arguments: - - NOCACHE=${{CF_BUILD_ID}} - - CLOUDHARNESS_BASE=${{REGISTRY}}/cloud-harness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}} - image_name: cloud-harness/cloudharness-flask - title: Cloudharness flask - working_directory: ./infrastructure/common-images/cloudharness-flask + - 'NOCACHE=${{CF_BUILD_ID}}' + - 'CLOUDHARNESS_BASE=${{REGISTRY}}/cloud-harness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}}' + image_name: 'cloud-harness/cloudharness-flask' + title: 'Cloudharness flask' + working_directory: './infrastructure/common-images/cloudharness-flask' tags: 
- '${{CLOUDHARNESS_FLASK_TAG}}' - '${{DEPLOYMENT_PUBLISH_TAG}}-dev' @@ -196,22 +196,22 @@ steps: when: condition: any: - buildDoesNotExist: includes('${{CLOUDHARNESS_FLASK_TAG_EXISTS}}', '{{CLOUDHARNESS_FLASK_TAG_EXISTS}}') - == true - forceNoCache: includes('${{CLOUDHARNESS_FLASK_TAG_FORCE_BUILD}}', '{{CLOUDHARNESS_FLASK_TAG_FORCE_BUILD}}') - == false + buildDoesNotExist: 'includes(''${{CLOUDHARNESS_FLASK_TAG_EXISTS}}'', + ''{{CLOUDHARNESS_FLASK_TAG_EXISTS}}'') == true' + forceNoCache: 'includes(''${{CLOUDHARNESS_FLASK_TAG_FORCE_BUILD}}'', + ''{{CLOUDHARNESS_FLASK_TAG_FORCE_BUILD}}'') == false' jupyterhub: - type: build - stage: build - dockerfile: Dockerfile + type: 'build' + stage: 'build' + dockerfile: 'Dockerfile' registry: '${{CODEFRESH_REGISTRY}}' buildkit: true build_arguments: - - NOCACHE=${{CF_BUILD_ID}} - - CLOUDHARNESS_BASE=${{REGISTRY}}/cloud-harness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}} - image_name: cloud-harness/jupyterhub - title: Jupyterhub - working_directory: ./applications/jupyterhub + - 'NOCACHE=${{CF_BUILD_ID}}' + - 'CLOUDHARNESS_BASE=${{REGISTRY}}/cloud-harness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}}' + image_name: 'cloud-harness/jupyterhub' + title: 'Jupyterhub' + working_directory: './applications/jupyterhub' tags: - '${{JUPYTERHUB_TAG}}' - '${{DEPLOYMENT_PUBLISH_TAG}}-dev' @@ -219,22 +219,22 @@ steps: when: condition: any: - buildDoesNotExist: includes('${{JUPYTERHUB_TAG_EXISTS}}', '{{JUPYTERHUB_TAG_EXISTS}}') - == true - forceNoCache: includes('${{JUPYTERHUB_TAG_FORCE_BUILD}}', '{{JUPYTERHUB_TAG_FORCE_BUILD}}') - == false + buildDoesNotExist: 'includes(''${{JUPYTERHUB_TAG_EXISTS}}'', ''{{JUPYTERHUB_TAG_EXISTS}}'') + == true' + forceNoCache: 'includes(''${{JUPYTERHUB_TAG_FORCE_BUILD}}'', ''{{JUPYTERHUB_TAG_FORCE_BUILD}}'') + == false' samples-print-file: - type: build - stage: build - dockerfile: Dockerfile + type: 'build' + stage: 'build' + dockerfile: 'Dockerfile' registry: '${{CODEFRESH_REGISTRY}}' buildkit: true 
build_arguments: - - NOCACHE=${{CF_BUILD_ID}} - - CLOUDHARNESS_BASE=${{REGISTRY}}/cloud-harness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}} - image_name: cloud-harness/sampleapp-print-file - title: Samples print file - working_directory: ./applications/samples/tasks/print-file + - 'NOCACHE=${{CF_BUILD_ID}}' + - 'CLOUDHARNESS_BASE=${{REGISTRY}}/cloud-harness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}}' + image_name: 'cloud-harness/sampleapp-print-file' + title: 'Samples print file' + working_directory: './applications/samples/tasks/print-file' tags: - '${{SAMPLES_PRINT_FILE_TAG}}' - '${{DEPLOYMENT_PUBLISH_TAG}}-dev' @@ -242,22 +242,22 @@ steps: when: condition: any: - buildDoesNotExist: includes('${{SAMPLES_PRINT_FILE_TAG_EXISTS}}', '{{SAMPLES_PRINT_FILE_TAG_EXISTS}}') - == true - forceNoCache: includes('${{SAMPLES_PRINT_FILE_TAG_FORCE_BUILD}}', '{{SAMPLES_PRINT_FILE_TAG_FORCE_BUILD}}') - == false + buildDoesNotExist: 'includes(''${{SAMPLES_PRINT_FILE_TAG_EXISTS}}'', + ''{{SAMPLES_PRINT_FILE_TAG_EXISTS}}'') == true' + forceNoCache: 'includes(''${{SAMPLES_PRINT_FILE_TAG_FORCE_BUILD}}'', + ''{{SAMPLES_PRINT_FILE_TAG_FORCE_BUILD}}'') == false' samples-secret: - type: build - stage: build - dockerfile: Dockerfile + type: 'build' + stage: 'build' + dockerfile: 'Dockerfile' registry: '${{CODEFRESH_REGISTRY}}' buildkit: true build_arguments: - - NOCACHE=${{CF_BUILD_ID}} - - CLOUDHARNESS_BASE=${{REGISTRY}}/cloud-harness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}} - image_name: cloud-harness/sampleapp-secret - title: Samples secret - working_directory: ./applications/samples/tasks/secret + - 'NOCACHE=${{CF_BUILD_ID}}' + - 'CLOUDHARNESS_BASE=${{REGISTRY}}/cloud-harness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}}' + image_name: 'cloud-harness/sampleapp-secret' + title: 'Samples secret' + working_directory: './applications/samples/tasks/secret' tags: - '${{SAMPLES_SECRET_TAG}}' - '${{DEPLOYMENT_PUBLISH_TAG}}-dev' @@ -265,46 +265,46 @@ steps: when: condition: any: - 
buildDoesNotExist: includes('${{SAMPLES_SECRET_TAG_EXISTS}}', '{{SAMPLES_SECRET_TAG_EXISTS}}') - == true - forceNoCache: includes('${{SAMPLES_SECRET_TAG_FORCE_BUILD}}', '{{SAMPLES_SECRET_TAG_FORCE_BUILD}}') - == false + buildDoesNotExist: 'includes(''${{SAMPLES_SECRET_TAG_EXISTS}}'', ''{{SAMPLES_SECRET_TAG_EXISTS}}'') + == true' + forceNoCache: 'includes(''${{SAMPLES_SECRET_TAG_FORCE_BUILD}}'', ''{{SAMPLES_SECRET_TAG_FORCE_BUILD}}'') + == false' test-api: - type: build - stage: build - dockerfile: test/test-api/Dockerfile + type: 'build' + stage: 'build' + dockerfile: 'test/test-api/Dockerfile' registry: '${{CODEFRESH_REGISTRY}}' buildkit: true build_arguments: - - NOCACHE=${{CF_BUILD_ID}} - - CLOUDHARNESS_BASE=${{REGISTRY}}/cloud-harness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}} - image_name: cloud-harness/test-api - title: Test api - working_directory: ./. + - 'NOCACHE=${{CF_BUILD_ID}}' + - 'CLOUDHARNESS_BASE=${{REGISTRY}}/cloud-harness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}}' + image_name: 'cloud-harness/test-api' + title: 'Test api' + working_directory: './.' 
tags: - '${{TEST_API_TAG}}' - '${{DEPLOYMENT_PUBLISH_TAG}}-dev' - '${{CF_BRANCH_TAG_NORMALIZED_LOWER_CASE}}' - - latest + - 'latest' when: condition: any: - buildDoesNotExist: includes('${{TEST_API_TAG_EXISTS}}', '{{TEST_API_TAG_EXISTS}}') - == true - forceNoCache: includes('${{TEST_API_TAG_FORCE_BUILD}}', '{{TEST_API_TAG_FORCE_BUILD}}') - == false + buildDoesNotExist: 'includes(''${{TEST_API_TAG_EXISTS}}'', ''{{TEST_API_TAG_EXISTS}}'') + == true' + forceNoCache: 'includes(''${{TEST_API_TAG_FORCE_BUILD}}'', ''{{TEST_API_TAG_FORCE_BUILD}}'') + == false' workflows-extract-download: - type: build - stage: build - dockerfile: Dockerfile + type: 'build' + stage: 'build' + dockerfile: 'Dockerfile' registry: '${{CODEFRESH_REGISTRY}}' buildkit: true build_arguments: - - NOCACHE=${{CF_BUILD_ID}} - - CLOUDHARNESS_BASE=${{REGISTRY}}/cloud-harness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}} - image_name: cloud-harness/workflows-extract-download - title: Workflows extract download - working_directory: ./applications/workflows/tasks/extract-download + - 'NOCACHE=${{CF_BUILD_ID}}' + - 'CLOUDHARNESS_BASE=${{REGISTRY}}/cloud-harness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}}' + image_name: 'cloud-harness/workflows-extract-download' + title: 'Workflows extract download' + working_directory: './applications/workflows/tasks/extract-download' tags: - '${{WORKFLOWS_EXTRACT_DOWNLOAD_TAG}}' - '${{DEPLOYMENT_PUBLISH_TAG}}-dev' @@ -312,22 +312,22 @@ steps: when: condition: any: - buildDoesNotExist: includes('${{WORKFLOWS_EXTRACT_DOWNLOAD_TAG_EXISTS}}', - '{{WORKFLOWS_EXTRACT_DOWNLOAD_TAG_EXISTS}}') == true - forceNoCache: includes('${{WORKFLOWS_EXTRACT_DOWNLOAD_TAG_FORCE_BUILD}}', - '{{WORKFLOWS_EXTRACT_DOWNLOAD_TAG_FORCE_BUILD}}') == false + buildDoesNotExist: 'includes(''${{WORKFLOWS_EXTRACT_DOWNLOAD_TAG_EXISTS}}'', + ''{{WORKFLOWS_EXTRACT_DOWNLOAD_TAG_EXISTS}}'') == true' + forceNoCache: 'includes(''${{WORKFLOWS_EXTRACT_DOWNLOAD_TAG_FORCE_BUILD}}'', + 
''{{WORKFLOWS_EXTRACT_DOWNLOAD_TAG_FORCE_BUILD}}'') == false' workflows-notify-queue: - type: build - stage: build - dockerfile: Dockerfile + type: 'build' + stage: 'build' + dockerfile: 'Dockerfile' registry: '${{CODEFRESH_REGISTRY}}' buildkit: true build_arguments: - - NOCACHE=${{CF_BUILD_ID}} - - CLOUDHARNESS_BASE=${{REGISTRY}}/cloud-harness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}} - image_name: cloud-harness/workflows-notify-queue - title: Workflows notify queue - working_directory: ./applications/workflows/tasks/notify-queue + - 'NOCACHE=${{CF_BUILD_ID}}' + - 'CLOUDHARNESS_BASE=${{REGISTRY}}/cloud-harness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}}' + image_name: 'cloud-harness/workflows-notify-queue' + title: 'Workflows notify queue' + working_directory: './applications/workflows/tasks/notify-queue' tags: - '${{WORKFLOWS_NOTIFY_QUEUE_TAG}}' - '${{DEPLOYMENT_PUBLISH_TAG}}-dev' @@ -335,22 +335,22 @@ steps: when: condition: any: - buildDoesNotExist: includes('${{WORKFLOWS_NOTIFY_QUEUE_TAG_EXISTS}}', - '{{WORKFLOWS_NOTIFY_QUEUE_TAG_EXISTS}}') == true - forceNoCache: includes('${{WORKFLOWS_NOTIFY_QUEUE_TAG_FORCE_BUILD}}', - '{{WORKFLOWS_NOTIFY_QUEUE_TAG_FORCE_BUILD}}') == false + buildDoesNotExist: 'includes(''${{WORKFLOWS_NOTIFY_QUEUE_TAG_EXISTS}}'', + ''{{WORKFLOWS_NOTIFY_QUEUE_TAG_EXISTS}}'') == true' + forceNoCache: 'includes(''${{WORKFLOWS_NOTIFY_QUEUE_TAG_FORCE_BUILD}}'', + ''{{WORKFLOWS_NOTIFY_QUEUE_TAG_FORCE_BUILD}}'') == false' workflows-send-result-event: - type: build - stage: build - dockerfile: Dockerfile + type: 'build' + stage: 'build' + dockerfile: 'Dockerfile' registry: '${{CODEFRESH_REGISTRY}}' buildkit: true build_arguments: - - NOCACHE=${{CF_BUILD_ID}} - - CLOUDHARNESS_BASE=${{REGISTRY}}/cloud-harness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}} - image_name: cloud-harness/workflows-send-result-event - title: Workflows send result event - working_directory: ./applications/workflows/tasks/send-result-event + - 'NOCACHE=${{CF_BUILD_ID}}' + - 
'CLOUDHARNESS_BASE=${{REGISTRY}}/cloud-harness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}}' + image_name: 'cloud-harness/workflows-send-result-event' + title: 'Workflows send result event' + working_directory: './applications/workflows/tasks/send-result-event' tags: - '${{WORKFLOWS_SEND_RESULT_EVENT_TAG}}' - '${{DEPLOYMENT_PUBLISH_TAG}}-dev' @@ -358,27 +358,27 @@ steps: when: condition: any: - buildDoesNotExist: includes('${{WORKFLOWS_SEND_RESULT_EVENT_TAG_EXISTS}}', - '{{WORKFLOWS_SEND_RESULT_EVENT_TAG_EXISTS}}') == true - forceNoCache: includes('${{WORKFLOWS_SEND_RESULT_EVENT_TAG_FORCE_BUILD}}', - '{{WORKFLOWS_SEND_RESULT_EVENT_TAG_FORCE_BUILD}}') == false - title: Build parallel step 2 + buildDoesNotExist: 'includes(''${{WORKFLOWS_SEND_RESULT_EVENT_TAG_EXISTS}}'', + ''{{WORKFLOWS_SEND_RESULT_EVENT_TAG_EXISTS}}'') == true' + forceNoCache: 'includes(''${{WORKFLOWS_SEND_RESULT_EVENT_TAG_FORCE_BUILD}}'', + ''{{WORKFLOWS_SEND_RESULT_EVENT_TAG_FORCE_BUILD}}'') == false' + title: 'Build parallel step 2' build_application_images_2: - type: parallel - stage: build + type: 'parallel' + stage: 'build' steps: common: - type: build - stage: build - dockerfile: Dockerfile + type: 'build' + stage: 'build' + dockerfile: 'Dockerfile' registry: '${{CODEFRESH_REGISTRY}}' buildkit: true build_arguments: - - NOCACHE=${{CF_BUILD_ID}} - - CLOUDHARNESS_FLASK=${{REGISTRY}}/cloud-harness/cloudharness-flask:${{CLOUDHARNESS_FLASK_TAG}} - image_name: cloud-harness/common - title: Common - working_directory: ./applications/common/server + - 'NOCACHE=${{CF_BUILD_ID}}' + - 'CLOUDHARNESS_FLASK=${{REGISTRY}}/cloud-harness/cloudharness-flask:${{CLOUDHARNESS_FLASK_TAG}}' + image_name: 'cloud-harness/common' + title: 'Common' + working_directory: './applications/common/server' tags: - '${{COMMON_TAG}}' - '${{DEPLOYMENT_PUBLISH_TAG}}-dev' @@ -386,23 +386,23 @@ steps: when: condition: any: - buildDoesNotExist: includes('${{COMMON_TAG_EXISTS}}', '{{COMMON_TAG_EXISTS}}') - == true - forceNoCache: 
includes('${{COMMON_TAG_FORCE_BUILD}}', '{{COMMON_TAG_FORCE_BUILD}}') - == false + buildDoesNotExist: 'includes(''${{COMMON_TAG_EXISTS}}'', ''{{COMMON_TAG_EXISTS}}'') + == true' + forceNoCache: 'includes(''${{COMMON_TAG_FORCE_BUILD}}'', ''{{COMMON_TAG_FORCE_BUILD}}'') + == false' samples: - type: build - stage: build - dockerfile: Dockerfile + type: 'build' + stage: 'build' + dockerfile: 'test.Dockerfile' registry: '${{CODEFRESH_REGISTRY}}' buildkit: true build_arguments: - - NOCACHE=${{CF_BUILD_ID}} - - CLOUDHARNESS_FRONTEND_BUILD=${{REGISTRY}}/cloud-harness/cloudharness-frontend-build:${{CLOUDHARNESS_FRONTEND_BUILD_TAG}} - - CLOUDHARNESS_FLASK=${{REGISTRY}}/cloud-harness/cloudharness-flask:${{CLOUDHARNESS_FLASK_TAG}} - image_name: cloud-harness/sampleapp - title: Samples - working_directory: ./applications/samples + - 'NOCACHE=${{CF_BUILD_ID}}' + - 'CLOUDHARNESS_FRONTEND_BUILD=${{REGISTRY}}/cloud-harness/cloudharness-frontend-build:${{CLOUDHARNESS_FRONTEND_BUILD_TAG}}' + - 'CLOUDHARNESS_FLASK=${{REGISTRY}}/cloud-harness/cloudharness-flask:${{CLOUDHARNESS_FLASK_TAG}}' + image_name: 'cloud-harness/sampleapp' + title: 'Samples' + working_directory: './applications/samples' tags: - '${{SAMPLES_TAG}}' - '${{DEPLOYMENT_PUBLISH_TAG}}-dev' @@ -410,22 +410,22 @@ steps: when: condition: any: - buildDoesNotExist: includes('${{SAMPLES_TAG_EXISTS}}', '{{SAMPLES_TAG_EXISTS}}') - == true - forceNoCache: includes('${{SAMPLES_TAG_FORCE_BUILD}}', '{{SAMPLES_TAG_FORCE_BUILD}}') - == false + buildDoesNotExist: 'includes(''${{SAMPLES_TAG_EXISTS}}'', ''{{SAMPLES_TAG_EXISTS}}'') + == true' + forceNoCache: 'includes(''${{SAMPLES_TAG_FORCE_BUILD}}'', ''{{SAMPLES_TAG_FORCE_BUILD}}'') + == false' volumemanager: - type: build - stage: build - dockerfile: Dockerfile + type: 'build' + stage: 'build' + dockerfile: 'Dockerfile' registry: '${{CODEFRESH_REGISTRY}}' buildkit: true build_arguments: - - NOCACHE=${{CF_BUILD_ID}} - - 
CLOUDHARNESS_FLASK=${{REGISTRY}}/cloud-harness/cloudharness-flask:${{CLOUDHARNESS_FLASK_TAG}} - image_name: cloud-harness/volumemanager - title: Volumemanager - working_directory: ./applications/volumemanager/server + - 'NOCACHE=${{CF_BUILD_ID}}' + - 'CLOUDHARNESS_FLASK=${{REGISTRY}}/cloud-harness/cloudharness-flask:${{CLOUDHARNESS_FLASK_TAG}}' + image_name: 'cloud-harness/volumemanager' + title: 'Volumemanager' + working_directory: './applications/volumemanager/server' tags: - '${{VOLUMEMANAGER_TAG}}' - '${{DEPLOYMENT_PUBLISH_TAG}}-dev' @@ -433,22 +433,22 @@ steps: when: condition: any: - buildDoesNotExist: includes('${{VOLUMEMANAGER_TAG_EXISTS}}', '{{VOLUMEMANAGER_TAG_EXISTS}}') - == true - forceNoCache: includes('${{VOLUMEMANAGER_TAG_FORCE_BUILD}}', '{{VOLUMEMANAGER_TAG_FORCE_BUILD}}') - == false + buildDoesNotExist: 'includes(''${{VOLUMEMANAGER_TAG_EXISTS}}'', ''{{VOLUMEMANAGER_TAG_EXISTS}}'') + == true' + forceNoCache: 'includes(''${{VOLUMEMANAGER_TAG_FORCE_BUILD}}'', ''{{VOLUMEMANAGER_TAG_FORCE_BUILD}}'') + == false' workflows: - type: build - stage: build - dockerfile: Dockerfile + type: 'build' + stage: 'build' + dockerfile: 'Dockerfile' registry: '${{CODEFRESH_REGISTRY}}' buildkit: true build_arguments: - - NOCACHE=${{CF_BUILD_ID}} - - CLOUDHARNESS_FLASK=${{REGISTRY}}/cloud-harness/cloudharness-flask:${{CLOUDHARNESS_FLASK_TAG}} - image_name: cloud-harness/workflows - title: Workflows - working_directory: ./applications/workflows/server + - 'NOCACHE=${{CF_BUILD_ID}}' + - 'CLOUDHARNESS_FLASK=${{REGISTRY}}/cloud-harness/cloudharness-flask:${{CLOUDHARNESS_FLASK_TAG}}' + image_name: 'cloud-harness/workflows' + title: 'Workflows' + working_directory: './applications/workflows/server' tags: - '${{WORKFLOWS_TAG}}' - '${{DEPLOYMENT_PUBLISH_TAG}}-dev' @@ -456,27 +456,27 @@ steps: when: condition: any: - buildDoesNotExist: includes('${{WORKFLOWS_TAG_EXISTS}}', '{{WORKFLOWS_TAG_EXISTS}}') - == true - forceNoCache: includes('${{WORKFLOWS_TAG_FORCE_BUILD}}', 
'{{WORKFLOWS_TAG_FORCE_BUILD}}') - == false - title: Build parallel step 3 + buildDoesNotExist: 'includes(''${{WORKFLOWS_TAG_EXISTS}}'', ''{{WORKFLOWS_TAG_EXISTS}}'') + == true' + forceNoCache: 'includes(''${{WORKFLOWS_TAG_FORCE_BUILD}}'', ''{{WORKFLOWS_TAG_FORCE_BUILD}}'') + == false' + title: 'Build parallel step 3' build_application_images_3: - type: parallel - stage: build + type: 'parallel' + stage: 'build' steps: samples-sum: - type: build - stage: build - dockerfile: Dockerfile + type: 'build' + stage: 'build' + dockerfile: 'Dockerfile' registry: '${{CODEFRESH_REGISTRY}}' buildkit: true build_arguments: - - NOCACHE=${{CF_BUILD_ID}} - - SAMPLES=${{REGISTRY}}/cloud-harness/sampleapp:${{SAMPLES_TAG}} - image_name: cloud-harness/sampleapp-sum - title: Samples sum - working_directory: ./applications/samples/tasks/sum + - 'NOCACHE=${{CF_BUILD_ID}}' + - 'SAMPLES=${{REGISTRY}}/cloud-harness/sampleapp:${{SAMPLES_TAG}}' + image_name: 'cloud-harness/sampleapp-sum' + title: 'Samples sum' + working_directory: './applications/samples/tasks/sum' tags: - '${{SAMPLES_SUM_TAG}}' - '${{DEPLOYMENT_PUBLISH_TAG}}-dev' @@ -484,142 +484,142 @@ steps: when: condition: any: - buildDoesNotExist: includes('${{SAMPLES_SUM_TAG_EXISTS}}', '{{SAMPLES_SUM_TAG_EXISTS}}') - == true - forceNoCache: includes('${{SAMPLES_SUM_TAG_FORCE_BUILD}}', '{{SAMPLES_SUM_TAG_FORCE_BUILD}}') - == false - title: Build parallel step 4 + buildDoesNotExist: 'includes(''${{SAMPLES_SUM_TAG_EXISTS}}'', ''{{SAMPLES_SUM_TAG_EXISTS}}'') + == true' + forceNoCache: 'includes(''${{SAMPLES_SUM_TAG_FORCE_BUILD}}'', ''{{SAMPLES_SUM_TAG_FORCE_BUILD}}'') + == false' + title: 'Build parallel step 4' tests_unit: - stage: unittest - type: parallel + stage: 'unittest' + type: 'parallel' steps: samples_ut: - title: Unit tests for samples + title: 'Unit tests for samples' commands: - - pytest /usr/src/app/samples/test + - 'pytest /usr/src/app/samples/test' image: '${{REGISTRY}}/cloud-harness/sampleapp:${{SAMPLES_TAG}}' deployment: 
- stage: deploy - type: helm - working_directory: ./${{CF_REPO_NAME}} - title: Installing chart + stage: 'deploy' + type: 'helm' + working_directory: './${{CF_REPO_NAME}}' + title: 'Installing chart' arguments: - helm_version: 3.6.2 - chart_name: deployment/helm - release_name: test-${{NAMESPACE_BASENAME}} + helm_version: '3.6.2' + chart_name: 'deployment/helm' + release_name: 'test-${{NAMESPACE_BASENAME}}' kube_context: '${{CLUSTER_NAME}}' - namespace: test-${{NAMESPACE_BASENAME}} + namespace: 'test-${{NAMESPACE_BASENAME}}' chart_version: '${{CF_SHORT_REVISION}}' - cmd_ps: --timeout 600s --create-namespace + cmd_ps: '--timeout 600s --create-namespace' custom_value_files: - - ./deployment/helm/values.yaml + - './deployment/helm/values.yaml' custom_values: - - apps_samples_harness_secrets_asecret=${{ASECRET}} + - 'apps_samples_harness_secrets_asecret="${{ASECRET}}"' wait_deployment: - stage: qa - title: Wait deployment to be ready - image: codefresh/kubectl + stage: 'qa' + title: 'Wait deployment to be ready' + image: 'codefresh/kubectl' commands: - - kubectl config use-context ${{CLUSTER_NAME}} - - kubectl config set-context --current --namespace=test-${{NAMESPACE_BASENAME}} - - kubectl rollout status deployment/samples - - kubectl rollout status deployment/samples-gk - - kubectl rollout status deployment/workflows - - kubectl rollout status deployment/common - - kubectl rollout status deployment/accounts - - kubectl rollout status deployment/volumemanager - - kubectl rollout status deployment/argo-gk - - sleep 60 + - 'kubectl config use-context ${{CLUSTER_NAME}}' + - 'kubectl config set-context --current --namespace=test-${{NAMESPACE_BASENAME}}' + - 'kubectl rollout status deployment/samples' + - 'kubectl rollout status deployment/samples-gk' + - 'kubectl rollout status deployment/argo-gk' + - 'kubectl rollout status deployment/volumemanager' + - 'kubectl rollout status deployment/common' + - 'kubectl rollout status deployment/workflows' + - 'kubectl rollout 
status deployment/accounts' + - 'sleep 60' tests_api: - stage: qa - title: Api tests - working_directory: /home/test + stage: 'qa' + title: 'Api tests' + working_directory: '/home/test' image: '${{REGISTRY}}/cloud-harness/test-api:${{TEST_API_TAG}}' fail_fast: false commands: - - echo $APP_NAME + - 'echo $APP_NAME' scale: - samples_api_test: - title: samples api test + workflows_api_test: + title: 'workflows api test' volumes: - - '${{CF_REPO_NAME}}/applications/samples:/home/test' + - '${{CF_REPO_NAME}}/applications/workflows:/home/test' - '${{CF_REPO_NAME}}/deployment/helm/values.yaml:/opt/cloudharness/resources/allvalues.yaml' environment: - - APP_URL=https://samples.${{DOMAIN}}/api - - USERNAME=sample@testuser.com - - PASSWORD=test + - 'APP_URL=https://workflows.${{DOMAIN}}/api' commands: - - st --pre-run cloudharness_test.apitest_init run api/openapi.yaml --base-url - https://samples.${{DOMAIN}}/api -c all --skip-deprecated-operations --exclude-operation-id=submit_sync - --exclude-operation-id=submit_sync_with_results --exclude-operation-id=error - --hypothesis-suppress-health-check=too_slow --hypothesis-deadline=180000 - --request-timeout=180000 --hypothesis-max-examples=2 --show-trace --exclude-checks=ignored_auth - - pytest -v test/api + - 'st --pre-run cloudharness_test.apitest_init run api/openapi.yaml --base-url + https://workflows.${{DOMAIN}}/api -c all' common_api_test: - title: common api test + title: 'common api test' volumes: - '${{CF_REPO_NAME}}/applications/common:/home/test' - '${{CF_REPO_NAME}}/deployment/helm/values.yaml:/opt/cloudharness/resources/allvalues.yaml' environment: - - APP_URL=https://common.${{DOMAIN}}/api + - 'APP_URL=https://common.${{DOMAIN}}/api' commands: - - st --pre-run cloudharness_test.apitest_init run api/openapi.yaml --base-url - https://common.${{DOMAIN}}/api -c all - workflows_api_test: - title: workflows api test + - 'st --pre-run cloudharness_test.apitest_init run api/openapi.yaml --base-url + 
https://common.${{DOMAIN}}/api -c all' + samples_api_test: + title: 'samples api test' volumes: - - '${{CF_REPO_NAME}}/applications/workflows:/home/test' + - '${{CF_REPO_NAME}}/applications/samples:/home/test' - '${{CF_REPO_NAME}}/deployment/helm/values.yaml:/opt/cloudharness/resources/allvalues.yaml' environment: - - APP_URL=https://workflows.${{DOMAIN}}/api + - 'APP_URL=https://samples.${{DOMAIN}}/api' + - 'USERNAME=sample@testuser.com' + - 'PASSWORD=test' commands: - - st --pre-run cloudharness_test.apitest_init run api/openapi.yaml --base-url - https://workflows.${{DOMAIN}}/api -c all + - 'st --pre-run cloudharness_test.apitest_init run api/openapi.yaml --base-url + https://samples.${{DOMAIN}}/api -c all --skip-deprecated-operations --exclude-operation-id=submit_sync + --exclude-operation-id=submit_sync_with_results --exclude-operation-id=error + --hypothesis-suppress-health-check=too_slow --hypothesis-deadline=180000 + --request-timeout=180000 --hypothesis-max-examples=2 --show-trace --exclude-checks=ignored_auth' + - 'pytest -v test/api' hooks: on_fail: exec: - image: alpine + image: 'alpine' commands: - - cf_export FAILED=failed + - 'cf_export FAILED=failed' tests_e2e: - stage: qa - title: End to end tests - working_directory: /home/test + stage: 'qa' + title: 'End to end tests' + working_directory: '/home/test' image: '${{REGISTRY}}/cloud-harness/test-e2e:${{TEST_E2E_TAG}}' fail_fast: false commands: - - npx puppeteer browsers install chrome - - yarn test + - 'npx puppeteer browsers install chrome' + - 'yarn test' scale: jupyterhub_e2e_test: - title: jupyterhub e2e test + title: 'jupyterhub e2e test' volumes: - '${{CF_REPO_NAME}}/applications/jupyterhub/test/e2e:/home/test/__tests__/jupyterhub' environment: - - APP_URL=https://hub.${{DOMAIN}} + - 'APP_URL=https://hub.${{DOMAIN}}' samples_e2e_test: - title: samples e2e test + title: 'samples e2e test' volumes: - '${{CF_REPO_NAME}}/applications/samples/test/e2e:/home/test/__tests__/samples' environment: - - 
APP_URL=https://samples.${{DOMAIN}} - - USERNAME=sample@testuser.com - - PASSWORD=test + - 'APP_URL=https://samples.${{DOMAIN}}' + - 'USERNAME=sample@testuser.com' + - 'PASSWORD=test' hooks: on_fail: exec: - image: alpine + image: 'alpine' commands: - - cf_export FAILED=failed + - 'cf_export FAILED=failed' approval: - type: pending-approval - stage: qa - title: Approve anyway and delete deployment - description: The pipeline will fail after ${{WAIT_ON_FAIL}} minutes + type: 'pending-approval' + stage: 'qa' + title: 'Approve anyway and delete deployment' + description: 'The pipeline will fail after ${{WAIT_ON_FAIL}} minutes' timeout: - timeUnit: minutes + timeUnit: 'minutes' duration: '${{WAIT_ON_FAIL}}' - finalState: denied + finalState: 'denied' when: condition: all: diff --git a/docs/applications/databases.md b/docs/applications/databases.md index 96498f6c..3704c3f2 100644 --- a/docs/applications/databases.md +++ b/docs/applications/databases.md @@ -84,6 +84,9 @@ harness postgres: image: postgres:13 initialdb: cloudharness + operator: false + instances: 1 + apiServerCidr: [] ports: - name: http port: 5432 @@ -91,6 +94,19 @@ harness `initialdb` is the default database used +`operator`: When set to `true`, uses the [CloudNative-PG operator](https://github.com/cloudnative-pg/cloudnative-pg) instead of a plain Kubernetes Deployment. This provides advanced features like automated failover and cluster management. **Backups are not configured by default by this chart; you must define CNPG backup resources (for example, `Backup` and/or `ScheduledBackup` objects) or use another backup mechanism separately.** **Requires the CNPG operator to be pre-installed in the cluster.** + +To install the CNPG operator: +```bash +helm repo add cloudnative-pg https://cloudnative-pg.github.io/charts +helm repo update +helm install cnpg cloudnative-pg/cloudnative-pg +``` + +`instances`: Number of PostgreSQL instances (replicas) managed by the CNPG operator. 
Only used when `operator: true`. Default is 1. + +`apiServerCidr`: List of CIDRs allowed for CNPG database pods to reach the Kubernetes API server on port 443. **Resolved automatically at deploy time** by looking up the `kubernetes` Service and Endpoints in the `default` namespace. The explicit list is only used as a fallback when lookup returns nothing (e.g. `helm template` dry-run). Leave empty (`[]`) for auto-detection; set explicitly only for air-gapped or restricted environments. + #### Neo4j diff --git a/docs/model/DatabaseConfig.md b/docs/model/DatabaseConfig.md index c6b3bf21..4857965e 100644 --- a/docs/model/DatabaseConfig.md +++ b/docs/model/DatabaseConfig.md @@ -9,6 +9,10 @@ Name | Type | Description | Notes **image** | **str** | | [optional] **name** | **str** | | [optional] **ports** | [**List[PortConfig]**](PortConfig.md) | | [optional] +**operator** | **bool** | Use the CloudNative-PG operator instead of a plain Deployment (postgres only) | [optional] +**instances** | **int** | Number of PostgreSQL instances managed by the CNPG operator (only used when operator is true) | [optional] +**api_server_cidr** | **List[str]** | CIDR(s) allowed for CNPG pods to reach the Kubernetes API server (port 443). Override with your cluster API-server or service CIDR. 
| [optional] +**initialdb** | **str** | Initial database name (postgres only) | [optional] ## Example diff --git a/libraries/models/api/openapi.yaml b/libraries/models/api/openapi.yaml index 52b013fa..b9e7f8bc 100644 --- a/libraries/models/api/openapi.yaml +++ b/libraries/models/api/openapi.yaml @@ -950,6 +950,21 @@ components: type: array items: $ref: '#/components/schemas/PortConfig' + operator: + description: 'Use the CloudNative-PG operator instead of a plain Deployment (postgres only)' + type: boolean + instances: + description: 'Number of PostgreSQL instances managed by the CNPG operator (only used when operator is true)' + type: integer + minimum: 1 + apiServerCidr: + description: 'CIDR(s) allowed for CNPG pods to reach the Kubernetes API server (port 443). Override with your cluster API-server or service CIDR.' + type: array + items: + type: string + initialdb: + description: 'Initial database name (postgres only)' + type: string additionalProperties: true NetworkConfig: title: Root Type for NetworkConfig diff --git a/libraries/models/cloudharness_model/models/database_config.py b/libraries/models/cloudharness_model/models/database_config.py index a2a914c5..79a62bbd 100644 --- a/libraries/models/cloudharness_model/models/database_config.py +++ b/libraries/models/cloudharness_model/models/database_config.py @@ -34,8 +34,12 @@ class DatabaseConfig(CloudHarnessBaseModel): image: Optional[StrictStr] = None name: Optional[StrictStr] = None ports: Optional[List[PortConfig]] = None + operator: Optional[StrictBool] = Field(default=None, description="Use the CloudNative-PG operator instead of a plain Deployment (postgres only)") + instances: Optional[Annotated[int, Field(strict=True, ge=1)]] = Field(default=None, description="Number of PostgreSQL instances managed by the CNPG operator (only used when operator is true)") + api_server_cidr: Optional[List[StrictStr]] = Field(default=None, description="CIDR(s) allowed for CNPG pods to reach the Kubernetes API server 
(port 443). Override with your cluster API-server or service CIDR.", alias="apiServerCidr") + initialdb: Optional[StrictStr] = Field(default=None, description="Initial database name (postgres only)") additional_properties: Dict[str, Any] = {} - __properties: ClassVar[List[str]] = ["image", "name", "ports"] + __properties: ClassVar[List[str]] = ["image", "name", "ports", "operator", "instances", "apiServerCidr", "initialdb"] def to_dict(self) -> Dict[str, Any]: """Return the dictionary representation of the model using alias. @@ -83,7 +87,11 @@ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: _obj = cls.model_validate({ "image": obj.get("image"), "name": obj.get("name"), - "ports": [PortConfig.from_dict(_item) for _item in obj["ports"]] if obj.get("ports") is not None else None + "ports": [PortConfig.from_dict(_item) for _item in obj["ports"]] if obj.get("ports") is not None else None, + "operator": obj.get("operator"), + "instances": obj.get("instances"), + "apiServerCidr": obj.get("apiServerCidr"), + "initialdb": obj.get("initialdb") }) # store additional fields in additional_properties for _key in obj.keys(): diff --git a/libraries/models/cloudharness_model/models/user.py b/libraries/models/cloudharness_model/models/user.py index 1382348c..bfc2a10f 100644 --- a/libraries/models/cloudharness_model/models/user.py +++ b/libraries/models/cloudharness_model/models/user.py @@ -13,24 +13,26 @@ from __future__ import annotations +import pprint +import re # noqa: F401 +import json from typing import Optional, Set from typing_extensions import Self from cloudharness_model.base_model import CloudHarnessBaseModel -from pydantic import Field, StrictStr, StrictBool, StrictInt, StrictFloat -from typing import ClassVar, List, Dict, Any, Optional, Annotated - +from pydantic import BaseModel, Field, field_validator, StrictStr, StrictBool, StrictInt, StrictFloat +from typing import ClassVar, List, Dict, Any, Union, Optional, Annotated +import importlib from 
cloudharness_model.models.organization import Organization from cloudharness_model.models.user_credential import UserCredential from cloudharness_model.models.user_group import UserGroup - class User(CloudHarnessBaseModel): """ User - """ # noqa: E501 + """ # noqa: E501 access: Optional[Dict[str, Any]] = None attributes: Optional[Dict[str, Any]] = None client_roles: Optional[Dict[str, Any]] = Field(default=None, alias="clientRoles") @@ -146,3 +148,5 @@ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: _obj.additional_properties[_key] = obj.get(_key) return _obj + + diff --git a/libraries/models/docs/DatabaseConfig.md b/libraries/models/docs/DatabaseConfig.md index c6b3bf21..4857965e 100644 --- a/libraries/models/docs/DatabaseConfig.md +++ b/libraries/models/docs/DatabaseConfig.md @@ -9,6 +9,10 @@ Name | Type | Description | Notes **image** | **str** | | [optional] **name** | **str** | | [optional] **ports** | [**List[PortConfig]**](PortConfig.md) | | [optional] +**operator** | **bool** | Use the CloudNative-PG operator instead of a plain Deployment (postgres only) | [optional] +**instances** | **int** | Number of PostgreSQL instances managed by the CNPG operator (only used when operator is true) | [optional] +**api_server_cidr** | **List[str]** | CIDR(s) allowed for CNPG pods to reach the Kubernetes API server (port 443). Override with your cluster API-server or service CIDR. 
| [optional] +**initialdb** | **str** | Initial database name (postgres only) | [optional] ## Example diff --git a/tools/deployment-cli-tools/ch_cli_tools/codefresh.py b/tools/deployment-cli-tools/ch_cli_tools/codefresh.py index 1b810c87..384d15ef 100644 --- a/tools/deployment-cli-tools/ch_cli_tools/codefresh.py +++ b/tools/deployment-cli-tools/ch_cli_tools/codefresh.py @@ -412,13 +412,13 @@ def adjust_build_steps(index): for secret in [secret[0] for secret in app.harness.secrets.items() if secret[1] != ""]: secret_name = secret.replace("_", "__") arguments["custom_values"].append( - "apps_%s_harness_secrets_%s=${{%s}}" % (app_name.replace("_", "__"), secret_name, secret_name.upper())) - # Add connect_string as a secret custom_value for apps that have it set to empty + 'apps_%s_harness_secrets_%s="${{%s}}"' % (app_name.replace("_", "__"), secret_name, secret_name.upper()) + ) for app_name, app in helm_values.apps.items(): if app.harness.database and app.harness.database.get("connect_string") == "": var_name = f"{app_name.upper().replace('-', '_')}_DB_CONNECT_STRING" arguments["custom_values"].append( - "apps_%s_harness_database_connect__string=${{%s}}" % ( + "apps_%s_harness_database_connect__string=\"${{%s}}\"" % ( app_name.replace("_", "__"), var_name) ) # Add registry secret value secret if registry secret name is set @@ -427,7 +427,7 @@ def adjust_build_steps(index): registry_secret_name = getattr(secret, "name", None) if registry_secret_name: arguments["custom_values"].append( - "registry_secret_value=${{REGISTRY_SECRET_VALUE}}" + 'registry_secret_value="${{K8S_SA_JSON}}"' ) cmds = codefresh['steps']['prepare_deployment']['commands'] @@ -474,8 +474,21 @@ def adjust_build_steps(index): codefresh_dir = dirname(codefresh_abs_path) if not exists(codefresh_dir): os.makedirs(codefresh_dir) + from ruamel.yaml.scalarstring import SingleQuotedScalarString + + deployment_step = codefresh.get("steps", {}).get("deployment", {}) + arguments = 
deployment_step.get("arguments", {}) + if "custom_values" in arguments: + arguments["custom_values"] = [ + SingleQuotedScalarString(v) if isinstance(v, str) else v + for v in arguments["custom_values"] + ] + + from ruamel.yaml import YAML + ryaml = YAML() + ryaml.default_flow_style = False with open(codefresh_abs_path, 'w') as f: - yaml.dump(codefresh, f) + ryaml.dump(codefresh, f) return codefresh diff --git a/tools/deployment-cli-tools/tests/resources/applications/myapp/deploy/values-withpostgres.yaml b/tools/deployment-cli-tools/tests/resources/applications/myapp/deploy/values-withpostgres.yaml index 4b260dbf..d808f735 100644 --- a/tools/deployment-cli-tools/tests/resources/applications/myapp/deploy/values-withpostgres.yaml +++ b/tools/deployment-cli-tools/tests/resources/applications/myapp/deploy/values-withpostgres.yaml @@ -1,2 +1,6 @@ harness: - database: {auto: true, type: postgres} \ No newline at end of file + database: + auto: true + type: postgres + postgres: + image: postgres:17 \ No newline at end of file diff --git a/tools/deployment-cli-tools/tests/test_codefresh.py b/tools/deployment-cli-tools/tests/test_codefresh.py index f454d0b2..44506a3e 100644 --- a/tools/deployment-cli-tools/tests/test_codefresh.py +++ b/tools/deployment-cli-tools/tests/test_codefresh.py @@ -358,13 +358,60 @@ def test_codefresh_db_connect_string_secret(): base_image_name=values['name'], helm_values=values, save=False) custom_values = cf['steps']['deployment']['arguments']['custom_values'] - expected = "apps_myapp_harness_database_connect__string=${{MYAPP_DB_CONNECT_STRING}}" + expected = "apps_myapp_harness_database_connect__string=\"${{MYAPP_DB_CONNECT_STRING}}\"" assert expected in custom_values, \ f"Expected custom_value entry for connect_string not found. 
Got: {custom_values}" finally: shutil.rmtree(BUILD_MERGE_DIR, ignore_errors=True) +def test_codefresh_secret_with_quotes(): + values = create_helm_chart( + [CLOUDHARNESS_ROOT, RESOURCES], + output_path=OUT, + include=['myapp'], + exclude=['events'], + domain="my.local", + namespace='test', + env='dev', + local=False, + tag=1, + registry='reg' + ) + try: + root_paths = preprocess_build_overrides( + root_paths=[CLOUDHARNESS_ROOT, RESOURCES], + helm_values=values, + merge_build_path=BUILD_MERGE_DIR + ) + + build_included = [app['harness']['name'] + for app in values['apps'].values() if 'harness' in app] + + values.apps["myapp"].harness.secrets = { + "settings_secret": "SECRET_KEY='replace-with-strong-shared-secret'" + } + + cf = create_codefresh_deployment_scripts(root_paths, include=build_included, + envs=['dev'], + base_image_name=values['name'], + helm_values=values, save=False) + + custom_values = cf['steps']['deployment']['arguments']['custom_values'] + entry = next( + value for value in custom_values + if value.startswith("apps_myapp_harness_secrets_settings__secret=") + ) + assert entry == 'apps_myapp_harness_secrets_settings__secret="${{SETTINGS__SECRET}}"' + rendered = entry.replace( + "${{SETTINGS__SECRET}}", + values.apps["myapp"].harness.secrets["settings_secret"] + ) + assert rendered == 'apps_myapp_harness_secrets_settings__secret="SECRET_KEY=\'replace-with-strong-shared-secret\'"' + finally: + shutil.rmtree(BUILD_MERGE_DIR, ignore_errors=True) + + def test_sort_parallel_steps_alphabetically(): """Sub-steps inside parallel steps must be sorted alphabetically by name.""" steps = {