diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index 15500146..f48e60d3 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -45,7 +45,7 @@ RUN if [ "${INSTALL_AZURE_CLI}" = "true" ]; then bash /tmp/library-scripts/azcli # Install Terraform, tflint, Go, PowerShell, and other useful tools # TODO: move this into main "RUN" layer above -ARG TERRAFORM_VERSION=0.12.30 +ARG TERRAFORM_VERSION=0.14.9 ARG TFLINT_VERSION=0.18.0 RUN bash /tmp/library-scripts/terraform-debian.sh "${TERRAFORM_VERSION}" "${TFLINT_VERSION}" \ && bash /tmp/library-scripts/powershell-debian.sh \ diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 40a933d3..670c689d 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -5,7 +5,7 @@ "build": { "dockerfile": "Dockerfile", "args": { - "TERRAFORM_VERSION": "0.12.30", + "TERRAFORM_VERSION": "0.14.9", "TFLINT_VERSION": "0.22.0", "INSTALL_AZURE_CLI": "true", "INSTALL_DOCKER": "true", diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index d6f06a5f..a8d9a51e 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -57,195 +57,175 @@ env: TF_IN_AUTOMATION: "true" TF_INPUT: "false" TF_PLAN: "tfplan" - TF_VERSION: "0.12.30" # "latest" is supported + # https://github.com/hashicorp/terraform/releases + TF_VERSION: "0.14.9" TF_WORKING_DIR: ./terraform # https://github.com/terraform-linters/tflint-ruleset-azurerm/releases - TFLINT_RULESET_AZURERM_VERSION: "v0.8.2" + TFLINT_RULESET_AZURERM_VERSION: "v0.9.0" # https://github.com/terraform-linters/tflint/releases - TFLINT_VERSION: "v0.24.1" + TFLINT_VERSION: "v0.25.0" # Env var concatenation is currently not supported at Workflow or Job scope. See workaround below: # https://github.community/t5/GitHub-Actions/How-can-we-concatenate-multiple-env-vars-at-workflow-and-job/td-p/48489 jobs: build-and-deploy: - # always pin versions # view installed software: https://docs.github.com/en/free-pro-team@latest/actions/reference/specifications-for-github-hosted-runners#supported-software runs-on: ubuntu-18.04 + # https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjob_idenvironment + environment: + name: dev_environment + url: "https://argocd.${{ env.ROOT_DOMAIN_NAME }}" + # only run if owner triggered action if: github.actor == github.event.repository.owner.login steps: - - # Checkout - # https://github.com/marketplace/actions/checkout - - uses: actions/checkout@v2 + # Checkout + # https://github.com/marketplace/actions/checkout + - uses: actions/checkout@v2 # specify different branch # NOT required as I've changed the default branch to develop # with: # ref: develop - # Init tasks - inc Env var concatenation - # https://docs.github.com/en/free-pro-team@latest/actions/reference/workflow-commands-for-github-actions#environment-files - - name: Init tasks - inc Env var concatenation (Workaround) - run: | - chmod -R +x ./scripts/ - echo "AKS_CLUSTER_NAME=${{ env.PREFIX }}-aks-001" >> $GITHUB_ENV - echo "AKS_RG_NAME=${{ env.PREFIX }}-rg-aks-dev-001" >> $GITHUB_ENV - echo "ARGOCD_FQDN=argocd.${{ env.ROOT_DOMAIN_NAME }}" >> $GITHUB_ENV - echo "DNS_DOMAIN_NAME=nexus.${{ env.ROOT_DOMAIN_NAME }}" >> $GITHUB_ENV - echo "DOCKER_FQDN=docker.${{ env.ROOT_DOMAIN_NAME }}" >> $GITHUB_ENV - echo "TERRAFORM_STORAGE_ACCOUNT=${{ env.PREFIX }}sttfstate${{ env.LOCATION }}001" >> $GITHUB_ENV - echo "TERRAFORM_STORAGE_RG=${{ env.PREFIX }}-rg-tfstate-dev-001" >> $GITHUB_ENV - echo 
"VELERO_STORAGE_ACCOUNT=${{ env.PREFIX }}stbckuksouth001" >> $GITHUB_ENV - echo "VELERO_STORAGE_RG=${{ env.PREFIX }}-rg-velero-dev-001" >> $GITHUB_ENV - - - # Login - - name: Login to Azure - run: ./scripts/azure_login.sh - - # This is required when developing after the initial build, and the AKS cluster may have been stopped - # Ensure AKS cluster is running, else timeouts will occur on k8s Terraform apply tasks - - name: Start AKS Cluster - continue-on-error: true - run: ./scripts/start_aks_cluster.sh - - - # Prereqs - - name: Create Storage Account for Terraform state - run: ./scripts/storage_create.sh - - - name: Lookup Storage Key - run: ./scripts/storage_key.sh - - - name: Replace tokens in Terraform config files - run: pwsh -command "./scripts/Replace-Tokens.ps1" - env: - ARGOCD_ADMIN_PASSWORD: ${{ secrets.ARGOCD_ADMIN_PASSWORD }} - HELM_CHART_REPO_DEPLOY_PRIVATE_KEY: ${{ secrets.HELM_CHART_REPO_DEPLOY_PRIVATE_KEY }} - IFTTT_WEBHOOK_KEY: ${{ secrets.IFTTT_WEBHOOK_KEY }} - - - name: Create zip file of Function App - run: pwsh -command "./function_app/CreateFunctionAppZip.ps1" - - - # Terraform - - uses: hashicorp/setup-terraform@v1 - with: - terraform_version: ${{ env.TF_VERSION }} - - - name: Terraform Init / Validate - run: | - terraform init - terraform validate - working-directory: ${{ env.TF_WORKING_DIR }} - - - name: Terraform Lint - run: ./scripts/tflint.sh - env: - TF_WORKING_DIR: ${{ env.TF_WORKING_DIR }} - TFLINT_RULESET_AZURERM_VERSION: ${{ env.TFLINT_RULESET_AZURERM_VERSION }} - TFLINT_VERSION: ${{ env.TFLINT_VERSION }} - - - name: Terraform Plan - id: plan - run: terraform plan -out=${{ env.TF_PLAN }} - working-directory: ${{ env.TF_WORKING_DIR }} - - # - run: echo ${{ steps.plan.outputs.stdout }} - # - run: echo ${{ steps.plan.outputs.stderr }} - # - run: echo ${{ steps.plan.outputs.exitcode }} - - - name: Terraform Apply - run: ./scripts/terraform_apply.sh - # only apply if changes are present - # https://www.terraform.io/docs/commands/plan.html#detailed-exitcode - # if: steps.plan.outputs.exitcode == 2 - env: - TF_PLAN: ${{ env.TF_PLAN }} - - - # Kubernetes - - name: Deploy kubernetes manifests - run: ./scripts/k8s_manifests_apply.sh - - - name: Wait for resources to be "Ready" - run: ./scripts/wait.sh - - - # Ansible - # TODO: enable Ansible Lint once this issue has been resolved: https://github.com/ansible/ansible-lint-action/issues/36 - # - name: Lint Ansible Playbook - # uses: ansible/ansible-lint-action@6c8c141 - # with: - # targets: "./ansible" - - - - name: Run Ansible playbook - run: ./scripts/ansible.sh - env: - NEXUS_ADMIN_PASSWORD: ${{ secrets.NEXUS_ADMIN_PASSWORD }} - NEXUS_USER_PASSWORD: ${{ secrets.NEXUS_USER_PASSWORD }} - - - # Docker - - name: Docker repo login - uses: Azure/docker-login@v1 - with: - login-server: ${{ env.DOCKER_FQDN }} - username: ${{ env.NEXUS_USER_USERNAME }} - password: ${{ secrets.NEXUS_USER_PASSWORD }} - - - name: Push images to Docker repo - run: ./scripts/push_docker_images.sh - - - # TODO: Remove once issue has been fixed - # https://github.com/terraform-providers/terraform-provider-azurerm/issues/8546 - - name: Restart Function App - run: az functionapp restart --name "${{ env.PREFIX }}-funcapp" --resource-group "${{ env.PREFIX }}-rg-function-app" - - - # Pester tests - - name: Run Pester tests - continue-on-error: true - run: pwsh -command "./scripts/Start-Test.ps1" - - - name: Archive test artifacts - uses: actions/upload-artifact@v1 - with: - name: test results - path: test/pester-test-results.xml - # TODO: only run when 
previous task (Pester tests) has been successful - if: always() - - # remove NuGet proxy repo so pester report step doesnt fail - - name: Unregister NuGet proxy repo - run: pwsh -command "Unregister-PSRepository -Name nuget.org-proxy -Verbose" - - # Shows at the bottom of a run: https://github.com/adamrushuk/devops-lab/runs/1035347513?check_suite_focus=true - - name: Pester report - # TODO: remove continue-on-error once bug is fixed - continue-on-error: true - uses: zyborg/pester-tests-report@v1.3.2 - with: - test_results_path: test/pester-test-results.xml - report_name: pester_tests - report_title: Pester Tests - github_token: ${{ secrets.GITHUB_TOKEN }} - - - # Notify - - name: Notify slack - continue-on-error: true - env: - SLACK_CHANNEL_ID: ${{ secrets.SLACK_CHANNEL_ID }} - SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} - run: ./scripts/send_slack_message.sh "[aks-nexus-velero] Build complete" - + # Init tasks - inc Env var concatenation + # https://docs.github.com/en/free-pro-team@latest/actions/reference/workflow-commands-for-github-actions#environment-files + - name: Init tasks - inc Env var concatenation (Workaround) + run: | + chmod -R +x ./scripts/ + echo "AKS_CLUSTER_NAME=${{ env.PREFIX }}-aks-001" >> $GITHUB_ENV + echo "AKS_RG_NAME=${{ env.PREFIX }}-rg-aks-dev-001" >> $GITHUB_ENV + echo "ARGOCD_FQDN=argocd.${{ env.ROOT_DOMAIN_NAME }}" >> $GITHUB_ENV + echo "DNS_DOMAIN_NAME=nexus.${{ env.ROOT_DOMAIN_NAME }}" >> $GITHUB_ENV + echo "DOCKER_FQDN=docker.${{ env.ROOT_DOMAIN_NAME }}" >> $GITHUB_ENV + echo "TERRAFORM_STORAGE_ACCOUNT=${{ env.PREFIX }}sttfstate${{ env.LOCATION }}001" >> $GITHUB_ENV + echo "TERRAFORM_STORAGE_RG=${{ env.PREFIX }}-rg-tfstate-dev-001" >> $GITHUB_ENV + echo "VELERO_STORAGE_ACCOUNT=${{ env.PREFIX }}stbckuksouth001" >> $GITHUB_ENV + echo "VELERO_STORAGE_RG=${{ env.PREFIX }}-rg-velero-dev-001" >> $GITHUB_ENV + + # Login + - name: Login to Azure + run: ./scripts/azure_login.sh + + # This is required when developing after the initial build, and the AKS cluster may have been stopped + # Ensure AKS cluster is running, else timeouts will occur on k8s Terraform apply tasks + - name: Start AKS Cluster + continue-on-error: true + run: ./scripts/start_aks_cluster.sh + + # Prereqs + - name: Create Storage Account for Terraform state + run: ./scripts/storage_create.sh + + - name: Lookup Storage Key + run: ./scripts/storage_key.sh + + - name: Replace tokens in Terraform config files + run: pwsh -command "./scripts/Replace-Tokens.ps1" + env: + ARGOCD_ADMIN_PASSWORD: ${{ secrets.ARGOCD_ADMIN_PASSWORD }} + HELM_CHART_REPO_DEPLOY_PRIVATE_KEY: ${{ secrets.HELM_CHART_REPO_DEPLOY_PRIVATE_KEY }} + IFTTT_WEBHOOK_KEY: ${{ secrets.IFTTT_WEBHOOK_KEY }} + + - name: Create zip file of Function App + run: pwsh -command "./function_app/CreateFunctionAppZip.ps1" + + # Terraform + - uses: hashicorp/setup-terraform@v1 + with: + terraform_version: ${{ env.TF_VERSION }} + + - name: Terraform Init / Validate + run: | + terraform init + terraform validate + working-directory: ${{ env.TF_WORKING_DIR }} + + - name: Terraform Lint + run: ./scripts/tflint.sh + env: + TF_WORKING_DIR: ${{ env.TF_WORKING_DIR }} + TFLINT_RULESET_AZURERM_VERSION: ${{ env.TFLINT_RULESET_AZURERM_VERSION }} + TFLINT_VERSION: ${{ env.TFLINT_VERSION }} + + - name: ๐Ÿ‘€ Terraform Plan + id: plan + run: terraform plan -out=${{ env.TF_PLAN }} + working-directory: ${{ env.TF_WORKING_DIR }} + + - name: ๐Ÿš€ Terraform Apply + run: ./scripts/terraform_apply.sh + # only apply if changes are present + # 
https://www.terraform.io/docs/commands/plan.html#detailed-exitcode + # if: steps.plan.outputs.exitcode == 2 + env: + TF_PLAN: ${{ env.TF_PLAN }} + + # Kubernetes + - name: Deploy kubernetes manifests + run: ./scripts/k8s_manifests_apply.sh + + - name: Wait for resources to be "Ready" + run: ./scripts/wait.sh + + # Ansible + - name: Run Ansible playbook + run: ./scripts/ansible.sh + env: + NEXUS_ADMIN_PASSWORD: ${{ secrets.NEXUS_ADMIN_PASSWORD }} + NEXUS_USER_PASSWORD: ${{ secrets.NEXUS_USER_PASSWORD }} + + # Docker + - name: Docker repo login + uses: Azure/docker-login@v1 + with: + login-server: ${{ env.DOCKER_FQDN }} + username: ${{ env.NEXUS_USER_USERNAME }} + password: ${{ secrets.NEXUS_USER_PASSWORD }} + + - name: Push images to Docker repo + run: ./scripts/push_docker_images.sh + + # TODO: Remove once issue has been fixed + # https://github.com/terraform-providers/terraform-provider-azurerm/issues/8546 + - name: Restart Function App + run: az functionapp restart --name "${{ env.PREFIX }}-funcapp" --resource-group "${{ env.PREFIX }}-rg-function-app" + + # Pester tests + - name: ๐Ÿงช Run Pester tests + continue-on-error: true + run: pwsh -command "./scripts/Start-Test.ps1" + + - name: Archive test artifacts + uses: actions/upload-artifact@v1 + with: + name: test results + path: test/pester-test-results.xml + if: always() + + # remove NuGet proxy repo so pester report step doesnt fail + - name: Unregister NuGet proxy repo + run: pwsh -command "Unregister-PSRepository -Name nuget.org-proxy -Verbose" + + # Shows at the bottom of a run: https://github.com/adamrushuk/devops-lab/runs/1035347513?check_suite_focus=true + - name: Pester report + uses: zyborg/pester-tests-report@v1 + with: + test_results_path: test/pester-test-results.xml + report_name: pester_tests + report_title: Pester Tests + github_token: ${{ secrets.GITHUB_TOKEN }} + + # Notify + - name: Notify slack + continue-on-error: true + env: + SLACK_CHANNEL_ID: ${{ secrets.SLACK_CHANNEL_ID }} + SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} + run: ./scripts/send_slack_message.sh "[aks-nexus-velero] Build complete" # used for any windows-only tasks test-windows: @@ -258,42 +238,41 @@ jobs: if: github.actor == github.event.repository.owner.login steps: - # Checkout - - uses: actions/checkout@v2 - - # Init tasks - inc Env var concatenation - # https://docs.github.com/en/free-pro-team@latest/actions/reference/workflow-commands-for-github-actions#environment-files - - name: Init tasks - inc Env var concatenation (Workaround) - # * NOTE: different syntax required for Windows agents - run: | - echo "AKS_RG_NAME=${{ env.PREFIX }}-rg-aks-dev-001" | Out-File -Append -Encoding utf8 -FilePath "$env:GITHUB_ENV" - echo "AKS_CLUSTER_NAME=${{ env.PREFIX }}-aks-001" | Out-File -Append -Encoding utf8 -FilePath "$env:GITHUB_ENV" - echo "DNS_DOMAIN_NAME=nexus.${{ env.ROOT_DOMAIN_NAME }}" | Out-File -Append -Encoding utf8 -FilePath "$env:GITHUB_ENV" - - - # Login - - name: Login to Azure - run: ./scripts/azure_login.ps1 - - # Chocolatey - - name: Test Nexus Chocolatey proxy repo - run: | - choco install velero --source "https://${{ env.DNS_DOMAIN_NAME }}/repository/chocolatey-proxy/" - - # Velero CLI - - name: Test Velero CLI - # NOTE: Some functions cast values to a string to perform comparisons - # https://docs.github.com/en/free-pro-team@latest/actions/reference/context-and-expression-syntax-for-github-actions#functions - # ! 
WARNING: only single quotes work for boolean comparison - if: env.VELERO_ENABLED == 'true' - run: | - az aks get-credentials --resource-group "${{ env.AKS_RG_NAME }}" --name "${{ env.AKS_CLUSTER_NAME }}" --overwrite-existing --admin - - echo "`nVelero version info:" - velero version || true - - echo "`nVelero backup location info:" - velero backup-location get - - echo "`nVelero backup info:" - velero backup get + # Checkout + - uses: actions/checkout@v2 + + # Init tasks - inc Env var concatenation + # https://docs.github.com/en/free-pro-team@latest/actions/reference/workflow-commands-for-github-actions#environment-files + - name: Init tasks - inc Env var concatenation (Workaround) + # * NOTE: different syntax required for Windows agents + run: | + echo "AKS_RG_NAME=${{ env.PREFIX }}-rg-aks-dev-001" | Out-File -Append -Encoding utf8 -FilePath "$env:GITHUB_ENV" + echo "AKS_CLUSTER_NAME=${{ env.PREFIX }}-aks-001" | Out-File -Append -Encoding utf8 -FilePath "$env:GITHUB_ENV" + echo "DNS_DOMAIN_NAME=nexus.${{ env.ROOT_DOMAIN_NAME }}" | Out-File -Append -Encoding utf8 -FilePath "$env:GITHUB_ENV" + + # Login + - name: Login to Azure + run: ./scripts/azure_login.ps1 + + # Chocolatey + - name: Test Nexus Chocolatey proxy repo + run: | + choco install velero --source "https://${{ env.DNS_DOMAIN_NAME }}/repository/chocolatey-proxy/" + + # Velero CLI + - name: Test Velero CLI + # NOTE: Some functions cast values to a string to perform comparisons + # https://docs.github.com/en/free-pro-team@latest/actions/reference/context-and-expression-syntax-for-github-actions#functions + # ! WARNING: only single quotes work for boolean comparison + if: env.VELERO_ENABLED == 'true' + run: | + az aks get-credentials --resource-group "${{ env.AKS_RG_NAME }}" --name "${{ env.AKS_CLUSTER_NAME }}" --overwrite-existing --admin + + echo "`nVelero version info:" + velero version || true + + echo "`nVelero backup location info:" + velero backup-location get + + echo "`nVelero backup info:" + velero backup get diff --git a/.github/workflows/destroy.yml b/.github/workflows/destroy.yml index 20186934..cac5aee2 100644 --- a/.github/workflows/destroy.yml +++ b/.github/workflows/destroy.yml @@ -49,7 +49,10 @@ env: # terraform TF_IN_AUTOMATION: "true" TF_INPUT: "false" - TF_VERSION: "0.12.30" # "latest" is supported + TF_LOG_PATH: terraform.log + TF_LOG: TRACE + # https://github.com/hashicorp/terraform/releases + TF_VERSION: "0.14.9" TF_WORKING_DIR: terraform # Env var concatenation is currently not supported at Workflow or Job scope. 
See workaround below: @@ -115,12 +118,19 @@ jobs: with: terraform_version: ${{ env.TF_VERSION }} - - name: Terraform destroy + - name: ๐Ÿ’€ Terraform destroy run: | terraform init terraform destroy -no-color -auto-approve working-directory: ${{ env.TF_WORKING_DIR }} + - name: Terraform logs + uses: actions/upload-artifact@v2 + with: + name: Terraform logs + path: ${{ env.TF_WORKING_DIR }}/${{ env.TF_LOG_PATH }} + if: always() + # Cleanup - name: Delete Storage run: ./scripts/storage_delete.sh diff --git a/README.md b/README.md index 2f26b023..c85ac340 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,5 @@ -# aks-nexus-velero +# DevOps Lab [![Build environment](https://github.com/adamrushuk/devops-lab/workflows/build/badge.svg)](https://github.com/adamrushuk/devops-lab/actions?query=workflow%3A%22build) diff --git a/scripts/start_aks_cluster.sh b/scripts/start_aks_cluster.sh index d7f7a6bf..0f0bae5a 100644 --- a/scripts/start_aks_cluster.sh +++ b/scripts/start_aks_cluster.sh @@ -6,12 +6,8 @@ set -euo pipefail trap "echo 'error: Script failed: see failed command above'" ERR -# Prereqs as this is a preview feature: https://docs.microsoft.com/en-us/azure/aks/start-stop-cluster -# Install the aks-preview extension -az extension add --name aks-preview - -# Update the extension to make sure you have the latest version installed -az extension update --name aks-preview +# info +az version # Check AKS power state aks_power_state=$(az aks show --name "$AKS_CLUSTER_NAME" --resource-group "$AKS_RG_NAME" --output tsv --query 'powerState.code') diff --git a/scripts/stop_aks_cluster.sh b/scripts/stop_aks_cluster.sh index 83bd3e55..7a64b149 100644 --- a/scripts/stop_aks_cluster.sh +++ b/scripts/stop_aks_cluster.sh @@ -6,12 +6,8 @@ set -euo pipefail trap "echo 'error: Script failed: see failed command above'" ERR -# Prereqs as this is a preview feature: https://docs.microsoft.com/en-us/azure/aks/start-stop-cluster -# Install the aks-preview extension -az extension add --name aks-preview - -# Update the extension to make sure you have the latest version installed -az extension update --name aks-preview +# info +az version # Check AKS power state aks_power_state=$(az aks show --name "$AKS_CLUSTER_NAME" --resource-group "$AKS_RG_NAME" --output tsv --query 'powerState.code') diff --git a/scripts/tflint.sh b/scripts/tflint.sh index 77e5ea19..57482d98 100644 --- a/scripts/tflint.sh +++ b/scripts/tflint.sh @@ -14,7 +14,7 @@ TFLINT_RULESET_AZURERM_VERSION="${TFLINT_RULESET_AZURERM_VERSION:-v0.7.0}" TF_FLAGS=("$TF_WORKING_DIR") export TFLINT_LOG=debug # use empty array to skip adding disabled rules, eg: "DISABLED_RULES=()" -DISABLED_RULES=("azurerm_log_analytics_workspace_invalid_retention_in_days") +DISABLED_RULES=() # use dynamic flags if [ ${#DISABLED_RULES[@]} -gt 0 ]; then diff --git a/sealed-secrets/README.md b/sealed-secrets/README.md new file mode 100644 index 00000000..10ddf439 --- /dev/null +++ b/sealed-secrets/README.md @@ -0,0 +1,79 @@ +# Sealed Secrets Notes + +[sealed-secrets](https://github.com/bitnami-labs/sealed-secrets) is a Kubernetes controller and tool for one-way +encrypted Secrets. + +**Problem**: "I can manage all my K8s config in git, except Secrets." + +**Solution**: Encrypt your Secret into a SealedSecret, which is safe to store - even to a public repository. The +SealedSecret can be decrypted only by the controller running in the target cluster and nobody else +(not even the original author) is able to obtain the original Secret from the SealedSecret. 
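+
+Under the hood, `kubeseal` encrypts against a certificate whose private key lives only in the controller, so
+secrets can even be sealed offline once that certificate has been fetched. A minimal sketch (the output
+filename is illustrative; the controller name/namespace are assumed to match the Helm install shown below):
+
+```bash
+# fetch the controller's public certificate (safe to share; it cannot decrypt anything)
+kubeseal --fetch-cert \
+  --controller-namespace sealed-secrets \
+  --controller-name sealed-secrets \
+  > sealed-secrets-cert.pem
+
+# the cert can then be used for offline sealing, e.g.:
+# kubeseal --cert sealed-secrets-cert.pem < secret.yaml > sealed-secret.yaml
+```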
+ +## Installation + +Before installation, consider reading the [Release Notes](https://github.com/bitnami-labs/sealed-secrets/blob/main/RELEASE-NOTES.md). + +### Helm Chart + +Use the code below to install the official [sealed-secrets helm chart](https://github.com/bitnami-labs/sealed-secrets/tree/main/helm/sealed-secrets): + +```bash +# add repo +helm repo add sealed-secrets https://bitnami-labs.github.io/sealed-secrets + +# list charts +helm search repo sealed-secrets + +# list all chart versions +helm search repo sealed-secrets/sealed-secrets -l + +# create namespace +kubectl create namespace sealed-secrets + +# install chart (dry-run) +helm upgrade sealed-secrets sealed-secrets/sealed-secrets --install --atomic --namespace sealed-secrets --debug --dry-run + +# install chart +helm upgrade sealed-secrets sealed-secrets/sealed-secrets --install --atomic --namespace sealed-secrets --debug + +# show status / notes +helm status sealed-secrets --namespace sealed-secrets +``` + +### Kubeseal CLI + +Install the kubeseal CLI by downloading the binary from [sealed-secrets/releases](https://github.com/bitnami-labs/sealed-secrets/releases). + +```bash +wget https://github.com/bitnami-labs/sealed-secrets/releases/download/v0.15.0/kubeseal-linux-amd64 -O kubeseal +sudo install -m 755 kubeseal /usr/local/bin/kubeseal +``` + +## Usage + +The example below creates a secret, then uses kubeseal to encrypt it into a sealed-secret file. + +Once the sealed-secret file is applied into the kubernetes cluster, it is decrypted server-side to create a +standard secret in the target namespace. + +```bash +# create secret +# (note use of `--dry-run` - this is just a local file!) +echo -n SuperSecretPassw0rd | kubectl create secret generic mysecret --dry-run=client --from-literal=username=admin --from-file=password=/dev/stdin -o yaml > secret.yaml + +# create sealed-secret using stdin/stdout +kubeseal \ + --controller-namespace sealed-secrets \ + --controller-name sealed-secrets \ + --namespace my-target-namespace \ + < secret.yaml > sealed-secret.yaml + +# create namespace +kubectl create namespace my-target-namespace + +# apply sealed-secret +kubectl create --namespace my-target-namespace -f sealed-secret.yaml + +# show secret +kubectl get secret mysecret --namespace my-target-namespace -o yaml +``` diff --git a/terraform/helm_aad_pod_identity.tf b/terraform/aad_pod_identity_helm.tf similarity index 91% rename from terraform/helm_aad_pod_identity.tf rename to terraform/aad_pod_identity_helm.tf index 9018b635..8451639b 100644 --- a/terraform/helm_aad_pod_identity.tf +++ b/terraform/aad_pod_identity_helm.tf @@ -51,13 +51,7 @@ resource "helm_release" "aad_pod_identity" { data.template_file.azureIdentities.rendered ] - # should only be required for helm v2 - set { - name = "installCRDs" - value = "false" - } - - # allow Kubenet: https://azure.github.io/aad-pod-identity/docs/configure/aad_pod_identity_on_kubenet/ + # enable if using Kubenet: https://azure.github.io/aad-pod-identity/docs/configure/aad_pod_identity_on_kubenet/ set { name = "nmi.allowNetworkPluginKubenet" value = "false" diff --git a/terraform/aks.tf b/terraform/aks.tf index 5fac5dba..9ec36ccf 100644 --- a/terraform/aks.tf +++ b/terraform/aks.tf @@ -54,7 +54,7 @@ resource "azurerm_log_analytics_solution" "aks" { # https://registry.terraform.io/modules/adamrushuk/aks/azurerm/latest module "aks" { source = "adamrushuk/aks/azurerm" - version = "0.7.0" + version = "0.8.0" kubernetes_version = var.kubernetes_version location = 
azurerm_resource_group.aks.location diff --git a/terraform/akv2k8s_helm.tf b/terraform/akv2k8s_helm.tf new file mode 100644 index 00000000..1ff38c6e --- /dev/null +++ b/terraform/akv2k8s_helm.tf @@ -0,0 +1,100 @@ +# Azure Key Vault to Kubernetes (akv2k8s) makes Azure Key Vault secrets, certificates and keys available in +# Kubernetes and/or your application - in a simple and secure way +# +# https://akv2k8s.io/ +# https://github.com/SparebankenVest/azure-key-vault-to-kubernetes + +# Key vault access policy for AKS / akv2k8s +data "azurerm_key_vault" "kv" { + name = var.key_vault_name + resource_group_name = var.key_vault_resource_group_name +} + +# Legacy key vault access policy method +# https://docs.microsoft.com/en-us/azure/key-vault/general/assign-access-policy-portal +# resource "azurerm_key_vault_access_policy" "aks" { +# key_vault_id = data.azurerm_key_vault.kv.id + +# tenant_id = data.azurerm_subscription.current.tenant_id +# object_id = module.aks.kubelet_identity[0].object_id + +# certificate_permissions = [ +# "get" +# ] + +# key_permissions = [ +# "get" +# ] + +# secret_permissions = [ +# "get" +# ] +# } + +# Provide key vault access to akv2k8s via Azure role-based access control +# https://registry.terraform.io/providers/hashicorp/azurerm/latest/docs/resources/role_assignment +resource "azurerm_role_assignment" "aks_mi_kv_certs" { + scope = data.azurerm_key_vault.kv.id + role_definition_name = "Key Vault Certificates Officer" + principal_id = module.aks.kubelet_identity[0].object_id + description = "Perform any action on the keys of a key vault, except manage permissions" +} + +resource "azurerm_role_assignment" "aks_mi_kv_keys" { + scope = data.azurerm_key_vault.kv.id + role_definition_name = "Key Vault Crypto User" + principal_id = module.aks.kubelet_identity[0].object_id + description = "Perform cryptographic operations using keys" +} + +resource "azurerm_role_assignment" "aks_mi_kv_secrets" { + scope = data.azurerm_key_vault.kv.id + role_definition_name = "Key Vault Secrets User" + principal_id = module.aks.kubelet_identity[0].object_id + description = "Provides read-only access to secret contents" +} + +# Requires "kube_admin_config_raw" as has AAD Auth enabled +# https://registry.terraform.io/providers/hashicorp/azurerm/latest/docs/resources/kubernetes_cluster#kube_admin_config_raw +resource "local_file" "kubeconfig" { + sensitive_content = module.aks.full_object.kube_admin_config_raw + filename = var.aks_config_path + + depends_on = [module.aks] +} + +# https://www.terraform.io/docs/providers/kubernetes/r/namespace.html +resource "kubernetes_namespace" "akv2k8s" { + metadata { + name = "akv2k8s" + } + timeouts { + delete = "15m" + } + + depends_on = [module.aks] +} + +# https://www.terraform.io/docs/providers/helm/r/release.html +# https://github.com/SparebankenVest/public-helm-charts/tree/master/stable/akv2k8s#configuration +resource "helm_release" "akv2k8s" { + chart = "akv2k8s" + name = "akv2k8s" + namespace = kubernetes_namespace.akv2k8s.metadata[0].name + repository = "http://charts.spvapi.no" + version = var.akv2k8s_chart_version + timeout = 600 + atomic = true + + set { + name = "addAzurePodIdentityException" + value = "true" + } + + set { + name = "controller.logLevel" + value = "debug" + } + + depends_on = [helm_release.aad_pod_identity] +} diff --git a/terraform/helm_argocd.tf b/terraform/argocd_helm.tf similarity index 95% rename from terraform/helm_argocd.tf rename to terraform/argocd_helm.tf index 744a2d71..d72f4219 100644 --- a/terraform/helm_argocd.tf +++ 
b/terraform/argocd_helm.tf @@ -23,8 +23,10 @@ resource "null_resource" "argocd_cert_sync" { provisioner "local-exec" { interpreter = ["/bin/bash", "-c"] - command = <<-EOT - export KUBECONFIG=${var.aks_config_path} + environment = { + KUBECONFIG = var.aks_config_path + } + command = <<-EOT kubectl apply -f ${var.argocd_cert_sync_yaml_path} EOT } @@ -45,7 +47,7 @@ resource "helm_release" "argocd" { version = var.argocd_chart_version timeout = 600 atomic = true - values = ["${file("${path.module}/files/argocd-values.yaml")}"] + values = [file("${path.module}/files/argocd-values.yaml")] set { name = "global.image.tag" diff --git a/terraform/argocd_sso.tf b/terraform/argocd_sso.tf new file mode 100644 index 00000000..25ce7ead --- /dev/null +++ b/terraform/argocd_sso.tf @@ -0,0 +1,176 @@ +# Configures Azure AD App Registration Auth using OIDC +# +# https://argo-cd.readthedocs.io/en/stable/operator-manual/user-management/microsoft/#azure-ad-app-registration-auth-using-oidc + +resource "random_password" "argocd" { + length = 32 + special = false + keepers = { + service_principal = azuread_application.argocd.id + } +} + +# https://registry.terraform.io/providers/hashicorp/azuread/latest/docs/resources/application +resource "azuread_application" "argocd" { + display_name = var.argocd_app_reg_name + prevent_duplicate_names = true + homepage = "https://${var.argocd_fqdn}" + identifier_uris = ["https://${var.argocd_app_reg_name}"] + reply_urls = ["https://${var.argocd_fqdn}/auth/callback"] + available_to_other_tenants = false + oauth2_allow_implicit_flow = false + # owners = [] + group_membership_claims = "All" + + # you can check manually created app reg info in the app reg manifest tab + # reference: https://github.com/mjisaak/azure-active-directory/blob/master/README.md#well-known-appids + required_resource_access { + # Microsoft Graph + resource_app_id = "00000003-0000-0000-c000-000000000000" + + # User.Read - e1fe6dd8-ba31-4d61-89e7-88639da4683d - Sign in and read user profile + resource_access { + id = "e1fe6dd8-ba31-4d61-89e7-88639da4683d" + type = "Scope" + } + } + + optional_claims { + access_token { + name = "groups" + source = null + essential = false + additional_properties = [] + } + + id_token { + name = "groups" + source = null + essential = false + additional_properties = [] + } + } +} + +# https://registry.terraform.io/providers/hashicorp/azuread/latest/docs/resources/application_password +resource "azuread_application_password" "argocd" { + application_object_id = azuread_application.argocd.id + description = "argocd_secret" + value = random_password.argocd.result + end_date = "2099-01-01T01:02:03Z" +} + +# TODO: add "SelfServiceAppAccess" tag to enable self-service options in Enterprise App +resource "azuread_service_principal" "argocd" { + application_id = azuread_application.argocd.application_id +} + +data "azurerm_client_config" "current" { +} + + +# argocd-cm patch +# https://registry.terraform.io/providers/hashicorp/template/latest/docs/data-sources/file +data "template_file" "argocd_cm" { + template = file(var.argocd_cm_yaml_path) + vars = { + tenantId = data.azurerm_client_config.current.tenant_id + appClientId = azuread_service_principal.argocd.application_id + } +} + +# https://www.terraform.io/docs/provisioners/local-exec.html +resource "null_resource" "argocd_cm" { + triggers = { + yaml_contents = filemd5(var.argocd_cm_yaml_path) + sp_app_id = azuread_service_principal.argocd.application_id + } + + provisioner "local-exec" { + interpreter = ["/bin/bash", "-c"] + 
environment = { + KUBECONFIG = var.aks_config_path + } + command = < -# Argo CD Notes - -A collection of notes whilst testing Argo CD. - -Full SSO configuration currently cannot be done with Terraform, so I've partial automated the Application Registration, -and it's Service Principle (which makes an "Enterprise App"), but there are manual steps afterwards: - -- Add `Sign on URL` -- Add `email` User Claim -- Create `SAML Signing Cert` -- Download SAML cert (base64), ready for the ConfigMap yaml -- Create yaml ConfigMaps for SSO and RBAC -- Apply ConfigMaps - - -## Contents - -- [Reference](#reference) -- [Getting Started](#getting-started) -- [Add Repository](#add-repository) -- [Configure SSO for Argo CD](#configure-sso-for-argo-cd) - -## Reference - -- https://github.com/argoproj/argo-cd/blob/master/docs/faq.md#i-forgot-the-admin-password-how-do-i-reset-it - -## Getting Started - -Use `--grpc-web` if you get the `argocd transport: received the unexpected content-type "text/plain; charset=utf-8"` error - -```bash -# vars -ARGO_SERVER="argocd.thehypepipe.co.uk" - -# install -VERSION=$(curl --silent "https://api.github.com/repos/argoproj/argo-cd/releases/latest" | grep '"tag_name"' | sed -E 's/.*"([^"]+)".*/\1/') -sudo curl -sSL -o /usr/local/bin/argocd https://github.com/argoproj/argo-cd/releases/download/$VERSION/argocd-linux-amd64 -sudo chmod +x /usr/local/bin/argocd - -# show version -argocd version --grpc-web --server "$ARGO_SERVER" - -# get admin password -# default password is server pod name, eg: "argocd-server-89c6cd7d4-h7vmn" -ARGO_ADMIN_PASSWORD=$(kubectl get pods -n argocd -l app.kubernetes.io/name=argocd-server -o name | cut -d'/' -f 2) - -# login -argocd logout -h -argocd logout "$ARGO_SERVER" -argocd login -h -argocd login "$ARGO_SERVER" --grpc-web --username admin --password "$ARGO_ADMIN_PASSWORD" - -# change password -read -s NEW_ARGO_ADMIN_PASSWORD -# echo "$NEW_ARGO_ADMIN_PASSWORD" -argocd account update-password --grpc-web -h -argocd account update-password --grpc-web --account admin --current-password "$ARGO_ADMIN_PASSWORD" --new-password "$NEW_ARGO_ADMIN_PASSWORD" - -# test new admin password -argocd logout "$ARGO_SERVER" -argocd login "$ARGO_SERVER" --grpc-web --username admin --password "$NEW_ARGO_ADMIN_PASSWORD" - -# account tasks -argocd account list -argocd account -h - -# misc -argocd -h -``` - -## Add Repository - -```bash -# Add a Git repository via SSH using a private key for authentication, ignoring the server's host key -# argocd repo add git@github.com:adamrushuk/charts-private.git --insecure-ignore-host-key --ssh-private-key-path ~/.ssh/id_ed25519 -argocd repo add -h -argocd repo add git@github.com:adamrushuk/charts-private.git --ssh-private-key-path ~/.ssh/id_ed25519 - -# add known_host entries for private git server -ssh-keyscan gitlab.thehypepipe.co.uk | argocd cert add-ssh --batch - -# create ssh key for private git repo access -# ~/.ssh/id_ed25519 -ssh-keygen -t ed25519 -f ~/.ssh/id_ed25519_argocd -C "argocd@gitlab.thehypepipe.co.uk" -ll ~/.ssh - -# check public key fingerprint -ssh-keygen -lf ~/.ssh/id_ed25519_argocd.pub - -# copy public key and enter into source git repo settings -# eg, GitLab repo deploy key: https://gitlab.thehypepipe.co.uk/helm-charts/-/settings/repository > Deploy Keys -cat ~/.ssh/id_ed25519_argocd.pub - -# add helm chart repository -argocd repo add git@gitlab.thehypepipe.co.uk/helm-charts.git --ssh-private-key-path ~/.ssh/id_ed25519_argocd - -# show repo -argocd repo list -``` - -## Configure SSO for Argo CD - 
-https://argoproj.github.io/argo-cd/operator-manual/user-management/microsoft/ - -```bash -# subscription where ArgoCD is deployed -AR-Dev - -# created new AAD groups, eg: -AR-Dev_ArgoCD_Admin -AR-Dev_ArgoCD_ReadOnly - -# created argo enterprise app -AR-Dev_ArgoCD - - -# Basic SAML Configuration -# Identifier (Entity ID) -https://argocd.thehypepipe.co.uk/api/dex/callback -# Reply URL (Assertion Consumer Service URL) -https://argocd.thehypepipe.co.uk/api/dex/callback -# Sign on URL -https://argocd.thehypepipe.co.uk/auth/login - -# User Attributes & Claims -# + Add new claim | Name: email | Source: Attribute | Source attribute: user.userprincipalname -+ Add new claim | Name: email | Source: Attribute | Source attribute: user.primaryauthoritativeemail - -+ Add group claim | Which groups: All groups | Source attribute: Group ID | Customize: True | Name: Group | Namespace: | Emit groups as role claims: False - -# Create a "Sign SAML assertion" SAML Signing Cert (SHA-256) -# Download and base64 the cert, ready for the ConfigMap yaml - -# Login URL (ssoURL) -https://login.microsoftonline.com//saml2 -# Azure AD Identifier -https://sts.windows.net// -# Logout URL -https://login.microsoftonline.com//saml2 - - -# SSO: User Attributes & Claims -# select user.userprincipalname instead of user.mail -+ Add new claim | Name: email | Source: Attribute | Source attribute: user.userprincipalname - - - - -## Create RBAC patch ## -# RBAC vars -ARGO_ADMIN_GROUP_NAME="AR-Dev_ArgoCD_Admins" -ARGO_ADMIN_GROUP_ID=$(az ad group show --group "$ARGO_ADMIN_GROUP_NAME" --query "objectId" --output tsv) - -# Create RBAC patch yaml -cat > argocd-rbac-cm-patch.yaml << EOF -# Patch ConfigMap to add RBAC config -data: - policy.default: role:readonly - - # Map AAD Group Object Id to an Argo CD role - # (Nested groups work fine) - # g, , role:admin - policy.csv: | - g, $ARGO_ADMIN_GROUP_ID, role:admin -EOF - -# Apply yaml RBAC patch for default admin and readonly roles -kubectl patch configmap/argocd-rbac-cm --namespace argocd --type merge --patch "$(cat argocd-rbac-cm-patch.yaml)" - - - -## Create SSO patch yaml ## -# SSO vars -ARGO_FQDN="argocd.thehypepipe.co.uk" -TENANT_ID=$(az account show --query "tenantId" --output tsv) -# assumes SAML Signing Certificate has been downloaded/saved as "ArgoCD.cer" (choosing Certificate (Base64) option) -SAML_CERT_BASE64=$(cat ArgoCD.cer | base64) -echo "$SAML_CERT_BASE64" - -# created indented string ready for caData YAML multi-line block -SAML_CERT_BASE64_INDENTED=$(cat ArgoCD.cer | base64 | sed 's/^/ /') -echo "$SAML_CERT_BASE64_INDENTED" - -cat > argocd-cm-sso-patch.yaml << EOF -# Patch ConfigMap to add dex SSO config -# source: https://argoproj.github.io/argo-cd/operator-manual/user-management/microsoft/ -data: - dex.config: | - logger: - level: debug - format: json - connectors: - - type: saml - id: saml - name: saml - config: - entityIssuer: https://$ARGO_FQDN/api/dex/callback - ssoURL: https://login.microsoftonline.com/$TENANT_ID/saml2 - caData: | -$SAML_CERT_BASE64_INDENTED - redirectURI: https://$ARGO_FQDN/api/dex/callback - usernameAttr: email - emailAttr: email - groupsAttr: Group -EOF - -# Apply SSO patch -kubectl patch configmap/argocd-cm --namespace argocd --type merge --patch "$(cat argocd-cm-sso-patch.yaml)" - -``` diff --git a/terraform/argocd_sso/argocd-cm-sso-patch.TEMPLATE.yaml b/terraform/argocd_sso/argocd-cm-sso-patch.TEMPLATE.yaml deleted file mode 100644 index b32c7316..00000000 --- a/terraform/argocd_sso/argocd-cm-sso-patch.TEMPLATE.yaml +++ /dev/null @@ -1,25 +0,0 
@@ -# Patch ConfigMap to add dex SSO config -# source: https://argoproj.github.io/argo-cd/operator-manual/user-management/microsoft/ -# -# After following steps above, modify content and run command below to apply patch: -# kubectl patch configmap/argocd-cm --namespace argocd --type merge --patch "$(cat argocd-cm-patch-dev.yaml)" - -# TEMPLATE -data: - dex.config: | - logger: - level: debug - format: json - connectors: - - type: saml - id: saml - name: saml - config: - entityIssuer: https://$ARGO_FQDN/api/dex/callback - ssoURL: https://login.microsoftonline.com/$TENANT_ID/saml2 - caData: | - $SAML_CERT_BASE64 - redirectURI: https://$ARGO_FQDN/api/dex/callback - usernameAttr: email - emailAttr: email - groupsAttr: Group diff --git a/terraform/argocd_sso/argocd-rbac-cm-patch.TEMPLATE.yaml b/terraform/argocd_sso/argocd-rbac-cm-patch.TEMPLATE.yaml deleted file mode 100644 index 2a55d75d..00000000 --- a/terraform/argocd_sso/argocd-rbac-cm-patch.TEMPLATE.yaml +++ /dev/null @@ -1,16 +0,0 @@ -# Patch ConfigMap to add RBAC config -# source: -# - https://argoproj.github.io/argo-cd/operator-manual/rbac/ -# -# Run command below to apply patch for default admin and readonly roles: -# kubectl patch configmap/argocd-rbac-cm --namespace argocd --type merge --patch-file "argocd-rbac-cm-patch.yaml" - -# TEMPLATE -data: - policy.default: role:readonly - - # Map AAD Group Object Id to an Argo CD role - # (Nested groups work fine) - # g, , role:admin - policy.csv: | - g, $ARGO_ADMIN_GROUP_ID, role:admin diff --git a/terraform/argocd_sso/argocd_aad.tf b/terraform/argocd_sso/argocd_aad.tf deleted file mode 100644 index e4131573..00000000 --- a/terraform/argocd_sso/argocd_aad.tf +++ /dev/null @@ -1,145 +0,0 @@ -provider "azurerm" { - version = "2.44.0" - features {} -} - -variable "dns_zone_name" { - default = "thehypepipe.co.uk" -} - -variable "admin_consent" { - default = true -} - - -# TODO: remove temp outputs -# data "azuread_application" "argocd_manual" { -# display_name = "AR-Dev_ArgoCD" -# } - -# output "azure_app_object_manual" { -# value = data.azuread_application.argocd_manual -# } - -# data "azuread_service_principal" "argocd_manual" { -# display_name = "AR-Dev_ArgoCD" -# } - -# output "azure_sp_object_manual" { -# value = data.azuread_service_principal.argocd_manual -# } - -output "azure_ad_object_argocd" { - value = azuread_application.argocd -} -output "azure_sp_object_argocd" { - value = azuread_service_principal.argocd -} - -# https://registry.terraform.io/providers/hashicorp/azuread/latest/docs/resources/application -# source: https://github.com/hashicorp/terraform-provider-azuread/issues/173#issuecomment-663727531 -resource "azuread_application" "argocd" { - display_name = "ArgoCD" - prevent_duplicate_names = true - homepage = "https://argocd.${var.dns_zone_name}" - identifier_uris = ["https://argocd.${var.dns_zone_name}/api/dex/callback"] - reply_urls = ["https://argocd.${var.dns_zone_name}/api/dex/callback"] - available_to_other_tenants = false - oauth2_allow_implicit_flow = true - # type = "webapp/api" - # owners = ["00000004-0000-0000-c000-000000000000"] - group_membership_claims = "All" - - required_resource_access { - //https://docs.microsoft.com/en-us/azure/active-directory/manage-apps/grant-admin-consent - resource_app_id = "00000003-0000-0000-c000-000000000000" - resource_access { - id = "5f8c59db-677d-491f-a6b8-5f174b11ec1d" - type = "Scope" - } - resource_access { - id = "e1fe6dd8-ba31-4d61-89e7-88639da4683d" - type = "Scope" - } - } - - app_role { - allowed_member_types = [ - "User" 
- ] - - description = "User" - display_name = "User" - is_enabled = true - } - - app_role { - allowed_member_types = [ - "User" - ] - - description = "msiam_access" - display_name = "msiam_access" - is_enabled = true - } - - // We need to wait because Azure Graph API returns a 200 before its call-able #eventualconsistancy... - provisioner "local-exec" { - command = "sleep 20" - } - - //https://github.com/Azure/azure-cli/issues/7579 - //Add metadata URL - // provisioner "local-exec" { - // command = "az ad app update --id ${self.application_id} --set samlMetadataUrl=${var.saml_metadata_url}" - // } - // We need to wait because Azure Graph API returns a 200 before its call-able #eventualconsistancy... - // provisioner "local-exec" { - // command = "sleep 5" - // } - //https://github.com/Azure/azure-cli/issues/12946 - //https://github.com/Azure/azure-cli/issues/11534 - //https://docs.microsoft.com/en-us/azure/active-directory/develop/active-directory-optional-claims - //Optional Claims for tokens - provisioner "local-exec" { - command = "az rest --method PATCH --uri 'https://graph.microsoft.com/v1.0/applications/${self.object_id}' --body '{\"optionalClaims\": {\"saml2Token\": [{\"name\": \"groups\", \"additionalProperties\": []}]}}'" - } -} - -resource "azuread_service_principal" "argocd" { - //https://github.com/Azure/azure-cli/issues/9250 - application_id = azuread_application.argocd.application_id - tags = [ - "WindowsAzureActiveDirectoryIntegratedApp", - "WindowsAzureActiveDirectoryCustomSingleSignOnApplication", - "WindowsAzureActiveDirectoryGalleryApplicationNonPrimaryV1" - ] - - // We need to wait because Azure Graph API returns a 200 before its call-able #eventualconsistancy... - provisioner "local-exec" { - command = "sleep 20" - } - - # https://docs.microsoft.com/en-us/graph/application-saml-sso-configure-api?tabs=http#set-single-sign-on-mode - provisioner "local-exec" { - command = "az ad sp update --id ${azuread_application.argocd.application_id} --set preferredSingleSignOnMode='saml'" - } - - # depends_on = [ - # azuread_application.argocd - # ] -} - -resource "null_resource" "grant_admin_constent" { - count = var.admin_consent ? 
1 : 0 - // https://docs.microsoft.com/en-us/cli/azure/ad/app/permission?view=azure-cli-latest#code-try-3 - provisioner "local-exec" { - command = "sleep 20" - } - provisioner "local-exec" { - command = "az ad app permission admin-consent --id ${azuread_application.argocd.application_id}" - } - depends_on = [ - azuread_service_principal.argocd - ] -} diff --git a/terraform/argocd_sso/sso_claims.sh b/terraform/argocd_sso/sso_claims.sh deleted file mode 100644 index 8b09dbe8..00000000 --- a/terraform/argocd_sso/sso_claims.sh +++ /dev/null @@ -1,62 +0,0 @@ -# Adding extra SSO claim -# Set "ArgoCD" app reg () -ARGO_APP_OBJECT_ID="" -az rest --method PATCH --uri "https://graph.microsoft.com/v1.0/applications/$ARGO_APP_OBJECT_ID" --body '{\"optionalClaims\": {\"saml2Token\": [{\"name\": \"test\", \"additionalProperties\": [\"sam_account_name\"]}]}}' - -az rest --method PATCH --uri "https://graph.microsoft.com/v1.0/applications/$ARGO_APP_OBJECT_ID" --body '{\"optionalClaims\": {\"saml2Token\": [{\"name\": \"userprincipalname\", \"source\": \"user\", \"additionalProperties\": [\"email\"]}]}}' - -# works via PS -$ARGO_APP_OBJECT_ID = "" -az rest --method PATCH --uri "https://graph.microsoft.com/v1.0/applications/$ARGO_APP_OBJECT_ID" --body '{\"optionalClaims\": {\"saml2Token\": [{\"name\": \"groups\", \"additionalProperties\": [\"sam_account_name\"]}]}}' - -# add custom email claim -az rest --method PATCH --uri "https://graph.microsoft.com/v1.0/applications/$ARGO_APP_OBJECT_ID" --body '{\"optionalClaims\": {\"saml2Token\": [{\"name\": \"userprincipalname\", \"source\": \"user\", \"additionalProperties\": [\"email\"]}]}}' - -# add custom group claim -az rest --method PATCH --uri "https://graph.microsoft.com/v1.0/applications/$ARGO_APP_OBJECT_ID" --body '{\"optionalClaims\": {\"saml2Token\": [{\"name\": \"groups\", \"source\": null}]}}' - - - -# Get -az rest --method GET --uri "https://graph.microsoft.com/v1.0/applications/$ARGO_APP_OBJECT_ID" -az rest --method GET --uri "https://graph.microsoft.com/v1.0/applications/$ARGO_APP_OBJECT_ID" | clip.exe - - "optionalClaims": { - "accessToken": [], - "idToken": [], - "saml2Token": [ - { - "additionalProperties": [], - "essential": false, - "name": "groups", - "source": null - } - ] - }, - - -# TF created "ArgoCD" App Reg -az rest --method GET --uri "https://graph.microsoft.com/v1.0/applications/$ARGO_APP_OBJECT_ID" - - "optionalClaims": { - "accessToken": [], - "idToken": [], - "saml2Token": [ - { - "additionalProperties": [ - "sam_account_name" - ], - "essential": false, - "name": "test", - "source": null - } - ] - }, - -# AR-Dev_ArgoCD - App reg -az rest --method GET --uri "https://graph.microsoft.com/v1.0/applications/$ARGO_APP_OBJECT_ID" - -# manual "AR-Dev_ArgoCD" Enterprise App -SERVICE_PRINCIPLE_ID="" -az rest --method GET --uri "https://graph.microsoft.com/v1.0/servicePrincipals/$SERVICE_PRINCIPLE_ID" -az rest --method GET --uri "https://graph.microsoft.com/v1.0/servicePrincipals/$SERVICE_PRINCIPLE_ID" | clip.exe diff --git a/terraform/argocd_sso/tf_test.ps1 b/terraform/argocd_sso/tf_test.ps1 deleted file mode 100644 index 7251a13c..00000000 --- a/terraform/argocd_sso/tf_test.ps1 +++ /dev/null @@ -1,16 +0,0 @@ -# testing Terraform config for Enterprise App -# use WSL -cd ./terraform/argocd_sso - -# login -az login -az account show - -# init -terraform init - -# apply -terraform apply - -# destroy -terraform destroy diff --git a/terraform/helm_cert_manager.tf b/terraform/cert_manager_helm.tf similarity index 100% rename from 
terraform/helm_cert_manager.tf rename to terraform/cert_manager_helm.tf diff --git a/terraform/dns.tf b/terraform/dns.tf index d2203202..7ccec545 100644 --- a/terraform/dns.tf +++ b/terraform/dns.tf @@ -60,8 +60,10 @@ resource "null_resource" "azureIdentity_external_dns" { provisioner "local-exec" { interpreter = ["/bin/bash", "-c"] - command = <" + +# Labels to add to the Velero deployment's. Optional. +labels: {} + # Annotations to add to the Velero deployment's pod template. Optional. # # If using kube2iam or kiam, use the following annotation with your AWS_ACCOUNT_ID @@ -77,6 +86,11 @@ metrics: scrapeInterval: 30s scrapeTimeout: 10s + # service metdata if metrics are enabled + service: + annotations: {} + labels: {} + # Pod annotations for Prometheus podAnnotations: prometheus.io/scrape: "true" @@ -89,14 +103,6 @@ metrics: # ServiceMonitor namespace. Default to Velero namespace. # namespace: -# Install CRDs as a templates. Enabled by default. -installCRDs: true - -# Enable/disable all helm hooks annotations -# You should disable this if using a deploy tool that doesn't support helm hooks, -# such as ArgoCD -enableHelmHooks: true - ## ## End of deployment-related settings. ## @@ -173,6 +179,11 @@ configuration: restoreResourcePriorities: # `velero server` default: false restoreOnlyMode: + # `velero server` default: 20.0 + clientQPS: + # `velero server` default: 30 + clientBurst: + # # additional key/value pairs to be used as environment variables such as "AWS_CLUSTER_NAME: 'yourcluster.domain.tld'" extraEnvVars: {} @@ -262,6 +273,12 @@ restic: # Tolerations to use for the Restic daemonset. Optional. tolerations: [] + # Annotations to set for the Restic daemonset. Optional. + annotations: {} + + # labels to set for the Restic daemonset. Optional. + labels: {} + # Extra volumes for the Restic daemonset. Optional. extraVolumes: [] @@ -278,6 +295,9 @@ restic: securityContext: {} # fsGroup: 1337 + # Node selector to use for the Restic daemonset. Optional. + nodeSelector: {} + # Backup schedules to create. 
# Eg: # schedules: diff --git a/terraform/helm_akv2k8s.tf b/terraform/helm_akv2k8s.tf deleted file mode 100644 index 7e615087..00000000 --- a/terraform/helm_akv2k8s.tf +++ /dev/null @@ -1,131 +0,0 @@ -# Azure Key Vault to Kubernetes (akv2k8s) makes Azure Key Vault secrets, certificates and keys available in -# Kubernetes and/or your application - in a simple and secure way -# -# https://akv2k8s.io/ -# https://github.com/SparebankenVest/azure-key-vault-to-kubernetes - -# Key vault access policy for AKS / akv2k8s -data "azurerm_key_vault" "kv" { - name = var.key_vault_name - resource_group_name = var.key_vault_resource_group_name -} - -resource "azurerm_key_vault_access_policy" "aks" { - key_vault_id = data.azurerm_key_vault.kv.id - - tenant_id = data.azurerm_subscription.current.tenant_id - object_id = module.aks.kubelet_identity[0].object_id - - certificate_permissions = [ - "get" - ] - - key_permissions = [ - "get" - ] - - secret_permissions = [ - "get" - ] -} - - -# Requires "kube_admin_config_raw" as has AAD Auth enabled -# https://registry.terraform.io/providers/hashicorp/azurerm/latest/docs/resources/kubernetes_cluster#kube_admin_config_raw -resource "local_file" "kubeconfig" { - sensitive_content = module.aks.full_object.kube_admin_config_raw - filename = var.aks_config_path - - depends_on = [module.aks] -} - -# https://www.terraform.io/docs/provisioners/local-exec.html -resource "null_resource" "akv2k8s_crds" { - triggers = { - # always_run = "${timestamp()}" - akv2k8s_yaml_contents = filemd5(var.akv2k8s_yaml_path) - } - - provisioner "local-exec" { - interpreter = ["/bin/bash", "-c"] - command = < 2.2 = 2.X.Y - tls = "~> 2.1" - } + # https://github.com/hashicorp/terraform-provider-kubernetes/releases + kubernetes = { + source = "hashicorp/kubernetes" + version = "~> 2.0.3" + } - # 0.12.X - required_version = "~> 0.12.30" # https://github.com/hashicorp/terraform/releases + # https://github.com/hashicorp/terraform-provider-helm/releases + helm = { + source = "hashicorp/helm" + version = "~> 2.1.0" + } + + random = { + source = "hashicorp/random" + version = "~> 2.2" + } + + tls = { + source = "hashicorp/tls" + version = "~> 2.1" + } + + local = { + source = "hashicorp/local" + version = "~> 2.0" + } + + null = { + source = "hashicorp/null" + version = "~> 3.0" + } + + template = { + source = "hashicorp/template" + version = "~> 2.0" + } + } } # must include blank features block # https://github.com/terraform-providers/terraform-provider-azurerm/releases provider "azurerm" { - version = "2.51.0" features {} } -# use statically defined credentials -# https://registry.terraform.io/providers/hashicorp/kubernetes/latest/docs#statically-defined-credentials +# https://registry.terraform.io/providers/hashicorp/kubernetes/latest/docs#credentials-config provider "kubernetes" { host = module.aks.full_object.kube_admin_config[0].host client_certificate = base64decode(module.aks.full_object.kube_admin_config[0].client_certificate) @@ -43,6 +85,7 @@ provider "kubernetes" { cluster_ca_certificate = base64decode(module.aks.full_object.kube_admin_config[0].cluster_ca_certificate) } +# https://registry.terraform.io/providers/hashicorp/helm/latest/docs#credentials-config provider "helm" { kubernetes { host = module.aks.full_object.kube_admin_config[0].host @@ -50,4 +93,9 @@ provider "helm" { client_key = base64decode(module.aks.full_object.kube_admin_config[0].client_key) cluster_ca_certificate = base64decode(module.aks.full_object.kube_admin_config[0].cluster_ca_certificate) } + + # TODO: currently 
has issues when enabled + # experiments { + # manifest = true + # } }
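
After bumping `required_version` and moving to the explicit `required_providers` syntax above, the working
directory needs re-initialising. A minimal sketch of the local upgrade steps (standard Terraform CLI commands,
run from the repo's `terraform/` directory):

```bash
# re-initialise and allow provider versions to change to match the new constraints
terraform init -upgrade

# Terraform 0.14 records provider selections in a dependency lock file; commit it alongside the config
git add .terraform.lock.hcl

# confirm the config is valid against the upgraded providers
terraform validate
```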