diff --git a/.gitignore b/.gitignore index 6f72f89..d4feb22 100644 --- a/.gitignore +++ b/.gitignore @@ -15,7 +15,7 @@ *.out # Dependency directories (remove the comment below to include it) -# vendor/ +vendor/ # Go workspace file go.work @@ -23,3 +23,18 @@ go.work.sum # env file .env + +# editor and IDE paraphernalia +.idea +*.swp +*.swo +*.iml +.code +*~ + +# ignore personal information +.myenv +.netrc + +target/ +.bin/ \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..1db8053 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,11 @@ +# ces-commons-lib Changelog +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + +## [Unreleased] + +## [v0.1.0] - 2024-11-15 +### Added +- [#1] Add RemoteDoguDescriptorRepository interface and common errors \ No newline at end of file diff --git a/Jenkinsfile b/Jenkinsfile new file mode 100644 index 0000000..108d228 --- /dev/null +++ b/Jenkinsfile @@ -0,0 +1,128 @@ +#!groovy + +@Library(['github.com/cloudogu/ces-build-lib@3.0.0']) +import com.cloudogu.ces.cesbuildlib.* + +// Creating necessary git objects +git = new Git(this, "cesmarvin") +git.committerName = 'cesmarvin' +git.committerEmail = 'cesmarvin@cloudogu.com' +gitflow = new GitFlow(this, git) +github = new GitHub(this, git) +changelog = new Changelog(this) +Docker docker = new Docker(this) +gpg = new Gpg(this, docker) +goVersion = "1.23" + +// Configuration of repository +repositoryOwner = "cloudogu" +repositoryName = "ces-commons-lib" +project = "github.com/${repositoryOwner}/${repositoryName}" + +// Configuration of branches +productionReleaseBranch = "main" +developmentBranch = "develop" +currentBranch = "${env.BRANCH_NAME}" + +node('docker') { + timestamps { + stage('Checkout') { + checkout scm + make 'clean' + } + + stage('Check 
Markdown Links') { + Markdown markdown = new Markdown(this, "3.11.0") + markdown.check() + } + + new Docker(this) + .image("golang:${goVersion}") + .mountJenkinsUser() + .inside("--volume ${WORKSPACE}:/go/src/${project} -w /go/src/${project}") { + stage('Unit test') { + make 'unit-test' + junit allowEmptyResults: true, testResults: 'target/unit-tests/*-tests.xml' + } + + stage("Review dog analysis") { + stageStaticAnalysisReviewDog() + } + } + + stage('SonarQube') { + stageStaticAnalysisSonarQube() + } + + + stageAutomaticRelease() + } +} + +void gitWithCredentials(String command) { + withCredentials([usernamePassword(credentialsId: 'cesmarvin', usernameVariable: 'GIT_AUTH_USR', passwordVariable: 'GIT_AUTH_PSW')]) { + sh( + script: "git -c credential.helper=\"!f() { echo username='\$GIT_AUTH_USR'; echo password='\$GIT_AUTH_PSW'; }; f\" " + command, + returnStdout: true + ) + } +} + +void stageStaticAnalysisReviewDog() { + def commitSha = sh(returnStdout: true, script: 'git rev-parse HEAD').trim() + + withCredentials([[$class: 'UsernamePasswordMultiBinding', credentialsId: 'sonarqube-gh', usernameVariable: 'USERNAME', passwordVariable: 'REVIEWDOG_GITHUB_API_TOKEN']]) { + withEnv(["CI_PULL_REQUEST=${env.CHANGE_ID}", "CI_COMMIT=${commitSha}", "CI_REPO_OWNER=${repositoryOwner}", "CI_REPO_NAME=${repositoryName}"]) { + make 'static-analysis-ci' + } + } +} + +void stageStaticAnalysisSonarQube() { + def scannerHome = tool name: 'sonar-scanner', type: 'hudson.plugins.sonar.SonarRunnerInstallation' + withSonarQubeEnv { + sh "git config 'remote.origin.fetch' '+refs/heads/*:refs/remotes/origin/*'" + gitWithCredentials("fetch --all") + + if (currentBranch == productionReleaseBranch) { + echo "This branch has been detected as the production branch." + sh "${scannerHome}/bin/sonar-scanner -Dsonar.branch.name=${env.BRANCH_NAME}" + } else if (currentBranch == developmentBranch) { + echo "This branch has been detected as the development branch." 
+ sh "${scannerHome}/bin/sonar-scanner -Dsonar.branch.name=${env.BRANCH_NAME}" + } else if (env.CHANGE_TARGET) { + echo "This branch has been detected as a pull request." + sh "${scannerHome}/bin/sonar-scanner -Dsonar.pullrequest.key=${env.CHANGE_ID} -Dsonar.pullrequest.branch=${env.CHANGE_BRANCH} -Dsonar.pullrequest.base=${developmentBranch}" + } else if (currentBranch.startsWith("feature/")) { + echo "This branch has been detected as a feature branch." + sh "${scannerHome}/bin/sonar-scanner -Dsonar.branch.name=${env.BRANCH_NAME}" + } else { + echo "This branch has been detected as a miscellaneous branch." + sh "${scannerHome}/bin/sonar-scanner -Dsonar.branch.name=${env.BRANCH_NAME} " + } + } + timeout(time: 2, unit: 'MINUTES') { // Needed when there is no webhook for example + def qGate = waitForQualityGate() + if (qGate.status != 'OK') { + unstable("Pipeline unstable due to SonarQube quality gate failure") + } + } +} + +void stageAutomaticRelease() { + if (gitflow.isReleaseBranch()) { + String releaseVersion = git.getSimpleBranchName() + + stage('Finish Release') { + gitflow.finishRelease(releaseVersion, productionReleaseBranch) + } + + stage('Add Github-Release') { + releaseId = github.createReleaseWithChangelog(releaseVersion, changelog, productionReleaseBranch) + } + } +} + +void make(String makeArgs) { + sh "make ${makeArgs}" +} \ No newline at end of file diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..5088235 --- /dev/null +++ b/Makefile @@ -0,0 +1,15 @@ +ARTIFACT_ID=ces-commons-lib +VERSION=0.1.0 + +MAKEFILES_VERSION=9.3.2 +.DEFAULT_GOAL:=help + +include build/make/variables.mk +include build/make/self-update.mk +include build/make/dependencies-gomod.mk +include build/make/build.mk +include build/make/test-common.mk +include build/make/test-unit.mk +include build/make/static-analysis.mk +include build/make/clean.mk +include build/make/release.mk diff --git a/README.md b/README.md index 9e51691..1d2ffd6 100644 --- a/README.md +++ 
b/README.md @@ -1 +1,21 @@ -# ces-commons-lib \ No newline at end of file +# ces-commons-lib + +A Go library for interacting with the Cloudogu EcoSystem. + +## What is the Cloudogu EcoSystem? +The Cloudogu EcoSystem is an open platform, which lets you choose how and where your team creates great software. Each service or tool is delivered as a Dogu, a Docker container. Each Dogu can easily be integrated in your environment just by pulling it from our registry. + +We have a growing number of ready-to-use Dogus, e.g. SCM-Manager, Jenkins, Nexus Repository, SonarQube, Redmine and many more. Every Dogu can be tailored to your specific needs. Take advantage of a central authentication service, a dynamic navigation, that lets you easily switch between the web UIs and a smart configuration magic, which automatically detects and responds to dependencies between Dogus. + +The Cloudogu EcoSystem is open source and it runs either on-premises or in the cloud. The Cloudogu EcoSystem is developed by Cloudogu GmbH under [AGPL-3.0-only](https://spdx.org/licenses/AGPL-3.0-only.html). + +## License +Copyright © 2020 - present Cloudogu GmbH +This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, version 3. +This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more details. +You should have received a copy of the GNU Affero General Public License along with this program. If not, see https://www.gnu.org/licenses/. +See [LICENSE](LICENSE) for details. + + +--- +MADE WITH :heart: FOR DEV ADDICTS. 
[Legal notice / Imprint](https://cloudogu.com/en/imprint/?mtm_campaign=ecosystem&mtm_kwd=imprint&mtm_source=github&mtm_medium=link) diff --git a/build/.editorconfig b/build/.editorconfig new file mode 100644 index 0000000..f5f657b --- /dev/null +++ b/build/.editorconfig @@ -0,0 +1,2 @@ +[*.mk] +indent_style = tab diff --git a/build/.gitignore b/build/.gitignore new file mode 100644 index 0000000..8197877 --- /dev/null +++ b/build/.gitignore @@ -0,0 +1,2 @@ +deb +tmp diff --git a/build/make/bats.mk b/build/make/bats.mk new file mode 100644 index 0000000..7e73553 --- /dev/null +++ b/build/make/bats.mk @@ -0,0 +1,64 @@ +WORKSPACE=/workspace +BATS_LIBRARY_DIR=$(TARGET_DIR)/bats_libs +TESTS_DIR=$(WORKDIR)/batsTests +BASH_TEST_REPORT_DIR=$(TARGET_DIR)/shell_test_reports +BASH_TEST_REPORTS=$(BASH_TEST_REPORT_DIR)/TestReport-*.xml +BATS_ASSERT=$(BATS_LIBRARY_DIR)/bats-assert +BATS_MOCK=$(BATS_LIBRARY_DIR)/bats-mock +BATS_SUPPORT=$(BATS_LIBRARY_DIR)/bats-support +BATS_FILE=$(BATS_LIBRARY_DIR)/bats-file +BATS_BASE_IMAGE?=bats/bats +BATS_CUSTOM_IMAGE?=cloudogu/bats +BATS_TAG?=1.11.0 +BATS_DIR=build/make/bats +BATS_WORKDIR="${WORKDIR}"/"${BATS_DIR}" + +.PHONY unit-test-shell: +unit-test-shell: unit-test-shell-$(ENVIRONMENT) + +$(BATS_ASSERT): + @git clone --depth 1 https://github.com/bats-core/bats-assert $@ + +$(BATS_MOCK): + @git clone --depth 1 https://github.com/grayhemp/bats-mock $@ + +$(BATS_SUPPORT): + @git clone --depth 1 https://github.com/bats-core/bats-support $@ + +$(BATS_FILE): + @git clone --depth 1 https://github.com/bats-core/bats-file $@ + +$(BASH_SRC): + BASH_SRC:=$(shell find "${WORKDIR}" -type f -name "*.sh") + +${BASH_TEST_REPORT_DIR}: $(TARGET_DIR) + @mkdir -p $(BASH_TEST_REPORT_DIR) + +unit-test-shell-ci: $(BASH_SRC) $(BASH_TEST_REPORT_DIR) $(BATS_ASSERT) $(BATS_MOCK) $(BATS_SUPPORT) $(BATS_FILE) + @echo "Test shell units on CI server" + @make unit-test-shell-generic + +unit-test-shell-local: $(BASH_SRC) $(PASSWD) $(ETCGROUP) $(HOME_DIR) buildTestImage 
$(BASH_TEST_REPORT_DIR) $(BATS_ASSERT) $(BATS_MOCK) $(BATS_SUPPORT) $(BATS_FILE) + @echo "Test shell units locally (in Docker)" + @docker run --rm \ + -v $(HOME_DIR):/home/$(USER) \ + -v $(WORKDIR):$(WORKSPACE) \ + -w $(WORKSPACE) \ + --entrypoint="" \ + $(BATS_CUSTOM_IMAGE):$(BATS_TAG) \ + "${BATS_DIR}"/customBatsEntrypoint.sh make unit-test-shell-generic-no-junit + +unit-test-shell-generic: + @bats --formatter junit --output ${BASH_TEST_REPORT_DIR} ${TESTS_DIR} + +unit-test-shell-generic-no-junit: + @bats ${TESTS_DIR} + +.PHONY buildTestImage: +buildTestImage: + @echo "Build shell test container" + @cd $(BATS_WORKDIR) && docker build \ + --build-arg=BATS_BASE_IMAGE=${BATS_BASE_IMAGE} \ + --build-arg=BATS_TAG=${BATS_TAG} \ + -t ${BATS_CUSTOM_IMAGE}:${BATS_TAG} \ + . \ No newline at end of file diff --git a/build/make/bats/Dockerfile b/build/make/bats/Dockerfile new file mode 100644 index 0000000..7167a94 --- /dev/null +++ b/build/make/bats/Dockerfile @@ -0,0 +1,9 @@ +ARG BATS_BASE_IMAGE +ARG BATS_TAG + +FROM ${BATS_BASE_IMAGE:-bats/bats}:${BATS_TAG:-1.11.0} + +# Make bash more findable by scripts and tests +RUN apk add make git bash +# suppress git "detected dubious ownership" error/warning for repos which are checked out later +RUN git config --global --add safe.directory /workspace \ No newline at end of file diff --git a/build/make/bats/customBatsEntrypoint.sh b/build/make/bats/customBatsEntrypoint.sh new file mode 100755 index 0000000..58856fe --- /dev/null +++ b/build/make/bats/customBatsEntrypoint.sh @@ -0,0 +1,6 @@ +#!/usr/bin/env bash +set -o errexit +set -o nounset +set -o pipefail + +"$@" \ No newline at end of file diff --git a/build/make/bower.mk b/build/make/bower.mk new file mode 100644 index 0000000..a2c76a9 --- /dev/null +++ b/build/make/bower.mk @@ -0,0 +1,28 @@ +##@ Bower dependency management + +BOWER_JSON=$(WORKDIR)/bower.json + +.PHONY: bower-install +bower-install: $(BOWER_TARGET) ## Execute yarn run bower (in Docker) + +ifeq ($(ENVIRONMENT), 
ci) + +$(BOWER_TARGET): $(BOWER_JSON) $(YARN_TARGET) + @echo "Yarn run bower on CI server" + @yarn run bower + +else + +$(BOWER_TARGET): $(BOWER_JSON) $(PASSWD) $(YARN_TARGET) + @echo "Executing bower..." + @docker run --rm \ + -e HOME=/tmp \ + -u "$(UID_NR):$(GID_NR)" \ + -v $(PASSWD):/etc/passwd:ro \ + -v $(WORKDIR):$(WORKDIR) \ + -w $(WORKDIR) \ + node:$(NODE_VERSION) \ + yarn run bower + @touch $@ + +endif diff --git a/build/make/build.mk b/build/make/build.mk new file mode 100644 index 0000000..d3581de --- /dev/null +++ b/build/make/build.mk @@ -0,0 +1,51 @@ +##@ Compiling go software + +ADDITIONAL_LDFLAGS?=-extldflags -static +LDFLAGS?=-ldflags "$(ADDITIONAL_LDFLAGS) -X main.Version=$(VERSION) -X main.CommitID=$(COMMIT_ID)" +GOIMAGE?=golang +GOTAG?=1.23 +GOOS?=linux +GOARCH?=amd64 +PRE_COMPILE?= +GO_ENV_VARS?= +CUSTOM_GO_MOUNT?=-v /tmp:/tmp +GO_BUILD_FLAGS?=-mod=vendor -a -tags netgo $(LDFLAGS) -installsuffix cgo -o $(BINARY) + +.PHONY: compile +compile: $(BINARY) ## Compile the go program via Docker + +compile-ci: ## Compile the go program without Docker + @echo "Compiling (CI)..." + make compile-generic + +compile-generic: + @echo "Compiling..." 
+# here is go called without mod capabilities because of error "go: error loading module requirements" +# see https://github.com/golang/go/issues/30868#issuecomment-474199640 + @$(GO_ENV_VARS) go build $(GO_BUILD_FLAGS) + + +ifeq ($(ENVIRONMENT), ci) + +$(BINARY): $(SRC) vendor $(PRE_COMPILE) + @echo "Built on CI server" + @make compile-generic + +else + +$(BINARY): $(SRC) vendor $(PASSWD) $(ETCGROUP) $(HOME_DIR) $(PRE_COMPILE) + @echo "Building locally (in Docker)" + @docker run --rm \ + -e GOOS=$(GOOS) \ + -e GOARCH=$(GOARCH) \ + -u "$(UID_NR):$(GID_NR)" \ + -v $(PASSWD):/etc/passwd:ro \ + -v $(ETCGROUP):/etc/group:ro \ + -v $(HOME_DIR):/home/$(USER) \ + -v $(WORKDIR):/go/src/github.com/cloudogu/$(ARTIFACT_ID) \ + $(CUSTOM_GO_MOUNT) \ + -w /go/src/github.com/cloudogu/$(ARTIFACT_ID) \ + $(GOIMAGE):$(GOTAG) \ + make compile-generic + +endif diff --git a/build/make/clean.mk b/build/make/clean.mk new file mode 100644 index 0000000..119387f --- /dev/null +++ b/build/make/clean.mk @@ -0,0 +1,15 @@ +##@ Cleaning + +.PHONY: clean +clean: $(ADDITIONAL_CLEAN) ## Remove target and tmp directories + rm -rf ${TARGET_DIR} + rm -rf ${TMP_DIR} + rm -rf ${UTILITY_BIN_PATH} + +.PHONY: dist-clean +dist-clean: clean ## Remove all generated directories + rm -rf node_modules + rm -rf public/vendor + rm -rf vendor + rm -rf npm-cache + rm -rf bower diff --git a/build/make/coder-lib.sh b/build/make/coder-lib.sh new file mode 100755 index 0000000..2b5d198 --- /dev/null +++ b/build/make/coder-lib.sh @@ -0,0 +1,182 @@ +#!/bin/bash +# a collection of helpful functions to update coder workspaces for rapid development +set -e -u -x -o pipefail + +function getContainerBin() { + if [ -x "$(command -v podman)" ]; then + echo "podman"; + else + echo "docker"; + fi +} + +function getCoderUser() { + # check if coder is installed, so that there is no problem with build and release targets if this is called before + if [ -x "$(command -v coder)" ]; then + coder users show me -o json | jq -r 
'.username'; + fi +} + +function getAllWorkspaces() { + coder list -c workspace | tail -n+2 +} + +function doesWorkspaceExist() { + coderUser="$1" + workspaceName="$2" + + workspace=$(coder list -a -o json | jq -r "select(.[].owner_name == \"${coderUser}\" and .[].name == \"${workspaceName}\") | .[0].name") + if [ -z "$workspace" ]; then + return 1 #workspace does not exist + else + return 0 + fi +} + +function generateUniqueWorkspaceName() { + local wantedWorkspacePrefix="$1" + # use time to make name unique + local time + time=$(date +'%H-%M-%S') + local lengthOfTime=${#time} + local delimiter='-' + local lengthOfDelimiter=${#delimiter} + # trim prefix, as workspace names are limited to 32 chars + local trimmedPrefix="${wantedWorkspacePrefix:0:$((32 - lengthOfDelimiter - lengthOfTime))}" + local uniqueName="${trimmedPrefix}${delimiter}${time}" + # '--' is forbidden in coder, replace multiple '-' with a single one. + echo "${uniqueName}" | awk '{gsub(/[-]+/,"-")}1' + # returns sth like 'myPrefix-12-45-23' +} + +function buildImage() { + local tag="$1" + local containerBuildDir="${2:-./container}" + local secretDir="${3:-./secrets}" + local containerExec="${4:-podman}" + + # include build-secrets if there are any + local secretArgs=() + if [ -d "$secretDir" ]; then + # shellcheck disable=SC2231 + for secretPath in $secretDir/*; do + # do not match .sh scripts + [[ $secretPath == *.sh ]] && continue + local secretName + secretName=$(basename "$secretPath") + secretArgs+=("--secret=id=$secretName,src=$secretDir/$secretName") + done + fi + + if [ "$containerExec" = "podman" ]; then + $containerExec build -t "$tag" --pull=newer "$containerBuildDir" "${secretArgs[@]}" + else + $containerExec build -t "$tag" --pull "$containerBuildDir" "${secretArgs[@]}" + fi +} + +function doTrivyConvert() { + local trivyFlags=$1 + local outputFile=$2 + local containerExec=$3 + local jsonScanToConvert=$4 + + local containerJsonScanFile="/tmp/scan.json" + + # shellcheck disable=SC2086 + 
# as globbing is what we want here + "$containerExec" run --rm --pull=always \ + -v trivy-cache:/root/.cache \ + -v "$jsonScanToConvert:$containerJsonScanFile" \ + aquasec/trivy -q \ + convert $trivyFlags "$containerJsonScanFile" > "$outputFile" +} + +function uploadTemplate() { + local templateDir="${1:?"Error. you need to add the template directory as the first parameter"}" + local templateName="${2:?"Error. you need to add the template name as the second parameter"}" + # for terraform variables (not editable by workspace users) + local variablesFile="${templateDir}/variables.yaml" + if [ -f "$variablesFile" ]; then + local doesVariablesFileExist=1 + fi + if ! coder template push -y -d "$templateDir" ${doesVariablesFileExist:+--variables-file "$variablesFile"} "$templateName"; then + # if template does not exist yet, create it in coder + coder template create -y -d "$templateDir" ${doesVariablesFileExist:+--variables-file "$variablesFile"} "$templateName" + fi +} + +function createNewWorkspace() { + local templateName="$1" + local workspaceName="$2" + # 3. param is optional, set it to autofill prompts for coder params + local templateDir="${3-unset}" + local richParametersFile="${templateDir}/rich-parameters.yaml" + if [ -n "${templateDir+x}" ] && [ -f "$richParametersFile" ]; then + local doesRichParametersFileExist=1 + fi + coder create -t "$templateName" -y "$workspaceName" ${doesRichParametersFileExist:+--rich-parameter-file "$richParametersFile"} +} + +function removeAllOtherWorkspaces() { + local CODER_USER="$1" + local WORKSPACE_PREFIX="$2" + local IGNORED_WORKSPACE="$3" + WORKSPACES="$(getAllWorkspaces)" + for ws in $WORKSPACES; do + if [ "$ws" != "$CODER_USER/$IGNORED_WORKSPACE" ] && [[ "$ws" =~ ^"$CODER_USER/$WORKSPACE_PREFIX" ]]; then + echo "delete $ws" + if ! 
coder delete "$ws" -y; then + #do it twice as podman always throws an error at the first time + coder delete "$ws" -y + fi + fi + done +} + +function updateWorkspace() { + local coderUser="$1" + local workspaceName="$2" + local qualifiedWorkspaceName="$coderUser/$workspaceName" + if ! coder stop "$qualifiedWorkspaceName" -y; then + #do it twice as podman always throws an error at the first time + coder stop "$qualifiedWorkspaceName" -y + fi + coder update "$qualifiedWorkspaceName" +} + +function startTestWorkspace() { + local coderUser="$1" + local templateDir="$2" + local workspacePrefix="$3" + local templateName="$4" + local reuseTestWorkspace="$5" + + local newWorkspaceName + if [ "$reuseTestWorkspace" = false ]; then + newWorkspaceName="$(generateUniqueWorkspaceName "$workspacePrefix")" + # do that before deleting others, so that i don't need to wait + createNewWorkspace "$templateName" "$newWorkspaceName" "$templateDir" + # trim prefix as the name of the workspace can also get trimmed + removeAllOtherWorkspaces "$coderUser" "${workspacePrefix:0:22}" "$newWorkspaceName" + else + newWorkspaceName="$workspacePrefix" + if ! 
doesWorkspaceExist "$coderUser" "$newWorkspaceName"; then + createNewWorkspace "$templateName" "$newWorkspaceName" "$templateDir" + else + updateWorkspace "$coderUser" "$newWorkspaceName" + fi + fi +} + +function uploadToNexus() { + local fileToUpload="$1" + local fileNameNexus="${fileToUpload##*/}" + local templateName="$2" + local releaseVersion="$3" + local nexusUrl="${4:-https://ecosystem.cloudogu.com/nexus/repository/itz-bund/coder}" + set +x #disable command printing because of the password + curl --progress-bar -u "$(cat secrets/nexus-user):$(cat secrets/nexus-pw)" --upload-file "$fileToUpload" \ + "$nexusUrl/$templateName/$releaseVersion/$fileNameNexus" + set -x +} \ No newline at end of file diff --git a/build/make/coder.mk b/build/make/coder.mk new file mode 100644 index 0000000..07f4d43 --- /dev/null +++ b/build/make/coder.mk @@ -0,0 +1,159 @@ +SHELL := /bin/bash + +IMAGE_TAG?=${IMAGE_REGISTRY}/coder/coder-${TEMPLATE_NAME}:${VERSION} +REUSE_TEST_WORKSPACE?=false + +#BUILD_DIR given via variables.mk +TEMPLATE_DIR=${WORKDIR}/template +CONTAINER_BUILD_DIR=${WORKDIR}/container +SECRETS_DIR=${WORKDIR}/secrets +CODER_LIB_PATH=${BUILD_DIR}/make/coder-lib.sh + +RELEASE_DIR=${WORKDIR}/release +MAKE_CHANGE_TOKEN_DIR=${RELEASE_DIR}/make +CONTAINER_FILE?=${CONTAINER_BUILD_DIR}/Dockerfile +CONTAINER_IMAGE_CHANGE_TOKEN?=${MAKE_CHANGE_TOKEN_DIR}/${TEMPLATE_NAME}_image_id.txt +CONTAINER_IMAGE_TAR?=${RELEASE_DIR}/${TEMPLATE_NAME}.tar +CONTAINER_IMAGE_TARGZ?=${RELEASE_DIR}/${TEMPLATE_NAME}.tar.gz +CONTAINER_IMAGE_TRIVY_SCAN_JSON?=${RELEASE_DIR}/trivy.json +CONTAINER_IMAGE_TRIVY_SCAN_TABLE?=${RELEASE_DIR}/trivy.txt +CONTAINER_IMAGE_TRIVY_SCAN_CRITICAL_TABLE?=${RELEASE_DIR}/trivy_critical.txt +CONTAINER_IMAGE_TRIVY_SCAN_CRITICAL_JSON?=${RELEASE_DIR}/trivy_critical.json + +IMAGE_REGISTRY?=registry.cloudogu.com +IMAGE_REGISTRY_USER_FILE?=${SECRETS_DIR}/harbor-user +IMAGE_REGISTRY_PW_FILE?=${SECRETS_DIR}/harbor-pw + +CHANGELOG_FILE=${WORKDIR}/CHANGELOG.md 
+TEMPLATE_RELEASE_TAR_GZ=${RELEASE_DIR}/${TEMPLATE_NAME}-template.tar.gz + +TEST_WORKSPACE_PREFIX?=test-${TEMPLATE_NAME} +CODER_USER?=$(shell . ${CODER_LIB_PATH} && getCoderUser) + +CONTAINER_BIN?=$(shell . ${CODER_LIB_PATH} && getContainerBin) +GOPASS_BIN?=$(shell command -v gopass 2> /dev/null) + +EXCLUDED_TEMPLATE_FILES?=rich-parameters.yaml variables.yaml + + +##@ Coder template development + +${SECRETS_DIR}: + mkdir -p ${SECRETS_DIR} + +${IMAGE_REGISTRY_USER_FILE}: ${SECRETS_DIR} +ifeq ($(ENVIRONMENT), local) + @echo "Found developer environment. creating secret ${IMAGE_REGISTRY_USER_FILE}" + @${GOPASS_BIN} show ces/websites/registry.cloudogu.com/robot_coder_jenkins | tail -n 1 | sed -e "s/^username: //" > ${IMAGE_REGISTRY_USER_FILE}; +else + @echo "Found CI environment. Please create secrets yourself" +endif + +${IMAGE_REGISTRY_PW_FILE}: ${SECRETS_DIR} +ifeq ($(ENVIRONMENT), local) + @echo "Found developer environment. creating secret ${IMAGE_REGISTRY_PW_FILE}" + @${GOPASS_BIN} show ces/websites/registry.cloudogu.com/robot_coder_jenkins | head -n 1 > ${IMAGE_REGISTRY_PW_FILE}; +else + @echo "Found CI environment. 
Please create secrets yourself" +endif + +.PHONY: loadGopassSecrets +loadGopassSecrets: ${IMAGE_REGISTRY_USER_FILE} ${IMAGE_REGISTRY_PW_FILE} ${ADDITIONAL_SECRETS_TARGET} ## load secrets from gopass into secret files, so that the build process works locally + +.PHONY: imageRegistryLogin +imageRegistryLogin: loadGopassSecrets ${IMAGE_REGISTRY_USER_FILE} ${IMAGE_REGISTRY_PW_FILE} ## log in to the registry + @${CONTAINER_BIN} login -u "$$(cat ${IMAGE_REGISTRY_USER_FILE})" --password-stdin '${IMAGE_REGISTRY}' < ${IMAGE_REGISTRY_PW_FILE} + +.PHONY: imageRegistryLogout +imageRegistryLogout: ## log out of the registry + @${CONTAINER_BIN} logout '${IMAGE_REGISTRY}' + +.PHONY: buildImage +buildImage: buildImage-$(ENVIRONMENT) ## build the container image + +.PHONY: buildImage-local +buildImage-local: imageRegistryLogin ${CONTAINER_IMAGE_CHANGE_TOKEN} ## build the container image locally + @echo "if the build is not triggered without a change in the dockerfile, try to delete ${CONTAINER_IMAGE_CHANGE_TOKEN}" + +.PHONY: buildImage-ci +buildImage-ci: ${CONTAINER_IMAGE_CHANGE_TOKEN} ## build the container image without automatic secret management + +${CONTAINER_IMAGE_CHANGE_TOKEN}: ${CONTAINER_FILE} + @. ${CODER_LIB_PATH} && buildImage ${IMAGE_TAG} ${CONTAINER_BUILD_DIR} ${SECRETS_DIR} ${CONTAINER_BIN} + @mkdir -p ${MAKE_CHANGE_TOKEN_DIR} + @${CONTAINER_BIN} image ls --format="{{.ID}}" ${IMAGE_TAG} > ${CONTAINER_IMAGE_CHANGE_TOKEN} + +.PHONY: uploadTemplate +uploadTemplate: ## upload template to coder server + @. ${CODER_LIB_PATH} && uploadTemplate ${TEMPLATE_DIR} ${TEMPLATE_NAME} + +.PHONY: startTestWorkspace +startTestWorkspace: ## start a test workspace with coder + @. 
${CODER_LIB_PATH} && startTestWorkspace ${CODER_USER} ${TEMPLATE_DIR} ${TEST_WORKSPACE_PREFIX} ${TEMPLATE_NAME} ${REUSE_TEST_WORKSPACE} + +.PHONY: createImageRelease +createImageRelease: ${CONTAINER_IMAGE_TARGZ} ## export the container image as a tar.gz + +${CONTAINER_IMAGE_TAR}: ${CONTAINER_IMAGE_CHANGE_TOKEN} + ${CONTAINER_BIN} save "${IMAGE_TAG}" -o ${CONTAINER_IMAGE_TAR} + +${CONTAINER_IMAGE_TARGZ}: ${CONTAINER_IMAGE_TAR} + gzip -f --keep "${CONTAINER_IMAGE_TAR}" + +.PHONY: trivyscanImage +trivyscanImage: ${CONTAINER_IMAGE_TRIVY_SCAN_JSON} ${CONTAINER_IMAGE_TRIVY_SCAN_TABLE} ${CONTAINER_IMAGE_TRIVY_SCAN_CRITICAL_TABLE} ${CONTAINER_IMAGE_TRIVY_SCAN_CRITICAL_JSON} ## do a trivy scan for the workspace image in various output formats + +${CONTAINER_IMAGE_TRIVY_SCAN_JSON}: ${CONTAINER_IMAGE_TAR} + ${CONTAINER_BIN} run --rm --pull=always \ + -v "trivy-cache:/root/.cache" \ + -v "${CONTAINER_IMAGE_TAR}:/tmp/image.tar" \ + aquasec/trivy -q \ + image --scanners vuln --input /tmp/image.tar -f json --timeout 15m \ + > ${CONTAINER_IMAGE_TRIVY_SCAN_JSON} + +${CONTAINER_IMAGE_TRIVY_SCAN_TABLE}: ${CONTAINER_IMAGE_TRIVY_SCAN_JSON} + @. ${CODER_LIB_PATH} && \ + doTrivyConvert "--format table" ${CONTAINER_IMAGE_TRIVY_SCAN_TABLE} ${CONTAINER_BIN} ${CONTAINER_IMAGE_TRIVY_SCAN_JSON} + +${CONTAINER_IMAGE_TRIVY_SCAN_CRITICAL_TABLE}: ${CONTAINER_IMAGE_TRIVY_SCAN_JSON} + @. ${CODER_LIB_PATH} && \ + doTrivyConvert "--format table --severity CRITICAL" ${CONTAINER_IMAGE_TRIVY_SCAN_CRITICAL_TABLE} ${CONTAINER_BIN} ${CONTAINER_IMAGE_TRIVY_SCAN_JSON} + +${CONTAINER_IMAGE_TRIVY_SCAN_CRITICAL_JSON}: ${CONTAINER_IMAGE_TRIVY_SCAN_JSON} + @. 
${CODER_LIB_PATH} && \ + doTrivyConvert "--format json --severity CRITICAL" ${CONTAINER_IMAGE_TRIVY_SCAN_CRITICAL_JSON} ${CONTAINER_BIN} ${CONTAINER_IMAGE_TRIVY_SCAN_JSON} + +.PHONY: createTemplateRelease +createTemplateRelease: ## generate template.tar.gz with all files needed for customers + # remove release dir first as 'cp' cannot merge and will place the source dir inside the target dir if it already exists + rm -rf "${RELEASE_DIR}/${TEMPLATE_NAME}" + cp -r "${TEMPLATE_DIR}" "${RELEASE_DIR}/${TEMPLATE_NAME}/" + #copy changelog + cp "${CHANGELOG_FILE}" "${RELEASE_DIR}/${TEMPLATE_NAME}/" + # remove excludes + for file in "${EXCLUDED_TEMPLATE_FILES}"; do \ + rm -f "${RELEASE_DIR}/${TEMPLATE_NAME}/$$file"; \ + done + tar -czf "${RELEASE_DIR}/${TEMPLATE_NAME}-template.tar.gz" -C "${RELEASE_DIR}" "${TEMPLATE_NAME}" + +.PHONY: createRelease ## generate template- and container archives and the trivy scans +createRelease: createTemplateRelease ${CONTAINER_IMAGE_TARGZ} trivyscanImage ## create the image.tar.gz, template.tar.gz and trivy scans + +.PHONY: cleanCoderRelease +cleanCoderRelease: ## clean release directory + rm -rf "${RELEASE_DIR}" + mkdir -p "${RELEASE_DIR}" + +.PHONY: pushImage +pushImage: ## push the container image into the registry + ${CONTAINER_BIN} push ${IMAGE_TAG} + +.PHONY: uploadRelease +uploadRelease: createTemplateRelease ${CONTAINER_IMAGE_TARGZ} ${CONTAINER_IMAGE_TRIVY_SCAN_JSON} ${CONTAINER_IMAGE_TRIVY_SCAN_TABLE} ${CONTAINER_IMAGE_TRIVY_SCAN_CRITICAL_TABLE} ${CONTAINER_IMAGE_TRIVY_SCAN_CRITICAL_JSON} ## upload release artifacts to nexus + @. ${CODER_LIB_PATH} && uploadToNexus ${TEMPLATE_RELEASE_TAR_GZ} ${TEMPLATE_NAME} ${VERSION} + @. ${CODER_LIB_PATH} && uploadToNexus ${CONTAINER_IMAGE_TRIVY_SCAN_JSON} ${TEMPLATE_NAME} ${VERSION} + @. ${CODER_LIB_PATH} && uploadToNexus ${CONTAINER_IMAGE_TRIVY_SCAN_TABLE} ${TEMPLATE_NAME} ${VERSION} + @. 
${CODER_LIB_PATH} && uploadToNexus ${CONTAINER_IMAGE_TRIVY_SCAN_CRITICAL_TABLE} ${TEMPLATE_NAME} ${VERSION} + @. ${CODER_LIB_PATH} && uploadToNexus ${CONTAINER_IMAGE_TRIVY_SCAN_CRITICAL_JSON} ${TEMPLATE_NAME} ${VERSION} + @. ${CODER_LIB_PATH} && uploadToNexus ${CONTAINER_IMAGE_TARGZ} ${TEMPLATE_NAME} ${VERSION} + diff --git a/build/make/dependencies-gomod.mk b/build/make/dependencies-gomod.mk new file mode 100644 index 0000000..3b3b989 --- /dev/null +++ b/build/make/dependencies-gomod.mk @@ -0,0 +1,8 @@ +##@ Go mod dependency management + +.PHONY: dependencies +dependencies: vendor ## Install dependencies using go mod + +vendor: go.mod go.sum + @echo "Installing dependencies using go modules..." + ${GO_CALL} mod vendor diff --git a/build/make/deploy-debian.mk b/build/make/deploy-debian.mk new file mode 100644 index 0000000..89b0fbe --- /dev/null +++ b/build/make/deploy-debian.mk @@ -0,0 +1,65 @@ +##@ Debian package deployment + +# This Makefile holds all targets for deploying and undeploying +# Uses the variable APT_REPO to determine which apt repos should be used to deploy + +# Attention: This Makefile depends on package-debian.mk! + +.PHONY: deploy-check +deploy-check: + @case X"${VERSION}" in *-SNAPSHOT) echo "i will not upload a snaphot version for you" ; exit 1; esac; + @if [ X"${APT_API_USERNAME}" = X"" ] ; then echo "supply an APT_API_USERNAME environment variable"; exit 1; fi; + @if [ X"${APT_API_PASSWORD}" = X"" ] ; then echo "supply an APT_API_PASSWORD environment variable"; exit 1; fi; + @if [ X"${APT_API_SIGNPHRASE}" = X"" ] ; then echo "supply an APT_API_SIGNPHRASE environment variable"; exit 1; fi; + +.PHONY: upload-package +upload-package: deploy-check $(DEBIAN_PACKAGE) + @echo "... uploading package" + @$(APTLY) -F file=@"${DEBIAN_PACKAGE}" "${APT_API_BASE_URL}/files/$$(basename ${DEBIAN_PACKAGE})" + +.PHONY: add-package-to-repo +add-package-to-repo: upload-package +ifeq ($(APT_REPO), ces-premium) + @echo "... 
add package to ces-premium repository" + @$(APTLY) -X POST "${APT_API_BASE_URL}/repos/ces-premium/file/$$(basename ${DEBIAN_PACKAGE})" +else + @echo "\n... add package to ces repository" + @$(APTLY) -X POST "${APT_API_BASE_URL}/repos/ces/file/$$(basename ${DEBIAN_PACKAGE})" +endif + +define aptly_publish + $(APTLY) -X PUT -H "Content-Type: application/json" --data '{"Signing": { "Batch": true, "Passphrase": "${APT_API_SIGNPHRASE}"}}' ${APT_API_BASE_URL}/publish/$(1)/$(2) +endef + +.PHONY: publish +publish: + @echo "\n... publish packages" +ifeq ($(APT_REPO), ces-premium) + @$(call aptly_publish,ces-premium,bionic) +else + @$(call aptly_publish,ces,focal) + @$(call aptly_publish,ces,bionic) +endif + +.PHONY: deploy +deploy: add-package-to-repo publish ## Deploy package to apt repository + +define aptly_undeploy + PREF=$$(${APTLY} "${APT_API_BASE_URL}/repos/$(1)/packages?q=${ARTIFACT_ID}%20(${VERSION})"); \ + ${APTLY} -X DELETE -H 'Content-Type: application/json' --data "{\"PackageRefs\": $${PREF}}" ${APT_API_BASE_URL}/repos/$(1)/packages +endef + +.PHONY: remove-package-from-repo +remove-package-from-repo: +ifeq ($(APT_REPO), ces-premium) + @$(call aptly_undeploy,ces-premium) +else + @$(call aptly_undeploy,ces) +endif + +.PHONY: undeploy +undeploy: deploy-check remove-package-from-repo publish ## Undeploy package from apt repository + +.PHONE: lint-deb-package +lint-deb-package: debian ## Lint debian package + @lintian -i $(DEBIAN_PACKAGE) diff --git a/build/make/digital-signature.mk b/build/make/digital-signature.mk new file mode 100644 index 0000000..c0eba35 --- /dev/null +++ b/build/make/digital-signature.mk @@ -0,0 +1,24 @@ +##@ Digital signatures + +CHECKSUM=$(TARGET_DIR)/$(ARTIFACT_ID).sha256sum + +.PHONY: checksum +checksum: $(CHECKSUM) ## Generate checksums +# we have to depend on target dir, because we want to rebuild the checksum +# if one of the artefacts was changed +$(CHECKSUM): $(TARGET_DIR) + @echo "Generating Checksums" + @cd $(TARGET_DIR); find . 
-maxdepth 1 -not -type d | egrep -v ".(sha256sum|asc)$$" | xargs shasum -a 256 > $$(basename $@) + +SIGNATURE=$(CHECKSUM).asc + +.PHONY: signature +signature: $(SIGNATURE) ## Generate signature +$(SIGNATURE): $(CHECKSUM) + @echo "Generating Signature" + @gpg --batch --yes --detach-sign --armor -o $@ $< + +.PHONY: signature-ci +signature-ci: $(CHECKSUM) + @echo "Generating Signature" + @gpg2 --batch --pinentry-mode loopback --passphrase="${passphrase}" --yes --detach-sign --armor -o ${SIGNATURE} $< diff --git a/build/make/k8s-component.mk b/build/make/k8s-component.mk new file mode 100644 index 0000000..6c1f6c4 --- /dev/null +++ b/build/make/k8s-component.mk @@ -0,0 +1,155 @@ +COMPONENT_DEV_VERSION?=${VERSION}-dev + +include ${BUILD_DIR}/make/k8s.mk + +ifeq (${RUNTIME_ENV}, local) + BINARY_HELM_ADDITIONAL_PUSH_ARGS?=--plain-http +endif +BINARY_HELM_ADDITIONAL_PACK_ARGS?= +BINARY_HELM_ADDITIONAL_UNINST_ARGS?= +BINARY_HELM_ADDITIONAL_UPGR_ARGS?= + +HELM_TARGET_DIR ?= $(K8S_RESOURCE_TEMP_FOLDER)/helm +HELM_SOURCE_DIR ?= k8s/helm +HELM_RELEASE_TGZ=${HELM_TARGET_DIR}/${ARTIFACT_ID}-${VERSION}.tgz +HELM_DEV_RELEASE_TGZ=${HELM_TARGET_DIR}/${ARTIFACT_ID}-${COMPONENT_DEV_VERSION}.tgz +HELM_ARTIFACT_NAMESPACE?=k8s +ifeq (${RUNTIME_ENV}, remote) + HELM_ARTIFACT_NAMESPACE?=testing/k8s +endif + +K8S_RESOURCE_COMPONENT ?= "${K8S_RESOURCE_TEMP_FOLDER}/component-${ARTIFACT_ID}-${VERSION}.yaml" +K8S_RESOURCE_COMPONENT_CR_TEMPLATE_YAML ?= $(BUILD_DIR)/make/k8s-component.tpl +# HELM_PRE_GENERATE_TARGETS allows to execute targets that affect Helm source files AND Helm target files. +HELM_PRE_GENERATE_TARGETS ?= +# HELM_POST_GENERATE_TARGETS allows to execute targets that only affect Helm target files. +HELM_POST_GENERATE_TARGETS ?= +HELM_PRE_APPLY_TARGETS ?= +COMPONENT_PRE_APPLY_TARGETS ?= + +# This can be used by components with own images to build and push to the dev registry. +# These components should override this variable with `image-import`. 
+IMAGE_IMPORT_TARGET?= + +##@ K8s - Helm general +.PHONY: helm-init-chart +helm-init-chart: ${BINARY_HELM} ## Creates a Chart.yaml-template with zero values + @echo "Initialize ${HELM_SOURCE_DIR}/Chart.yaml..." + @mkdir -p ${HELM_SOURCE_DIR}/tmp/ + @${BINARY_HELM} create ${HELM_SOURCE_DIR}/tmp/${ARTIFACT_ID} + @cp ${HELM_SOURCE_DIR}/tmp/${ARTIFACT_ID}/Chart.yaml ${HELM_SOURCE_DIR}/ + @rm -dr ${HELM_SOURCE_DIR}/tmp + @sed -i 's/appVersion: ".*"/appVersion: "0.0.0-replaceme"/' ${HELM_SOURCE_DIR}/Chart.yaml + @sed -i 's/version: .*/version: 0.0.0-replaceme/' ${HELM_SOURCE_DIR}/Chart.yaml + +.PHONY: helm-generate +helm-generate: ${HELM_TARGET_DIR}/Chart.yaml ${HELM_POST_GENERATE_TARGETS} ## Generates the final helm chart. + +# this is phony because of it is easier this way than the makefile-single-run way +.PHONY: ${HELM_TARGET_DIR}/Chart.yaml +${HELM_TARGET_DIR}/Chart.yaml: $(K8S_RESOURCE_TEMP_FOLDER) validate-chart ${HELM_PRE_GENERATE_TARGETS} copy-helm-files + @echo "Generate Helm chart..." + @if [[ ${STAGE} == "development" ]]; then \ + sed -i 's/appVersion: "0.0.0-replaceme"/appVersion: '$(COMPONENT_DEV_VERSION)'/' ${HELM_TARGET_DIR}/Chart.yaml; \ + sed -i 's/version: 0.0.0-replaceme/version: '$(COMPONENT_DEV_VERSION)'/' ${HELM_TARGET_DIR}/Chart.yaml; \ + else \ + sed -i 's/appVersion: "0.0.0-replaceme"/appVersion: "${VERSION}"/' ${HELM_TARGET_DIR}/Chart.yaml; \ + sed -i 's/version: 0.0.0-replaceme/version: ${VERSION}/' ${HELM_TARGET_DIR}/Chart.yaml; \ + fi + +.PHONY: copy-helm-files +copy-helm-files: + @echo "Copying Helm files..." + @rm -drf ${HELM_TARGET_DIR} # delete folder, so the chart is newly created. + @mkdir -p ${HELM_TARGET_DIR}/templates + @cp -r ${HELM_SOURCE_DIR}/** ${HELM_TARGET_DIR} + +.PHONY: validate-chart +validate-chart: + @if [ ! 
-f ${HELM_SOURCE_DIR}/Chart.yaml ] ; then \ + echo "Could not find source Helm chart under \$${HELM_SOURCE_DIR}/Chart.yaml" ; \ + exit 22 ; \ + fi + +.PHONY: helm-update-dependencies +helm-update-dependencies: ${BINARY_HELM} ## Update Helm chart dependencies + @$(BINARY_HELM) dependency update "${HELM_SOURCE_DIR}" + +##@ K8s - Helm dev targets + +.PHONY: helm-apply +helm-apply: ${BINARY_HELM} check-k8s-namespace-env-var ${IMAGE_IMPORT_TARGET} helm-generate ${HELM_PRE_APPLY_TARGETS} ## Generates and installs the Helm chart. + @echo "Apply generated helm chart" + @${BINARY_HELM} --kube-context="${KUBE_CONTEXT_NAME}" upgrade -i ${ARTIFACT_ID} ${HELM_TARGET_DIR} ${BINARY_HELM_ADDITIONAL_UPGR_ARGS} --namespace ${NAMESPACE} + +.PHONY: helm-delete +helm-delete: ${BINARY_HELM} check-k8s-namespace-env-var ## Uninstalls the current Helm chart. + @echo "Uninstall helm chart" + @${BINARY_HELM} --kube-context="${KUBE_CONTEXT_NAME}" uninstall ${ARTIFACT_ID} --namespace=${NAMESPACE} ${BINARY_HELM_ADDITIONAL_UNINST_ARGS} || true + +.PHONY: helm-reinstall +helm-reinstall: helm-delete helm-apply ## Uninstalls the current helm chart and reinstalls it. + +.PHONY: helm-chart-import +helm-chart-import: ${CHECK_VAR_TARGETS} helm-generate helm-package ${IMAGE_IMPORT_TARGET} ## Imports the currently available chart into the cluster-local registry. + @if [[ ${STAGE} == "development" ]]; then \ + echo "Import ${HELM_DEV_RELEASE_TGZ} into K8s cluster ${CES_REGISTRY_HOST}..."; \ + ${BINARY_HELM} push ${HELM_DEV_RELEASE_TGZ} oci://${CES_REGISTRY_HOST}/${HELM_ARTIFACT_NAMESPACE} ${BINARY_HELM_ADDITIONAL_PUSH_ARGS}; \ + else \ + echo "Import ${HELM_RELEASE_TGZ} into K8s cluster ${CES_REGISTRY_HOST}..."; \ + ${BINARY_HELM} push ${HELM_RELEASE_TGZ} oci://${CES_REGISTRY_HOST}/${HELM_ARTIFACT_NAMESPACE} ${BINARY_HELM_ADDITIONAL_PUSH_ARGS}; \ + fi + @echo "Done." 
+ +##@ K8s - Helm release targets + +.PHONY: helm-generate-release +helm-generate-release: update-urls ## Generates the final helm chart with release URLs. + + +.PHONY: helm-package +helm-package: helm-delete-existing-tgz ${HELM_RELEASE_TGZ} ## Generates and packages the helm chart with release URLs. + +${HELM_RELEASE_TGZ}: ${BINARY_HELM} ${HELM_TARGET_DIR}/Chart.yaml ${HELM_POST_GENERATE_TARGETS} ## Generates and packages the helm chart with release URLs. + @echo "Package generated helm chart" + @if [[ ${STAGE} == "development" ]]; then \ + echo "WARNING: You are using a development environment" ; \ + fi + @${BINARY_HELM} package ${HELM_TARGET_DIR} -d ${HELM_TARGET_DIR} ${BINARY_HELM_ADDITIONAL_PACK_ARGS} + +.PHONY: helm-delete-existing-tgz +helm-delete-existing-tgz: ## Remove an existing Helm package from the target directory. + @echo "Delete ${HELM_RELEASE_TGZ}*" + @rm -f ${HELM_TARGET_DIR}/${ARTIFACT_ID}-*.tgz + +##@ K8s - Helm lint targets + +.PHONY: helm-lint +helm-lint: $(BINARY_HELM) helm-generate + @$(BINARY_HELM) lint "${HELM_TARGET_DIR}" + +##@ K8s - Component dev targets + +.PHONY: component-generate +component-generate: ${K8S_RESOURCE_COMPONENT_CR_TEMPLATE_YAML} ${COMPONENT_POST_GENERATE_TARGETS} ## Generate the component yaml resource. 
+ +${K8S_RESOURCE_COMPONENT_CR_TEMPLATE_YAML}: ${K8S_RESOURCE_TEMP_FOLDER} + @echo "Generating temporary K8s component resource: ${K8S_RESOURCE_COMPONENT}" + @if [[ ${STAGE} == "development" ]]; then \ + sed "s|NAMESPACE|$(HELM_ARTIFACT_NAMESPACE)|g" "${K8S_RESOURCE_COMPONENT_CR_TEMPLATE_YAML}" | sed "s|NAME|$(ARTIFACT_ID)|g" | sed "s|VERSION|$(COMPONENT_DEV_VERSION)|g" > "${K8S_RESOURCE_COMPONENT}"; \ + else \ + sed "s|NAMESPACE|$(HELM_ARTIFACT_NAMESPACE)|g" "${K8S_RESOURCE_COMPONENT_CR_TEMPLATE_YAML}" | sed "s|NAME|$(ARTIFACT_ID)|g" | sed "s|VERSION|$(VERSION)|g" > "${K8S_RESOURCE_COMPONENT}"; \ + fi + +.PHONY: component-apply +component-apply: check-k8s-namespace-env-var ${COMPONENT_PRE_APPLY_TARGETS} ${IMAGE_IMPORT_TARGET} helm-generate helm-chart-import component-generate ## Applies the component yaml resource to the actual defined context. + @kubectl apply -f "${K8S_RESOURCE_COMPONENT}" --namespace="${NAMESPACE}" --context="${KUBE_CONTEXT_NAME}" + @echo "Done." + +.PHONY: component-delete +component-delete: check-k8s-namespace-env-var component-generate $(K8S_POST_GENERATE_TARGETS) ## Deletes the component yaml resource from the actual defined context. + @kubectl delete -f "${K8S_RESOURCE_COMPONENT}" --namespace="${NAMESPACE}" --context="${KUBE_CONTEXT_NAME}" || true + @echo "Done." + +.PHONY: component-reinstall +component-reinstall: component-delete component-apply ## Reinstalls the component yaml resource from the actual defined context. diff --git a/build/make/k8s-component.tpl b/build/make/k8s-component.tpl new file mode 100644 index 0000000..fa0eaa6 --- /dev/null +++ b/build/make/k8s-component.tpl @@ -0,0 +1,13 @@ +# Use the property .spec.deployNamespace to define the namespace the component should be deployed to. +# Make environment variable 'COMPONENT_DEPLOY_NAMESPACE' is responsible for that. +# If 'COMPONENT_DEPLOY_NAMESPACE' is empty the property 'deployNamespace' will be deleted. 
+apiVersion: k8s.cloudogu.com/v1 +kind: Component +metadata: + name: NAME + labels: + app: ces +spec: + name: NAME + namespace: NAMESPACE + version: VERSION \ No newline at end of file diff --git a/build/make/k8s-controller.mk b/build/make/k8s-controller.mk new file mode 100644 index 0000000..ea3d457 --- /dev/null +++ b/build/make/k8s-controller.mk @@ -0,0 +1,56 @@ +# This script requires the k8s.mk script +include ${BUILD_DIR}/make/k8s-component.mk +include ${BUILD_DIR}/make/k8s-crd.mk + +## Variables + +# make sure to create a statically linked binary otherwise it may quit with +# "exec user process caused: no such file or directory" +GO_BUILD_FLAGS=-mod=vendor -a -tags netgo,osusergo $(LDFLAGS) -o $(BINARY) + +# remove DWARF symbol table and strip other symbols to shave ~13 MB from binary +ADDITIONAL_LDFLAGS=-extldflags -static -w -s + +# ENVTEST_K8S_VERSION refers to the version of kubebuilder assets to be downloaded by envtest binary. +ENVTEST_K8S_VERSION = 1.23 +K8S_INTEGRATION_TEST_DIR=${TARGET_DIR}/k8s-integration-test + +##@ K8s - EcoSystem + +.PHONY: build +build: helm-apply ## Builds a new version of the dogu and deploys it into the K8s-EcoSystem. + +##@ Release + +.PHONY: controller-release +controller-release: ## Interactively starts the release workflow. + @echo "Starting git flow release..." + @build/make/release.sh controller-tool + +##@ K8s - Development + +.PHONY: build-controller +build-controller: ${SRC} compile ## Builds the controller Go binary. + +# Allows to perform tasks before locally running the controller +K8S_RUN_PRE_TARGETS ?= +.PHONY: run +run: generate-deepcopy $(K8S_RUN_PRE_TARGETS) ## Run a controller from your host. + go run -ldflags "-X main.Version=$(VERSION)" ./main.go + +##@ K8s - Integration test with envtest + +$(K8S_INTEGRATION_TEST_DIR): + @mkdir -p $@ + +.PHONY: k8s-integration-test +k8s-integration-test: $(K8S_INTEGRATION_TEST_DIR) ${ENVTEST} ## Run k8s integration tests. + @echo "Running K8s integration tests..." 
+ @KUBEBUILDER_ASSETS="$(shell $(ENVTEST) use $(ENVTEST_K8S_VERSION) -p path)" go test -tags=k8s_integration ./... -coverprofile ${K8S_INTEGRATION_TEST_DIR}/report-k8s-integration.out + +##@ Controller specific targets + +.PHONY: generate-deepcopy +generate-deepcopy: ${CONTROLLER_GEN} ## Generate code containing DeepCopy* method implementations. + @echo "Auto-generate deepcopy functions..." + @$(CONTROLLER_GEN) object:headerFile="hack/boilerplate.go.txt" paths="./..." diff --git a/build/make/k8s-crd.mk b/build/make/k8s-crd.mk new file mode 100644 index 0000000..090b029 --- /dev/null +++ b/build/make/k8s-crd.mk @@ -0,0 +1,115 @@ +ARTIFACT_CRD_ID = $(ARTIFACT_ID)-crd +DEV_CRD_VERSION ?= ${VERSION}-dev +HELM_CRD_SOURCE_DIR ?= ${WORKDIR}/k8s/helm-crd +HELM_CRD_TARGET_DIR ?= $(K8S_RESOURCE_TEMP_FOLDER)/helm-crd +HELM_CRD_RELEASE_TGZ = ${HELM_CRD_TARGET_DIR}/${ARTIFACT_CRD_ID}-${VERSION}.tgz +HELM_CRD_DEV_RELEASE_TGZ = ${HELM_CRD_TARGET_DIR}/${ARTIFACT_CRD_ID}-${DEV_CRD_VERSION}.tgz + +K8S_RESOURCE_CRD_COMPONENT ?= "${K8S_RESOURCE_TEMP_FOLDER}/component-${ARTIFACT_CRD_ID}-${VERSION}.yaml" +K8S_RESOURCE_COMPONENT_CR_TEMPLATE_YAML ?= $(BUILD_DIR)/make/k8s-component.tpl +# CRD_POST_MANIFEST_TARGETS can be used to post-process CRD YAMLs after their creation. +CRD_POST_MANIFEST_TARGETS ?= crd-add-labels + +# This can be used by external components to prevent generate and copy controller manifests by overriding with an empty value. +CRD_HELM_MANIFEST_TARGET?=manifests + +##@ K8s - CRD targets + +.PHONY: manifests +manifests: ${CONTROLLER_GEN} manifests-run ${CRD_POST_MANIFEST_TARGETS} ## Generate CustomResourceDefinition YAMLs. + +.PHONY: manifests-run +manifests-run: + @echo "Generate manifests..." + @$(CONTROLLER_GEN) crd paths="./..." output:crd:artifacts:config=${HELM_CRD_SOURCE_DIR}/templates + +.PHONY: crd-add-labels +crd-add-labels: $(BINARY_YQ) + @echo "Adding labels to CRD..." 
+ @for file in ${HELM_CRD_SOURCE_DIR}/templates/*.yaml ; do \ + $(BINARY_YQ) -i e ".metadata.labels.app = \"ces\"" $${file} ;\ + $(BINARY_YQ) -i e ".metadata.labels.\"app.kubernetes.io/name\" = \"${ARTIFACT_ID}\"" $${file} ;\ + done + +.PHONY: crd-helm-generate ## Generates the Helm CRD chart +crd-helm-generate: ${CRD_HELM_MANIFEST_TARGET} validate-crd-chart ${HELM_CRD_TARGET_DIR}/Chart.yaml ${K8S_POST_CRD_HELM_GENERATE_TARGETS} + +# this is phony because of it is easier this way than the makefile-single-run way +.PHONY: ${HELM_CRD_TARGET_DIR}/Chart.yaml +${HELM_CRD_TARGET_DIR}/Chart.yaml: ${K8S_RESOURCE_TEMP_FOLDER} + @echo "Copying Helm CRD files..." + @rm -drf ${HELM_CRD_TARGET_DIR}/templates + @mkdir -p ${HELM_CRD_TARGET_DIR}/templates + @cp -r ${HELM_CRD_SOURCE_DIR}/** ${HELM_CRD_TARGET_DIR} + + @echo "Generate Helm CRD chart..." + @sed -i 's/name: artifact-crd-replaceme/name: ${ARTIFACT_CRD_ID}/' ${HELM_CRD_TARGET_DIR}/Chart.yaml + @if [[ ${STAGE} == "development" ]]; then \ + sed -i 's/appVersion: "0.0.0-replaceme"/appVersion: "${DEV_CRD_VERSION}"/' ${HELM_CRD_TARGET_DIR}/Chart.yaml; \ + sed -i 's/version: 0.0.0-replaceme/version: ${DEV_CRD_VERSION}/' ${HELM_CRD_TARGET_DIR}/Chart.yaml; \ + else \ + sed -i 's/appVersion: "0.0.0-replaceme"/appVersion: "${VERSION}"/' ${HELM_CRD_TARGET_DIR}/Chart.yaml; \ + sed -i 's/version: 0.0.0-replaceme/version: ${VERSION}/' ${HELM_CRD_TARGET_DIR}/Chart.yaml; \ + fi + +.PHONY: validate-crd-chart +validate-crd-chart: + @if [ ! -f ${HELM_CRD_SOURCE_DIR}/Chart.yaml ] ; then \ + echo "Could not find CRD source Helm chart under \$${HELM_CRD_SOURCE_DIR}/Chart.yaml" ; \ + exit 23 ; \ + fi + +.PHONY: crd-helm-apply +crd-helm-apply: ${BINARY_HELM} check-k8s-namespace-env-var crd-helm-generate ## Generates and installs the Helm CRD chart. 
+ @echo "Apply generated Helm CRD chart" + @${BINARY_HELM} --kube-context="${KUBE_CONTEXT_NAME}" upgrade -i ${ARTIFACT_CRD_ID} ${HELM_CRD_TARGET_DIR} ${BINARY_HELM_ADDITIONAL_UPGR_ARGS} --namespace ${NAMESPACE} + +.PHONY: crd-helm-delete +crd-helm-delete: ${BINARY_HELM} check-k8s-namespace-env-var ## Uninstalls the current Helm CRD chart. + @echo "Uninstall Helm CRD chart" + @${BINARY_HELM} --kube-context="${KUBE_CONTEXT_NAME}" uninstall ${ARTIFACT_CRD_ID} --namespace=${NAMESPACE} ${BINARY_HELM_ADDITIONAL_UNINST_ARGS} || true + +.PHONY: crd-helm-package +crd-helm-package: crd-helm-delete-existing-tgz ${HELM_CRD_RELEASE_TGZ} ## Generates and packages the Helm CRD chart. + +.PHONY: crd-helm-delete-existing-tgz +crd-helm-delete-existing-tgz: ## Remove an existing Helm CRD package. + @rm -f ${HELM_CRD_TARGET_DIR}/${ARTIFACT_CRD_ID}-*.tgz + +${HELM_CRD_RELEASE_TGZ}: ${BINARY_HELM} crd-helm-generate ## Generates and packages the Helm CRD chart. + @echo "Package generated helm crd-chart" + @${BINARY_HELM} package ${HELM_CRD_TARGET_DIR} -d ${HELM_CRD_TARGET_DIR} ${BINARY_HELM_ADDITIONAL_PACK_ARGS} + +.PHONY: crd-helm-chart-import +crd-helm-chart-import: ${CHECK_VAR_TARGETS} check-k8s-artifact-id crd-helm-generate crd-helm-package ## Imports the currently available Helm CRD chart into the cluster-local registry. + @if [[ ${STAGE} == "development" ]]; then \ + echo "Import ${HELM_CRD_DEV_RELEASE_TGZ} into K8s cluster ${CES_REGISTRY_HOST}..."; \ + ${BINARY_HELM} push ${HELM_CRD_DEV_RELEASE_TGZ} oci://${CES_REGISTRY_HOST}/${HELM_ARTIFACT_NAMESPACE} ${BINARY_HELM_ADDITIONAL_PUSH_ARGS}; \ + else \ + echo "Import ${HELM_CRD_RELEASE_TGZ} into K8s cluster ${CES_REGISTRY_HOST}..."; \ + ${BINARY_HELM} push ${HELM_CRD_RELEASE_TGZ} oci://${CES_REGISTRY_HOST}/${HELM_ARTIFACT_NAMESPACE} ${BINARY_HELM_ADDITIONAL_PUSH_ARGS}; \ + fi + @echo "Done." 
+ +.PHONY: crd-helm-lint +crd-helm-lint: $(BINARY_HELM) crd-helm-generate + @$(BINARY_HELM) lint "${HELM_CRD_TARGET_DIR}" + +.PHONY: crd-component-generate +crd-component-generate: ${K8S_RESOURCE_TEMP_FOLDER} ## Generate the CRD component YAML resource. + @echo "Generating temporary K8s crd-component resource: ${K8S_RESOURCE_CRD_COMPONENT}" + @if [[ ${STAGE} == "development" ]]; then \ + sed "s|NAMESPACE|$(HELM_ARTIFACT_NAMESPACE)|g" "${K8S_RESOURCE_COMPONENT_CR_TEMPLATE_YAML}" | sed "s|NAME|$(ARTIFACT_CRD_ID)|g" | sed "s|VERSION|$(DEV_CRD_VERSION)|g" > "${K8S_RESOURCE_CRD_COMPONENT}"; \ + else \ + sed "s|NAMESPACE|$(HELM_ARTIFACT_NAMESPACE)|g" "${K8S_RESOURCE_COMPONENT_CR_TEMPLATE_YAML}" | sed "s|NAME|$(ARTIFACT_CRD_ID)|g" | sed "s|VERSION|$(VERSION)|g" > "${K8S_RESOURCE_CRD_COMPONENT}"; \ + fi + +.PHONY: crd-component-apply +crd-component-apply: check-k8s-namespace-env-var crd-helm-chart-import crd-component-generate ## Applies the CRD component YAML resource to the actual defined context. + @kubectl apply -f "${K8S_RESOURCE_CRD_COMPONENT}" --namespace="${NAMESPACE}" --context="${KUBE_CONTEXT_NAME}" + @echo "Done." + +.PHONY: crd-component-delete +crd-component-delete: check-k8s-namespace-env-var crd-component-generate ## Deletes the CRD component YAML resource from the actual defined context. + @kubectl delete -f "${K8S_RESOURCE_CRD_COMPONENT}" --namespace="${NAMESPACE}" --context="${KUBE_CONTEXT_NAME}" || true + @echo "Done." 
diff --git a/build/make/k8s-dogu.mk b/build/make/k8s-dogu.mk new file mode 100644 index 0000000..e656b17 --- /dev/null +++ b/build/make/k8s-dogu.mk @@ -0,0 +1,47 @@ +# Variables +# Path to the dogu json of the dogu +DOGU_JSON_FILE=${WORKDIR}/dogu.json +DOGU_JSON_DEV_FILE=${WORKDIR}/${TARGET_DIR}/dogu.json +# Name of the dogu is extracted from the dogu.json +ARTIFACT_ID=$(shell $(BINARY_YQ) -oy -e ".Name" $(DOGU_JSON_FILE) | sed "s|.*/||g") +# Namespace of the dogu is extracted from the dogu.json +ARTIFACT_NAMESPACE=$(shell $(BINARY_YQ) -oy -e ".Name" $(DOGU_JSON_FILE) | sed "s|/.*||g") +# Version of the dogu is extracted from the dogu.json +VERSION=$(shell $(BINARY_YQ) -oy -e ".Version" $(DOGU_JSON_FILE)) +# Image of the dogu is extracted from the dogu.json +IMAGE=$(shell $(BINARY_YQ) -oy -e ".Image" $(DOGU_JSON_FILE)):$(VERSION) + +include $(BUILD_DIR)/make/k8s.mk + +##@ K8s - EcoSystem + +.PHONY: build +build: image-import install-dogu-descriptor create-dogu-resource apply-dogu-resource ## Builds a new version of the dogu and deploys it into the K8s-EcoSystem. + +##@ K8s - Dogu - Resource + +# The additional k8s yaml files +K8S_RESOURCE_PRODUCTIVE_FOLDER ?= $(WORKDIR)/k8s +K8S_RESOURCE_PRODUCTIVE_YAML ?= $(K8S_RESOURCE_PRODUCTIVE_FOLDER)/$(ARTIFACT_ID).yaml +K8S_RESOURCE_DOGU_CR_TEMPLATE_YAML ?= $(BUILD_DIR)/make/k8s-dogu.tpl +K8S_RESOURCE_DOGU ?= $(K8S_RESOURCE_TEMP_FOLDER)/$(ARTIFACT_ID).yaml +# The pre generation script creates a k8s resource yaml containing the dogu crd and the content from the k8s folder. +.PHONY: create-dogu-resource +create-dogu-resource: ${BINARY_YQ} $(K8S_RESOURCE_TEMP_FOLDER) + @echo "Generating temporary K8s resources $(K8S_RESOURCE_DOGU)..." + @rm -f $(K8S_RESOURCE_DOGU) + @sed "s|NAMESPACE|$(ARTIFACT_NAMESPACE)|g" $(K8S_RESOURCE_DOGU_CR_TEMPLATE_YAML) | sed "s|NAME|$(ARTIFACT_ID)|g" | sed "s|VERSION|$(VERSION)|g" >> $(K8S_RESOURCE_DOGU) + @echo "Done." 
+ +.PHONY: apply-dogu-resource +apply-dogu-resource: + @kubectl --context="${KUBE_CONTEXT_NAME}" --namespace=${NAMESPACE} apply -f "$(K8S_RESOURCE_DOGU)" + +##@ K8s - Dogu + +.PHONY: install-dogu-descriptor +install-dogu-descriptor: ${BINARY_YQ} $(TARGET_DIR) ## Installs a configmap with current dogu.json into the cluster. + @echo "Generate configmap from dogu.json..." + @$(BINARY_YQ) -oj ".Image=\"${IMAGE_DEV}\" | .Version=\"${VERSION}\"" ${DOGU_JSON_FILE} > ${DOGU_JSON_DEV_FILE} + @kubectl --context="${KUBE_CONTEXT_NAME}" create configmap "$(ARTIFACT_ID)-descriptor" --from-file=$(DOGU_JSON_DEV_FILE) --dry-run=client -o yaml | kubectl --context="${KUBE_CONTEXT_NAME}" --namespace=${NAMESPACE} apply -f - + @echo "Done." diff --git a/build/make/k8s-dogu.tpl b/build/make/k8s-dogu.tpl new file mode 100644 index 0000000..91e2bb2 --- /dev/null +++ b/build/make/k8s-dogu.tpl @@ -0,0 +1,9 @@ +apiVersion: k8s.cloudogu.com/v2 +kind: Dogu +metadata: + name: NAME + labels: + app: ces +spec: + name: NAMESPACE/NAME + version: VERSION \ No newline at end of file diff --git a/build/make/k8s.mk b/build/make/k8s.mk new file mode 100644 index 0000000..2b79315 --- /dev/null +++ b/build/make/k8s.mk @@ -0,0 +1,205 @@ +# This file is optional and can be used to set personal information without committing them to the repository. +MY_ENV_FILE ?= $(WORKDIR)/.env +ifneq (,$(wildcard $(MY_ENV_FILE))) + include $(MY_ENV_FILE) +endif + +## Variables + +BINARY_YQ = $(UTILITY_BIN_PATH)/yq +BINARY_YQ_4_VERSION?=v4.40.3 +BINARY_HELM = $(UTILITY_BIN_PATH)/helm +BINARY_HELM_VERSION?=v3.13.0 +CONTROLLER_GEN = $(UTILITY_BIN_PATH)/controller-gen +CONTROLLER_GEN_VERSION?=v0.14.0 + +# Setting SHELL to bash allows bash commands to be executed by recipes. +# Options are set to exit when a recipe line exits non-zero or a piped command fails. +SHELL = /usr/bin/env bash -o pipefail +.SHELLFLAGS = -ec + +# The productive tag of the image +IMAGE ?= + +# Set production as default stage. 
Use "development" as stage in your .env file to generate artifacts +# with development images pointing to CES_REGISTRY_URL_PREFIX. +STAGE?=production + +# Set the "local" as runtime-environment, to push images to the container-registry of the local cluster and to apply resources to the local cluster. +# Use "remote" as runtime-environment in your .env file to push images to the container-registry at "registry.cloudogu.com/testing" and to apply resources to the configured kubernetes-context in KUBE_CONTEXT_NAME. +RUNTIME_ENV?=local +$(info RUNTIME_ENV=$(RUNTIME_ENV)) + +# The host and port of the local cluster +K3S_CLUSTER_FQDN?=k3ces.local +K3S_LOCAL_REGISTRY_PORT?=30099 + +# The URL of the container-registry to use. Defaults to the registry of the local-cluster. +# If RUNTIME_ENV is "remote" it is "registry.cloudogu.com/testing" +CES_REGISTRY_HOST?="${K3S_CLUSTER_FQDN}:${K3S_LOCAL_REGISTRY_PORT}" +CES_REGISTRY_NAMESPACE ?= +ifeq (${RUNTIME_ENV}, remote) + CES_REGISTRY_HOST="registry.cloudogu.com" + CES_REGISTRY_NAMESPACE="/testing" +endif +$(info CES_REGISTRY_HOST=$(CES_REGISTRY_HOST)) + +# The name of the kube-context to use for applying resources. +# If KUBE_CONTEXT_NAME is empty and RUNTIME_ENV is "remote" the currently configured kube-context is used. +# If KUBE_CONTEXT_NAME is empty and RUNTIME_ENV is not "remote" the "k3ces.local" is used as kube-context. +ifeq (${KUBE_CONTEXT_NAME}, ) + ifeq (${RUNTIME_ENV}, remote) + KUBE_CONTEXT_NAME = $(shell kubectl config current-context) + else + KUBE_CONTEXT_NAME = k3ces.local + endif +endif +$(info KUBE_CONTEXT_NAME=$(KUBE_CONTEXT_NAME)) + +# The git branch-name in lowercase, shortened to 63 bytes, and with everything except 0-9 and a-z replaced with -. No leading / trailing -. 
+GIT_BRANCH := $(shell git rev-parse --abbrev-ref HEAD | tr '[:upper:]' '[:lower:]' | sed -E 's/[^a-z0-9]+/-/g; s/^-+|-+$$//g' | cut -c1-63) +# The short git commit-hash +GIT_HASH := $(shell git rev-parse --short HEAD) + +## Image URL to use all building/pushing image targets +IMAGE_DEV?=$(CES_REGISTRY_HOST)$(CES_REGISTRY_NAMESPACE)/$(ARTIFACT_ID)/$(GIT_BRANCH) +IMAGE_DEV_VERSION=$(IMAGE_DEV):$(VERSION) + +# Variables for the temporary yaml files. These are used as template to generate a development resource containing +# the current namespace and the dev image. +K8S_RESOURCE_TEMP_FOLDER ?= $(TARGET_DIR)/k8s + +# This can be used by components with own images to check if all image env var are set. +# These components should override this variable with `check-all-vars`. +CHECK_VAR_TARGETS?=check-all-vars-without-image + +##@ K8s - Variables + +.PHONY: check-all-vars +check-all-vars: check-all-vars-without-image check-all-image-vars ## Conduct a sanity check against selected build artefacts or local environment + +.PHONY: check-all-image-vars +check-all-image-vars: check-k8s-image-env-var check-k8s-image-dev-var check-etc-hosts check-insecure-cluster-registry + +.PHONY: check-all-vars-without-image +check-all-vars-without-image: check-k8s-artifact-id check-k8s-namespace-env-var + +.PHONY: check-k8s-namespace-env-var +check-k8s-namespace-env-var: + @$(call check_defined, NAMESPACE, k8s namespace) + +.PHONY: check-k8s-image-env-var +check-k8s-image-env-var: + @$(call check_defined, IMAGE, docker image tag) + +.PHONY: check-k8s-artifact-id +check-k8s-artifact-id: + @$(call check_defined, ARTIFACT_ID, app/dogu name) + +.PHONY: check-etc-hosts +check-etc-hosts: + @if [[ ${RUNTIME_ENV} == "local" ]]; then \ + grep -E "^.+\s+${K3S_CLUSTER_FQDN}\$$" /etc/hosts > /dev/null || \ + (echo "Missing /etc/hosts entry for ${K3S_CLUSTER_FQDN}" && exit 1) \ + fi + +.PHONY: check-insecure-cluster-registry +check-insecure-cluster-registry: + @if [[ ${RUNTIME_ENV} == "local" ]]; then \ + 
grep "${CES_REGISTRY_HOST}" /etc/docker/daemon.json > /dev/null || \ + (echo "Missing /etc/docker/daemon.json for ${CES_REGISTRY_HOST}" && exit 1) \ + fi + +# If the RUNTIME_ENV is "remote" checks if the current docker-client has credentials for CES_REGISTRY_HOST +# If no credentials could be found, the credentials are queried and docker-login is performed +check-docker-credentials: + @if [[ "$(RUNTIME_ENV)" == "remote" ]]; then \ + if ! grep -q $(CES_REGISTRY_HOST) ~/.docker/config.json ; then \ + echo "Error: Docker is not logged in to $(CES_REGISTRY_HOST)"; \ + read -p "Enter Docker Username for $(CES_REGISTRY_HOST): " username; \ + read -sp "Enter Docker Password for $(CES_REGISTRY_HOST): " password; \ + echo ""; \ + echo "$$password" | docker login -u "$$username" --password-stdin $(CES_REGISTRY_HOST); \ + if [ $$? -eq 0 ]; then \ + echo "Docker login to $(CES_REGISTRY_HOST) successful"; \ + else \ + echo "Docker login to $(CES_REGISTRY_HOST) failed"; \ + exit 1; \ + fi \ + fi \ + fi + +##@ K8s - Resources + +${K8S_RESOURCE_TEMP_FOLDER}: + @mkdir -p $@ + + +##@ K8s - Docker + +.PHONY: docker-build +docker-build: check-docker-credentials check-k8s-image-env-var ${BINARY_YQ} ## Builds the docker image of the K8s app. + @echo "Building docker image $(IMAGE)..." + @DOCKER_BUILDKIT=1 docker build . -t $(IMAGE) + +.PHONY: docker-dev-tag +docker-dev-tag: check-k8s-image-dev-var docker-build ## Tags a Docker image for local K3ces deployment. + @echo "Tagging image with dev tag $(IMAGE_DEV_VERSION)..." + @DOCKER_BUILDKIT=1 docker tag ${IMAGE} $(IMAGE_DEV_VERSION) + +.PHONY: check-k8s-image-dev-var +check-k8s-image-dev-var: +ifeq (${IMAGE_DEV},) + @echo "Missing make variable IMAGE_DEV detected. It should look like \$${CES_REGISTRY_HOST}/docker-image:tag" + @exit 19 +endif + +.PHONY: image-import +image-import: check-all-vars check-k8s-artifact-id docker-dev-tag ## Imports the currently available image into the configured ces-registry. 
+ @echo "Import $(IMAGE_DEV_VERSION) into K8s cluster ${KUBE_CONTEXT_NAME}..." + @docker push $(IMAGE_DEV_VERSION) + @echo "Done." + +## Functions + +# Check that given variables are set and all have non-empty values, +# die with an error otherwise. +# +# Params: +# 1. Variable name(s) to test. +# 2. (optional) Error message to print. +check_defined = \ + $(strip $(foreach 1,$1, \ + $(call __check_defined,$1,$(strip $(value 2))))) +__check_defined = \ + $(if $(value $1),, \ + $(error Undefined $1$(if $2, ($2)))) + +##@ K8s - Download Utilities + +.PHONY: install-yq ## Installs the yq YAML editor. +install-yq: ${BINARY_YQ} + +${BINARY_YQ}: $(UTILITY_BIN_PATH) + $(call go-get-tool,$(BINARY_YQ),github.com/mikefarah/yq/v4@${BINARY_YQ_4_VERSION}) + +##@ K8s - Download Kubernetes Utilities + +.PHONY: install-helm ## Download helm locally if necessary. +install-helm: ${BINARY_HELM} + +${BINARY_HELM}: $(UTILITY_BIN_PATH) + $(call go-get-tool,$(BINARY_HELM),helm.sh/helm/v3/cmd/helm@${BINARY_HELM_VERSION}) + +.PHONY: controller-gen +controller-gen: ${CONTROLLER_GEN} ## Download controller-gen locally if necessary. + +${CONTROLLER_GEN}: + $(call go-get-tool,$(CONTROLLER_GEN),sigs.k8s.io/controller-tools/cmd/controller-gen@${CONTROLLER_GEN_VERSION}) + +ENVTEST = $(UTILITY_BIN_PATH)/setup-envtest +.PHONY: envtest +envtest: ${ENVTEST} ## Download envtest-setup locally if necessary. 
+ +${ENVTEST}: + $(call go-get-tool,$(ENVTEST),sigs.k8s.io/controller-runtime/tools/setup-envtest@latest) diff --git a/build/make/mockery.yaml b/build/make/mockery.yaml new file mode 100644 index 0000000..67b4339 --- /dev/null +++ b/build/make/mockery.yaml @@ -0,0 +1,4 @@ +inpackage: True +testonly: True +with-expecter: True +keeptree: False \ No newline at end of file diff --git a/build/make/mocks.mk b/build/make/mocks.mk new file mode 100644 index 0000000..4c9697f --- /dev/null +++ b/build/make/mocks.mk @@ -0,0 +1,27 @@ +##@ Mocking + +MOCKERY_BIN=${UTILITY_BIN_PATH}/mockery +MOCKERY_VERSION?=v2.42.1 +MOCKERY_YAML=${WORKDIR}/.mockery.yaml + +${MOCKERY_BIN}: ${UTILITY_BIN_PATH} + $(call go-get-tool,$(MOCKERY_BIN),github.com/vektra/mockery/v2@$(MOCKERY_VERSION)) + +${MOCKERY_YAML}: + @cp ${BUILD_DIR}/make/mockery.yaml ${WORKDIR}/.mockery.yaml + +.PHONY: mocks +mocks: ${MOCKERY_BIN} ${MOCKERY_YAML} ## This target is used to generate mocks for all interfaces in a project. + @for dir in ${WORKDIR}/*/ ;\ + do \ + # removes trailing '/' \ + dir=$${dir%*/} ;\ + # removes everything before the last '/' \ + dir=$${dir##*/} ;\ + if ! echo '${MOCKERY_IGNORED}' | egrep -q "\b$${dir}\b" ;\ + then \ + echo "Creating mocks for $${dir}" ;\ + ${MOCKERY_BIN} --all --dir $${dir} ;\ + fi ;\ + done ; + @echo "Mocks successfully created." diff --git a/build/make/package-debian.mk b/build/make/package-debian.mk new file mode 100644 index 0000000..ad55ea1 --- /dev/null +++ b/build/make/package-debian.mk @@ -0,0 +1,79 @@ +##@ Debian packaging + +# This Makefile holds all targets for building a debian package +# For deployment of the deb package include the deploy-debian.mk! 
+ +PREPARE_PACKAGE?=prepare-package +DEBIAN_PACKAGE_FORMAT_VERSION="2.0" +CONFFILES_FILE="$(DEBIAN_CONTENT_DIR)/control/conffiles" +CONFFILES_FILE_TMP="$(DEBIAN_CONTENT_DIR)/conffiles_" +DEBSRC:=$(shell find "${WORKDIR}/deb" -type f) + +.PHONY: package +package: debian-with-binary ## Build binary and package into .deb file + +.PHONY: debian +debian: $(DEBIAN_PACKAGE) ## Create .deb package without building the binary before + +.PHONY: debian-with-binary +debian-with-binary: $(BINARY) $(DEBIAN_PACKAGE) + +.PHONY: prepare-package +prepare-package: + @echo "Using default prepare-package target. To write your own, define a target and specify it in the PREPARE_PACKAGE variable, before the package-debian.mk import" + +$(DEBIAN_BUILD_DIR): + @mkdir $@ + +$(DEBIAN_BUILD_DIR)/debian-binary: $(DEBIAN_BUILD_DIR) + @echo $(DEBIAN_PACKAGE_FORMAT_VERSION) > $@ + +$(DEBIAN_CONTENT_DIR)/control: + @install -p -m 0755 -d $@ + +$(DEBIAN_CONTENT_DIR)/data: + @install -p -m 0755 -d $@ + +$(DEBIAN_PACKAGE): $(TARGET_DIR) $(DEBIAN_CONTENT_DIR)/control $(DEBIAN_CONTENT_DIR)/data $(DEBIAN_BUILD_DIR)/debian-binary $(PREPARE_PACKAGE) $(DEBSRC) + @echo "Creating .deb package..." 
+ +# populate control directory + @sed -e "s/^Version:.*/Version: $(VERSION)/g" deb/DEBIAN/control > $(DEBIAN_CONTENT_DIR)/_control + @install -p -m 0644 $(DEBIAN_CONTENT_DIR)/_control $(DEBIAN_CONTENT_DIR)/control/control + +# populate data directory + @for dir in $$(find deb -mindepth 1 -not -name "DEBIAN" -a -type d |sed s@"^deb/"@"$(DEBIAN_CONTENT_DIR)/data/"@) ; do \ + install -m 0755 -d $${dir} ; \ + done + + @for file in $$(find deb -mindepth 1 -type f | grep -v "DEBIAN") ; do \ + cp $${file} $(DEBIAN_CONTENT_DIR)/data/$${file#deb/} ; \ + done + +# Copy binary to data/usr/sbin, if it exists + @if [ -f $(BINARY) ]; then \ + echo "Copying binary to $(DEBIAN_CONTENT_DIR)/data/usr/sbin"; \ + install -p -m 0755 -d $(DEBIAN_CONTENT_DIR)/data/usr/sbin; \ + install -p -m 0755 $(BINARY) $(DEBIAN_CONTENT_DIR)/data/usr/sbin/; \ + fi + +# create conffiles file which help to deal with config change +# in order to successfully add the conffiles file to the archive it must exist, even empty + @touch $(CONFFILES_FILE_TMP) + @for file in $$(find $(DEBIAN_CONTENT_DIR)/data/etc -mindepth 1 -type f | grep -v "DEBIAN") ; do \ + echo $$file | sed s@'.*\(/etc/\)@\1'@ >> $(CONFFILES_FILE_TMP) ; \ + done + @install -p -m 0644 $(CONFFILES_FILE_TMP) $(CONFFILES_FILE) + @rm $(CONFFILES_FILE_TMP) + +# create control.tar.gz + @tar cvfz $(DEBIAN_CONTENT_DIR)/control.tar.gz -C $(DEBIAN_CONTENT_DIR)/control $(TAR_ARGS) . + +# create data.tar.gz + @tar cvfz $(DEBIAN_CONTENT_DIR)/data.tar.gz -C $(DEBIAN_CONTENT_DIR)/data $(TAR_ARGS) . + +# create package + @ar roc $@ $(DEBIAN_BUILD_DIR)/debian-binary $(DEBIAN_CONTENT_DIR)/control.tar.gz $(DEBIAN_CONTENT_DIR)/data.tar.gz + @echo "... 
deb package can be found at $@" + +APTLY:=curl --silent --show-error --fail -u "${APT_API_USERNAME}":"${APT_API_PASSWORD}" diff --git a/build/make/package-tar.mk b/build/make/package-tar.mk new file mode 100644 index 0000000..9b842a3 --- /dev/null +++ b/build/make/package-tar.mk @@ -0,0 +1,10 @@ +##@ Tar packaging + +TAR_PACKAGE:=$(ARTIFACT_ID)-$(VERSION).tar.gz + +.PHONY: package +package: $(TAR_PACKAGE) ## Build binary and create tar package from it + +$(TAR_PACKAGE): $(BINARY) + # Check owner and group id + tar cvfz $(TARGET_DIR)/$(TAR_PACKAGE) -C $(TARGET_DIR) $$(basename ${BINARY}) $(TAR_ARGS) diff --git a/build/make/release.mk b/build/make/release.mk new file mode 100644 index 0000000..328f7ba --- /dev/null +++ b/build/make/release.mk @@ -0,0 +1,19 @@ +##@ Releases + +# This makefile holds the dogu-release target for starting a new dogu release + +.PHONY: dogu-release +dogu-release: ## Start a dogu release + build/make/release.sh dogu + +.PHONY: node-release +node-release: ## Start a node package release + build/make/release.sh node-pkg + +.PHONY: go-release +go-release: ## Start a go tool release + build/make/release.sh go-tool + +.PHONY: dogu-cve-release +dogu-cve-release: ## Start a dogu release of a new build if the local build fixes critical CVEs + @bash -c "build/make/release_cve.sh \"${REGISTRY_USERNAME}\" \"${REGISTRY_PASSWORD}\" \"${TRIVY_IMAGE_SCAN_FLAGS}\" \"${DRY_RUN}\" \"${CVE_SEVERITY}\"" diff --git a/build/make/release.sh b/build/make/release.sh new file mode 100755 index 0000000..ae9a722 --- /dev/null +++ b/build/make/release.sh @@ -0,0 +1,67 @@ +#!/bin/bash +set -o errexit +set -o nounset +set -o pipefail + +# Extension points in release.sh: +# +# A custom release argument file will be sourced if found. The custom release arg file may implement one or more bash +# functions which either release.sh or release_functions.sh define. 
If such a custom release function is found the +# release script must define the argument list which the custom release function will receive during the release. + +sourceCustomReleaseArgs() { + RELEASE_ARGS_FILE="${1}" + + if [[ -f "${RELEASE_ARGS_FILE}" ]]; then + echo "Using custom release args file ${RELEASE_ARGS_FILE}" + + local sourceCustomReleaseExitCode=0 + # shellcheck disable=SC1090 + source "${RELEASE_ARGS_FILE}" || sourceCustomReleaseExitCode=$? + if [[ ${sourceCustomReleaseExitCode} -ne 0 ]]; then + echo "Error while sourcing custom release arg file ${sourceCustomReleaseExitCode}. Exiting." + exit 9 + fi + fi +} + +PROJECT_DIR="$(pwd)" +RELEASE_ARGS_FILE="${PROJECT_DIR}/release_args.sh" + +sourceCustomReleaseArgs "${RELEASE_ARGS_FILE}" + +# shellcheck disable=SC1090 +source "$(pwd)/build/make/release_functions.sh" + +TYPE="${1}" +FIXED_CVE_LIST="${2:-""}" +DRY_RUN="${3:-""}" + +echo "=====Starting Release process=====" + +if [[ "${TYPE}" == "dogu" || "${TYPE}" == "dogu-cve-release" ]];then + CURRENT_TOOL_VERSION=$(get_current_version_by_dogu_json) +else + CURRENT_TOOL_VERSION=$(get_current_version_by_makefile) +fi + +NEW_RELEASE_VERSION="$(read_new_version)" + +validate_new_version "${NEW_RELEASE_VERSION}" +if [[ -n "${DRY_RUN}" ]]; then + start_dry_run_release "${NEW_RELEASE_VERSION}" +else + start_git_flow_release "${NEW_RELEASE_VERSION}" +fi + +update_versions "${NEW_RELEASE_VERSION}" +update_changelog "${NEW_RELEASE_VERSION}" "${FIXED_CVE_LIST}" +show_diff + +if [[ -n "${DRY_RUN}" ]]; then + abort_dry_run_release "${NEW_RELEASE_VERSION}" +else + finish_release_and_push "${CURRENT_TOOL_VERSION}" "${NEW_RELEASE_VERSION}" +fi + +echo "=====Finished Release process=====" diff --git a/build/make/release_cve.sh b/build/make/release_cve.sh new file mode 100755 index 0000000..6c5fc6d --- /dev/null +++ b/build/make/release_cve.sh @@ -0,0 +1,166 @@ +#!/bin/bash +set -o errexit +set -o pipefail +set -o nounset + +function readCveSeverityIfUnset() { + if [ -z 
"${CVE_SEVERITY}" ]; then + echo "CVE_SEVERITY is unset" + while [[ -z ${CVE_SEVERITY} ]]; do + read -r -p "select the desired cve severity (CRITICAL, HIGH, MEDIUM, ...): " CVE_SEVERITY + done + fi +} + +function readCredentialsIfUnset() { + if [ -z "${USERNAME}" ]; then + echo "username is unset" + while [[ -z ${USERNAME} ]]; do + read -r -p "type username for ${REGISTRY_URL}: " USERNAME + done + fi + if [ -z "${PASSWORD}" ]; then + echo "password is unset" + while [[ -z ${PASSWORD} ]]; do + read -r -s -p "type password for ${REGISTRY_URL}: " PASSWORD + done + fi +} + +function diffArrays() { + local cveListX=("$1") + local cveListY=("$2") + local result=() + + local cveX + # Disable the following shellcheck because the arrays are sufficiently whitespace delimited because of the jq parsing result. + # shellcheck disable=SC2128 + for cveX in ${cveListX}; do + local found=0 + local cveY + for cveY in ${cveListY}; do + [[ "${cveY}" == "${cveX}" ]] && { + found=1 + break + } + done + + [[ "${found}" == 0 ]] && result+=("${cveX}") + done + + echo "${result[@]}" +} + +function dockerLogin() { + docker login "${REGISTRY_URL}" -u "${USERNAME}" -p "${PASSWORD}" +} + +function dockerLogout() { + docker logout "${REGISTRY_URL}" +} + +function nameFromDogu() { + jsonPropertyFromDogu ".Name" +} + +function imageFromDogu() { + jsonPropertyFromDogu ".Image" +} + +function versionFromDogu() { + jsonPropertyFromDogu ".Version" +} + +function jsonPropertyFromDogu() { + local property="${1}" + jq -r "${property}" "${DOGU_JSON_FILE}" +} + +function pullRemoteImage() { + docker pull "$(imageFromDogu):$(versionFromDogu)" +} + +function buildLocalImage() { + docker build --no-cache . 
-t "$(imageFromDogu):$(versionFromDogu)" +} + +function scanImage() { + docker run -v "${TRIVY_CACHE_DIR}":"${TRIVY_DOCKER_CACHE_DIR}" -v /var/run/docker.sock:/var/run/docker.sock -v "${TRIVY_PATH}":/result aquasec/trivy --cache-dir "${TRIVY_DOCKER_CACHE_DIR}" -f json -o /result/results.json image ${TRIVY_IMAGE_SCAN_FLAGS:+"${TRIVY_IMAGE_SCAN_FLAGS}"} "$(imageFromDogu):$(versionFromDogu)" +} + +function parseTrivyJsonResult() { + local severity="${1}" + local trivy_result_file="${2}" + + # First select results which have the property "Vulnerabilities". Filter the vulnerability ids with the given severity and afterward put the values in an array. + # This array is used to format the values with join(" ") in a whitespace delimited string list. + jq -rc "[.Results[] | select(.Vulnerabilities) | .Vulnerabilities | .[] | select(.Severity == \"${severity}\") | .VulnerabilityID] | unique | join(\" \")" "${trivy_result_file}" +} + +RELEASE_SH="build/make/release.sh" + +REGISTRY_URL="registry.cloudogu.com" +DOGU_JSON_FILE="dogu.json" + +CVE_SEVERITY= + +TRIVY_PATH= +TRIVY_RESULT_FILE= +TRIVY_CACHE_DIR= +TRIVY_DOCKER_CACHE_DIR=/tmp/db +TRIVY_IMAGE_SCAN_FLAGS= + +USERNAME="" +PASSWORD="" +DRY_RUN= + +function runMain() { + readCveSeverityIfUnset + readCredentialsIfUnset + dockerLogin + + mkdir -p "${TRIVY_PATH}" # Cache will not be removed after release. rm requires root because the trivy container only runs with root. + pullRemoteImage + scanImage + local remote_trivy_cve_list + remote_trivy_cve_list=$(parseTrivyJsonResult "${CVE_SEVERITY}" "${TRIVY_RESULT_FILE}") + + buildLocalImage + scanImage + local local_trivy_cve_list + local_trivy_cve_list=$(parseTrivyJsonResult "${CVE_SEVERITY}" "${TRIVY_RESULT_FILE}") + + dockerLogout + + local cve_in_local_but_not_in_remote + cve_in_local_but_not_in_remote=$(diffArrays "${local_trivy_cve_list}" "${remote_trivy_cve_list}") + if [[ -n "${cve_in_local_but_not_in_remote}" ]]; then + echo "Abort release. 
Added new vulnerabilities:" + echo "${cve_in_local_but_not_in_remote[@]}" + exit 2 + fi + + local cve_in_remote_but_not_in_local + cve_in_remote_but_not_in_local=$(diffArrays "${remote_trivy_cve_list}" "${local_trivy_cve_list}") + if [[ -z "${cve_in_remote_but_not_in_local}" ]]; then + echo "Abort release. Fixed no new vulnerabilities" + exit 3 + fi + + echo "Fixed ${CVE_SEVERITY} CVEs: ${cve_in_remote_but_not_in_local}" + "${RELEASE_SH}" "dogu-cve-release" "${cve_in_remote_but_not_in_local}" "${DRY_RUN}" +} + +# make the script only runMain when executed, not when sourced from bats tests +if [[ -n "${BASH_VERSION}" && "${BASH_SOURCE[0]}" == "${0}" ]]; then + USERNAME="${1:-""}" + PASSWORD="${2:-""}" + TRIVY_IMAGE_SCAN_FLAGS="${3:-""}" + DRY_RUN="${4:-""}" + CVE_SEVERITY="${5:-""}" + + TRIVY_PATH="/tmp/trivy-dogu-cve-release-$(nameFromDogu)" + TRIVY_RESULT_FILE="${TRIVY_PATH}/results.json" + TRIVY_CACHE_DIR="${TRIVY_PATH}/db" + runMain +fi diff --git a/build/make/release_functions.sh b/build/make/release_functions.sh new file mode 100755 index 0000000..499c248 --- /dev/null +++ b/build/make/release_functions.sh @@ -0,0 +1,269 @@ +#!/bin/bash +set -o errexit +set -o nounset +set -o pipefail + +wait_for_ok() { + printf "\n" + local OK="false" + while [[ "${OK}" != "ok" ]]; do + read -r -p "${1} (type 'ok'): " OK + done +} + +ask_yes_or_no() { + local ANSWER="" + + while [ "${ANSWER}" != "y" ] && [ "${ANSWER}" != "n" ]; do + read -r -p "${1} (type 'y/n'): " ANSWER + done + + echo "${ANSWER}" +} + +get_current_version_by_makefile() { + grep '^VERSION=[0-9[:alpha:].-]*$' Makefile | sed s/VERSION=//g +} + +get_current_version_by_dogu_json() { + jq ".Version" --raw-output dogu.json +} + +read_new_version() { + local NEW_RELEASE_VERSION + read -r -p "Current Version is v${CURRENT_TOOL_VERSION}. 
Please provide the new version: v" NEW_RELEASE_VERSION + echo "${NEW_RELEASE_VERSION}" +} + +validate_new_version() { + local NEW_RELEASE_VERSION="${1}" + # Validate that release version does not start with vv + if [[ ${NEW_RELEASE_VERSION} = v* ]]; then + echo "WARNING: The new release version (v${NEW_RELEASE_VERSION}) starts with 'vv'." + echo "You must not enter the v when defining the new version." + local ANSWER + ANSWER=$(ask_yes_or_no "Should the first v be removed?") + if [ "${ANSWER}" == "y" ]; then + NEW_RELEASE_VERSION="${NEW_RELEASE_VERSION:1}" + echo "Release version now is: ${NEW_RELEASE_VERSION}" + fi + fi +} + +start_git_flow_release() { + local NEW_RELEASE_VERSION="${1}" + # Do gitflow + git flow init --defaults --force + + local mainBranchExists + mainBranchExists="$(git show-ref refs/remotes/origin/main || echo "")" + if [ -n "$mainBranchExists" ]; then + echo 'Using "main" branch for production releases' + git flow config set master main + git checkout main + git pull origin main + else + echo 'Using "master" branch for production releases' + git checkout master + git pull origin master + fi + + git checkout develop + git pull origin develop + git flow release start v"${NEW_RELEASE_VERSION}" +} + +start_dry_run_release() { + local NEW_RELEASE_VERSION="${1}" + + git checkout -b dryrun/v"${NEW_RELEASE_VERSION}" +} + +abort_dry_run_release() { + local NEW_RELEASE_VERSION="${1}" + + git checkout develop + git branch -D dryrun/v"${NEW_RELEASE_VERSION}" +} + +# update_versions updates files with the new release version and interactively asks the user for verification. If okay +# the updated files will be staged to git and finally committed. 
+# +# extension points: +# - update_versions_modify_files - update a file with the new version number +# - update_versions_stage_modified_files - stage a modified file to prepare the file for the up-coming commit +update_versions() { + local NEW_RELEASE_VERSION="${1}" + + if [[ $(type -t update_versions_modify_files) == function ]]; then + local preSkriptExitCode=0 + update_versions_modify_files "${NEW_RELEASE_VERSION}" || preSkriptExitCode=$? + if [[ ${preSkriptExitCode} -ne 0 ]]; then + echo "ERROR: custom update_versions_modify_files() exited with exit code ${preSkriptExitCode}" + exit 1 + fi + fi + + # Update version in dogu.json + if [ -f "dogu.json" ]; then + echo "Updating version in dogu.json..." + jq ".Version = \"${NEW_RELEASE_VERSION}\"" dogu.json >dogu2.json && mv dogu2.json dogu.json + fi + + # Update version in Dockerfile + if [ -f "Dockerfile" ]; then + echo "Updating version in Dockerfile..." + sed -i "s/\(^[ ]*VERSION=\"\)\([^\"]*\)\(.*$\)/\1${NEW_RELEASE_VERSION}\3/" Dockerfile + fi + + # Update version in Makefile + if [ -f "Makefile" ]; then + echo "Updating version in Makefile..." + sed -i "s/\(^VERSION=\)\(.*\)$/\1${NEW_RELEASE_VERSION}/" Makefile + fi + + # Update version in package.json + if [ -f "package.json" ]; then + echo "Updating version in package.json..." + jq ".version = \"${NEW_RELEASE_VERSION}\"" package.json >package2.json && mv package2.json package.json + fi + + # Update version in pom.xml + if [ -f "pom.xml" ]; then + echo "Updating version in pom.xml..." + mvn versions:set -DgenerateBackupPoms=false -DnewVersion="${NEW_RELEASE_VERSION}" + fi + + wait_for_ok "Please make sure that all versions have been updated correctly now (e.g. via \"git diff\")." + + ### The `git add` command has to be after the okay. Otherwise user-made changes to versions would not be added. 
+ + if [[ $(type -t update_versions_stage_modified_files) == function ]]; then + preSkriptExitCode=0 + update_versions_stage_modified_files "${NEW_RELEASE_VERSION}" || preSkriptExitCode=$? + if [[ ${preSkriptExitCode} -ne 0 ]]; then + echo "ERROR: custom update_versions_stage_modified_files exited with exit code ${preSkriptExitCode}" + exit 1 + fi + fi + + if [ -f "dogu.json" ]; then + git add dogu.json + fi + + if [ -f "Dockerfile" ]; then + git add Dockerfile + fi + + if [ -f "Makefile" ]; then + git add Makefile + fi + + if [ -f "package.json" ]; then + git add package.json + fi + + if [ -f "pom.xml" ]; then + git add pom.xml + fi + + git commit -m "Bump version" +} + +update_changelog() { + local NEW_RELEASE_VERSION="${1}" + local FIXED_CVE_LIST="${2}" + + # Changelog update + local CURRENT_DATE + CURRENT_DATE=$(date --rfc-3339=date) + local NEW_CHANGELOG_TITLE="## [v${NEW_RELEASE_VERSION}] - ${CURRENT_DATE}" + # Check if "Unreleased" tag exists + while ! grep --silent "## \[Unreleased\]" CHANGELOG.md; do + echo "" + echo -e "\e[31mYour CHANGELOG.md does not contain a \"## [Unreleased]\" line!\e[0m" + echo "Please add one to make it comply to https://keepachangelog.com/en/1.0.0/" + wait_for_ok "Please insert a \"## [Unreleased]\" line into CHANGELOG.md now." + done + + if [[ -n "${FIXED_CVE_LIST}" ]]; then + addFixedCVEListFromReRelease "${FIXED_CVE_LIST}" + fi + + # Add new title line to changelog + sed -i "s|## \[Unreleased\]|## \[Unreleased\]\n\n${NEW_CHANGELOG_TITLE}|g" CHANGELOG.md + + # Wait for user to validate changelog changes + wait_for_ok "Please make sure your CHANGELOG.md looks as desired." + + # Check if new version tag still exists + while ! grep --silent "## \[v${NEW_RELEASE_VERSION}\] - ${CURRENT_DATE}" CHANGELOG.md; do + echo "" + echo -e "\e[31mYour CHANGELOG.md does not contain \"${NEW_CHANGELOG_TITLE}\"!\e[0m" + wait_for_ok "Please update your CHANGELOG.md now." 
+ done + + git add CHANGELOG.md + git commit -m "Update changelog" +} + +# addFixedCVEListFromReRelease is used in dogu cve releases. The method adds the fixed CVEs under the ### Fixed header +# in the unreleased section. +addFixedCVEListFromReRelease() { + local fixed_cve_list="${1}" + + local cve_sed_search="" + local cve_sed_replace="" + local fixed_exists_in_unreleased + fixed_exists_in_unreleased=$(awk '/^\#\# \[Unreleased\]$/{flag=1;next}/^\#\# \[/{flag=0}flag' CHANGELOG.md | grep -e "^### Fixed$" || true) + if [[ -n "${fixed_exists_in_unreleased}" ]]; then + # extend fixed header with CVEs. + cve_sed_search="^\#\#\# Fixed$" + cve_sed_replace="\#\#\# Fixed\n- Fixed ${fixed_cve_list}" + else + # extend unreleased header with fixed header and CVEs. + cve_sed_search="^\#\# \[Unreleased\]$" + cve_sed_replace="\#\# \[Unreleased\]\n\#\#\# Fixed\n- Fixed ${fixed_cve_list}" + + local any_exists_unreleased + any_exists_unreleased=$(awk '/^\#\# \[Unreleased\]$/{flag=1;next}/^\#\# \[/{flag=0}flag' CHANGELOG.md | grep -e "^\#\#\# Added$" -e "^\#\#\# Fixed$" -e "^\#\#\# Changed$" || true) + if [[ -n ${any_exists_unreleased} ]]; then + cve_sed_replace+="\n" + fi + fi + + sed -i "0,/${cve_sed_search}/s//${cve_sed_replace}/" CHANGELOG.md +} + +show_diff() { + if ! git diff --exit-code >/dev/null; then + echo "There are still uncommitted changes:" + echo "" + echo "# # # # # # # # # #" + echo "" + git --no-pager diff + echo "" + echo "# # # # # # # # # #" + fi + + echo "All changes compared to develop branch:" + echo "" + echo "# # # # # # # # # #" + echo "" + git --no-pager diff develop + echo "" + echo "# # # # # # # # # #" +} + +finish_release_and_push() { + local CURRENT_VERSION="${1}" + local NEW_RELEASE_VERSION="${2}" + + # Push changes and delete release branch + wait_for_ok "Upgrade from version v${CURRENT_VERSION} to version v${NEW_RELEASE_VERSION} finished. Should the changes be pushed?" 
+ git push origin release/v"${NEW_RELEASE_VERSION}" + + echo "Switching back to develop and deleting branch release/v${NEW_RELEASE_VERSION}..." + git checkout develop + git branch -D release/v"${NEW_RELEASE_VERSION}" +} diff --git a/build/make/self-update.mk b/build/make/self-update.mk new file mode 100644 index 0000000..175f8a6 --- /dev/null +++ b/build/make/self-update.mk @@ -0,0 +1,22 @@ +##@ Makefile management + +.PHONY: update-makefiles +update-makefiles: do-update-makefiles ## Update Makefiles to MAKEFILES_VERSION + +.PHONY: do-update-makefiles +do-update-makefiles: $(TMP_DIR) download-and-extract remove-old-files copy-new-files + @echo Updating makefiles... + +.PHONY: download-and-extract +download-and-extract: + @curl -L --silent https://github.com/cloudogu/makefiles/archive/v$(MAKEFILES_VERSION).tar.gz > $(TMP_DIR)/makefiles-v$(MAKEFILES_VERSION).tar.gz + @tar -xzf $(TMP_DIR)/makefiles-v$(MAKEFILES_VERSION).tar.gz -C $(TMP_DIR) + +.PHONY: remove-old-files +remove-old-files: + @echo "Deleting old files" + rm -rf $(BUILD_DIR)/make + +.PHONY: copy-new-files +copy-new-files: + @cp -r $(TMP_DIR)/makefiles-$(MAKEFILES_VERSION)/build/make $(BUILD_DIR) \ No newline at end of file diff --git a/build/make/static-analysis.mk b/build/make/static-analysis.mk new file mode 100644 index 0000000..00c406f --- /dev/null +++ b/build/make/static-analysis.mk @@ -0,0 +1,66 @@ +##@ Static analysis + +STATIC_ANALYSIS_DIR=$(TARGET_DIR)/static-analysis +GOIMAGE?=golang +GOTAG?=1.23 +CUSTOM_GO_MOUNT?=-v /tmp:/tmp + +REVIEW_DOG=$(TMP_DIR)/bin/reviewdog +LINT=$(TMP_DIR)/bin/golangci-lint +LINT_VERSION?=v1.61.0 +# ignore tests and mocks +LINTFLAGS=--tests=false --exclude-files="^.*_mock.go$$" --exclude-files="^.*/mock.*.go$$" --timeout 10m --issues-exit-code 0 +ADDITIONAL_LINTER=-E bodyclose -E containedctx -E contextcheck -E decorder -E dupl -E errname -E forcetypeassert -E funlen -E unparam + +.PHONY: static-analysis +static-analysis: static-analysis-$(ENVIRONMENT) ## Start a static 
analysis of the code + +.PHONY: static-analysis-ci +static-analysis-ci: + @make $(STATIC_ANALYSIS_DIR)/static-analysis-cs.log $(STATIC_ANALYSIS_DIR)/static-analysis.log static-analysis-ci-report-pr + +static-analysis-ci-report-pr: $(REVIEW_DOG) + @if [ X"$(CI_PULL_REQUEST)" != X"" -a X"$(CI_PULL_REQUEST)" != X"null" ] ; then \ + cat $(STATIC_ANALYSIS_DIR)/static-analysis-cs.log | CI_COMMIT=$(COMMIT_ID) $(REVIEW_DOG) -f=checkstyle -reporter="github-pr-review"; \ + fi + +.PHONY: static-analysis-local +static-analysis-local: $(PASSWD) $(ETCGROUP) $(HOME_DIR) + @docker run --rm \ + -e GOOS=$(GOOS) \ + -e GOARCH=$(GOARCH) \ + -u "$(UID_NR):$(GID_NR)" \ + -v $(PASSWD):/etc/passwd:ro \ + -v $(ETCGROUP):/etc/group:ro \ + -v $(HOME_DIR):/home/$(USER) \ + -v $(WORKDIR):/go/src/github.com/cloudogu/$(ARTIFACT_ID) \ + $(CUSTOM_GO_MOUNT) \ + -w /go/src/github.com/cloudogu/$(ARTIFACT_ID) \ + $(GOIMAGE):$(GOTAG) \ + make $(STATIC_ANALYSIS_DIR)/static-analysis-cs.log $(STATIC_ANALYSIS_DIR)/static-analysis.log static-analysis-ci-report-local + +$(STATIC_ANALYSIS_DIR)/static-analysis.log: $(STATIC_ANALYSIS_DIR) + @echo "" + @echo "complete static analysis:" + @echo "" + @$(LINT) $(LINTFLAGS) run ./... $(ADDITIONAL_LINTER) > $@ + +$(STATIC_ANALYSIS_DIR)/static-analysis-cs.log: $(STATIC_ANALYSIS_DIR) + @echo "run static analysis with export to checkstyle format" + @$(LINT) $(LINTFLAGS) run --out-format=checkstyle ./... $(ADDITIONAL_LINTER) > $@ + +$(STATIC_ANALYSIS_DIR): $(LINT) + @mkdir -p $(STATIC_ANALYSIS_DIR) + +static-analysis-ci-report-local: $(STATIC_ANALYSIS_DIR)/static-analysis-cs.log $(REVIEW_DOG) + @echo "" + @echo "differences to develop branch:" + @echo "" + @cat $(STATIC_ANALYSIS_DIR)/static-analysis-cs.log | $(REVIEW_DOG) -f checkstyle -diff "git diff develop" + +$(LINT): $(TMP_DIR) + @echo "Download golangci-lint $(LINT_VERSION)..." 
+ @curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(TMP_DIR)/bin $(LINT_VERSION) + +$(REVIEW_DOG): $(TMP_DIR) + @curl -sfL https://raw.githubusercontent.com/reviewdog/reviewdog/master/install.sh| sh -s -- -b $(TMP_DIR)/bin diff --git a/build/make/test-common.mk b/build/make/test-common.mk new file mode 100644 index 0000000..6eaa0da --- /dev/null +++ b/build/make/test-common.mk @@ -0,0 +1,6 @@ +GO_JUNIT_REPORT=$(UTILITY_BIN_PATH)/go-junit-report +GO_JUNIT_REPORT_VERSION=v1.0.0 + +$(GO_JUNIT_REPORT): $(UTILITY_BIN_PATH) + @echo "Download go-junit-report..." + @$(call go-get-tool,$@,github.com/jstemmer/go-junit-report@$(GO_JUNIT_REPORT_VERSION)) diff --git a/build/make/test-integration.mk b/build/make/test-integration.mk new file mode 100644 index 0000000..9a3103c --- /dev/null +++ b/build/make/test-integration.mk @@ -0,0 +1,49 @@ +##@ Integration testing + +INTEGRATION_TEST_DIR=$(TARGET_DIR)/integration-tests +XUNIT_INTEGRATION_XML=$(INTEGRATION_TEST_DIR)/integration-tests.xml +INTEGRATION_TEST_LOG=$(INTEGRATION_TEST_DIR)/integration-tests.log +INTEGRATION_TEST_REPORT=$(INTEGRATION_TEST_DIR)/coverage.out +PRE_INTEGRATIONTESTS?=start-local-docker-compose +POST_INTEGRATIONTESTS?=stop-local-docker-compose +INTEGRATION_TEST_NAME_PATTERN?=.* + +.PHONY: integration-test +integration-test: $(XUNIT_INTEGRATION_XML) ## Start integration tests + +.PHONY: start-local-docker-compose +start-local-docker-compose: +ifeq ($(ENVIRONMENT), local) + echo "Found developer environment. Starting up docker-compose" + docker-compose up -d +else + echo "Found CI environment. Use existing docker configuration" +endif + + +.PHONY: stop-local-docker-compose +stop-local-docker-compose: +ifeq ($(ENVIRONMENT), local) + echo "Found developer environment. Quitting docker-compose" + docker-compose kill; +else + echo "Found CI environment. 
Nothing to be done" +endif + +$(XUNIT_INTEGRATION_XML): $(SRC) $(GO_JUNIT_REPORT) +ifneq ($(strip $(PRE_INTEGRATIONTESTS)),) + @make $(PRE_INTEGRATIONTESTS) +endif + + @mkdir -p $(INTEGRATION_TEST_DIR) + @echo 'mode: set' > $(INTEGRATION_TEST_REPORT) + @rm -f $(INTEGRATION_TEST_LOG) || true + @$(GO_CALL) test ./... -v -tags=${GO_BUILD_TAG_INTEGRATION_TEST} -coverpkg=./... -coverprofile=${INTEGRATION_TEST_REPORT} -run ${INTEGRATION_TEST_NAME_PATTERN} 2>&1 | tee $(INTEGRATION_TEST_LOG) + @cat $(INTEGRATION_TEST_LOG) | $(GO_JUNIT_REPORT) > $@ + @if grep '^FAIL' $(INTEGRATION_TEST_LOG); then \ + exit 1; \ + fi + +ifneq ($(strip $(POST_INTEGRATIONTESTS)),) + @make $(POST_INTEGRATIONTESTS) +endif diff --git a/build/make/test-unit.mk b/build/make/test-unit.mk new file mode 100644 index 0000000..6838b1c --- /dev/null +++ b/build/make/test-unit.mk @@ -0,0 +1,36 @@ +##@ Unit testing + +UNIT_TEST_DIR=$(TARGET_DIR)/unit-tests +XUNIT_XML=$(UNIT_TEST_DIR)/unit-tests.xml +UNIT_TEST_LOG=$(UNIT_TEST_DIR)/unit-tests.log +COVERAGE_REPORT=$(UNIT_TEST_DIR)/coverage.out + +PRE_UNITTESTS?= +POST_UNITTESTS?= + +.PHONY: unit-test +unit-test: $(XUNIT_XML) ## Start unit tests + +$(XUNIT_XML): $(SRC) $(GO_JUNIT_REPORT) +ifneq ($(strip $(PRE_UNITTESTS)),) + @make $(PRE_UNITTESTS) +endif + + @mkdir -p $(UNIT_TEST_DIR) + @echo 'mode: set' > ${COVERAGE_REPORT} + @rm -f $(UNIT_TEST_LOG) || true + @for PKG in $(PACKAGES) ; do \ + ${GO_CALL} test -v $$PKG -coverprofile=${COVERAGE_REPORT}.tmp 2>&1 | tee $(UNIT_TEST_LOG).tmp ; \ + cat ${COVERAGE_REPORT}.tmp | tail +2 >> ${COVERAGE_REPORT} ; \ + rm -f ${COVERAGE_REPORT}.tmp ; \ + cat $(UNIT_TEST_LOG).tmp >> $(UNIT_TEST_LOG) ; \ + rm -f $(UNIT_TEST_LOG).tmp ; \ + done + @cat $(UNIT_TEST_LOG) | $(GO_JUNIT_REPORT) > $@ + @if grep '^FAIL' $(UNIT_TEST_LOG); then \ + exit 1; \ + fi + +ifneq ($(strip $(POST_UNITTESTS)),) + @make $(POST_UNITTESTS) +endif diff --git a/build/make/variables.mk b/build/make/variables.mk new file mode 100644 index 0000000..639a8b1 
--- /dev/null +++ b/build/make/variables.mk @@ -0,0 +1,97 @@ +TARGET_DIR=target + +WORKDIR:=$(shell pwd) +BUILD_DIR=$(WORKDIR)/build +TMP_DIR:=$(BUILD_DIR)/tmp + +BINARY:=$(TARGET_DIR)/$(ARTIFACT_ID) + +COMMIT_ID:=$(shell git rev-parse HEAD) +LAST_COMMIT_DATE=$(shell git rev-list --format=format:'%ci' --max-count=1 `git rev-parse HEAD` | tail -1) +TAR_ARGS:=--owner=0:0 --group=0:0 --mtime="$(LAST_COMMIT_DATE)" --sort=name +BRANCH=$(shell git branch | grep \* | sed 's/ /\n/g' | head -2 | tail -1) + +GO_ENVIRONMENT?= +# GO_CALL accomodates the go CLI command as well as necessary environment variables which are optional. +GO_CALL=${GO_ENVIRONMENT} go +PACKAGES=$(shell ${GO_CALL} list ./... | grep -v /vendor/) +GO_BUILD_TAG_INTEGRATION_TEST?=integration +GOMODULES=on +UTILITY_BIN_PATH?=${WORKDIR}/.bin + +SRC:=$(shell find "${WORKDIR}" -type f -name "*.go" -not -path "*/vendor/*") + +# debian stuff +DEBIAN_BUILD_DIR=$(BUILD_DIR)/deb +DEBIAN_CONTENT_DIR=$(DEBIAN_BUILD_DIR)/content +DEBIAN_PACKAGE=$(TARGET_DIR)/$(ARTIFACT_ID)_$(VERSION).deb +APT_API_BASE_URL=https://apt-api.cloudogu.com/api + +# choose the environment, if BUILD_URL environment variable is available then we are on ci (jenkins) +ifdef BUILD_URL +ENVIRONMENT=ci +else +ENVIRONMENT=local +endif + +YARN_TARGET=$(WORKDIR)/node_modules +BOWER_TARGET?=$(WORKDIR)/public/vendor +NODE_VERSION?=8 + +UID_NR:=$(shell id -u) +GID_NR:=$(shell id -g) +HOME_DIR=$(TMP_DIR)/home +PASSWD=$(TMP_DIR)/passwd +ETCGROUP=$(TMP_DIR)/group + +$(TMP_DIR): + @mkdir -p $(TMP_DIR) + +$(HOME_DIR): $(TMP_DIR) + @mkdir -p $(HOME_DIR) + +$(TARGET_DIR): + @mkdir -p $(TARGET_DIR) + +$(PASSWD): $(TMP_DIR) + @echo "$(USER):x:$(UID_NR):$(GID_NR):$(USER):/home/$(USER):/bin/bash" > $(PASSWD) + +$(ETCGROUP): $(TMP_DIR) + @echo "root:x:0:" > $(ETCGROUP) + @echo "$(USER):x:$(GID_NR):" >> $(ETCGROUP) + +$(UTILITY_BIN_PATH): + @mkdir -p $@ + +# Subdirectories of workdir where no mocks should be generated. 
+# Multiple directories can be separated by space, comma or whatever is not a word to regex. +MOCKERY_IGNORED=vendor,build,docs + +##@ General + +.PHONY: help +help: ## Display this help. + @awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m\033[0m\n"} /^[a-zA-Z_0-9-]+:.*?##/ { printf " \033[36m%-15s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST) + +.PHONY: info +info: ## Print build information + @echo "dumping build information ..." + @echo "Version : $(VERSION)" + @echo "Commit-ID : $(COMMIT_ID)" + @echo "Environment: $(ENVIRONMENT)" + @echo "Branch : $(BRANCH)" + @echo "Packages : $(PACKAGES)" + + +# go-get-tool will 'go get' any package $2 and install it to $1. +define go-get-tool + @[ -f $(1) ] || { \ + set -e ;\ + TMP_DIR=$$(mktemp -d) ;\ + cd $$TMP_DIR ;\ + go mod init tmp ;\ + echo "Downloading $(2)" ;\ + GOBIN=$(UTILITY_BIN_PATH) go install $(2) ;\ + rm -rf $$TMP_DIR ;\ + } +endef diff --git a/build/make/version-sha.mk b/build/make/version-sha.mk new file mode 100644 index 0000000..1335532 --- /dev/null +++ b/build/make/version-sha.mk @@ -0,0 +1,18 @@ +##@ Version + +# This makefile is used to get the sha256sum of a specific github tag-src.tar.gz or .zip. +# You may set any of the following variables before your make call to change the hash url. 
+ +SHA_SUM_ORGANISATION?="cloudogu" +SHA_SUM_REPOSITORY?="ecosystem" +SHA_SUM_FILE_TYPE?="tar.gz" +SHA_SUM_VERSION?="v20.04.4-2" +SHA_SUM_URL?="https://github.com/${SHA_SUM_ORGANISATION}/${SHA_SUM_REPOSITORY}/archive/refs/tags/${SHA_SUM_VERSION}.${SHA_SUM_FILE_TYPE}" + +.PHONY: sha-sum +sha-sum: ## Print out the version + @echo "Downloading from: ${SHA_SUM_URL}" + @wget -O - -o /dev/null "${SHA_SUM_URL}" > .download.for.hash \ + || (echo "Could not be downloaded" && exit 1) \ + && cat .download.for.hash | sha256sum + @rm -f .download.for.hash diff --git a/build/make/vulnerability-scan.mk b/build/make/vulnerability-scan.mk new file mode 100644 index 0000000..5698206 --- /dev/null +++ b/build/make/vulnerability-scan.mk @@ -0,0 +1,13 @@ +##@ Vulnerability scan + +GOVULNCHECK_BIN=${UTILITY_BIN_PATH}/govulncheck +GOVULNCHECK_VERSION?=latest + +${GOVULNCHECK_BIN}: ${UTILITY_BIN_PATH} + $(call go-get-tool,$(GOVULNCHECK_BIN),golang.org/x/vuln/cmd/govulncheck@$(GOVULNCHECK_VERSION)) + +.PHONY: govulncheck +govulncheck: ${GOVULNCHECK_BIN} ## This target is used to scan the go repository against known vulnerabilities + @echo "Start vulnerability against repository" + ${GOVULNCHECK_BIN} -show verbose ./... + @echo "Finished scan" \ No newline at end of file diff --git a/build/make/yarn.mk b/build/make/yarn.mk new file mode 100644 index 0000000..6ff7de9 --- /dev/null +++ b/build/make/yarn.mk @@ -0,0 +1,39 @@ +##@ Yarn dependency management + +YARN_LOCK=$(WORKDIR)/yarn.lock + +.PHONY: yarn-install +yarn-install: $(YARN_TARGET) ## Execute yarn install + +$(YARN_TARGET): $(YARN_LOCK) $(PASSWD) + @echo "Executing yarn..." + @docker run --rm \ + -u "$(UID_NR):$(GID_NR)" \ + -v $(PASSWD):/etc/passwd:ro \ + -v $(WORKDIR):$(WORKDIR) \ + -w $(WORKDIR) \ + node:$(NODE_VERSION) \ + yarn install + @touch $@ + +.PHONY yarn-publish-ci: +yarn-publish-ci: ## Execute yarn publish with '--non-interactive' flag to suppress the version prompt + @echo "Executing yarn publish..." 
+ @docker run --rm \ + -u "$(UID_NR):$(GID_NR)" \ + -v $(PASSWD):/etc/passwd:ro \ + -v $(WORKDIR):$(WORKDIR) \ + -w $(WORKDIR) \ + node:$(NODE_VERSION) \ + yarn publish --non-interactive + +.PHONY yarn-publish: ## Execute yarn publish +yarn-publish: $(YARN_BUILD_TARGET) + @echo "Executing yarn publish..." + @docker run --rm \ + -u "$(UID_NR):$(GID_NR)" \ + -v $(PASSWD):/etc/passwd:ro \ + -v $(WORKDIR):$(WORKDIR) \ + -w $(WORKDIR) \ + node:$(NODE_VERSION) \ + yarn publish diff --git a/dogu/doguName.go b/dogu/doguName.go new file mode 100644 index 0000000..3d85ca7 --- /dev/null +++ b/dogu/doguName.go @@ -0,0 +1,77 @@ +package dogu + +import ( + "errors" + "fmt" + "github.com/cloudogu/cesapp-lib/core" + "strings" +) + +type SimpleName string +type Namespace string + +// String returns the string representation of the SimpleName. +func (s SimpleName) String() string { + return string(s) +} + +type QualifiedVersion struct { + Name QualifiedName + Version core.Version +} +type QualifiedName struct { + SimpleName SimpleName + Namespace Namespace +} + +func NewQualifiedName(namespace Namespace, simpleName SimpleName) (QualifiedName, error) { + doguName := QualifiedName{Namespace: namespace, SimpleName: simpleName} + err := doguName.Validate() + if err != nil { + return QualifiedName{}, err + } + return doguName, nil +} + +func NewQualifiedVersion(name QualifiedName, version core.Version) (QualifiedVersion, error) { + err := name.Validate() + if err != nil { + return QualifiedVersion{}, err + } + return QualifiedVersion{ + Name: name, + Version: version, + }, nil +} + +func (name QualifiedName) Validate() error { + var errorList []error + if name.Namespace == "" { + errorList = append(errorList, fmt.Errorf("namespace of dogu %q must not be empty", name.SimpleName)) + } + if name.SimpleName == "" { + errorList = append(errorList, fmt.Errorf("dogu name must not be empty")) + } + if strings.Contains(string(name.Namespace), "/") { + errorList = append(errorList, 
fmt.Errorf("dogu name needs to be in the form 'namespace/dogu' but is '%s'", name)) + } + + return errors.Join(errorList...) +} + +// String returns the dogu name with namespace, e.g. official/postgresql +func (name QualifiedName) String() string { + return fmt.Sprintf("%s/%s", name.Namespace, name.SimpleName) +} + +// QualifiedNameFromString converts a qualified dogu as a string, e.g. "official/nginx", to a dedicated QualifiedName or raises an error if this is not possible. +func QualifiedNameFromString(qualifiedName string) (QualifiedName, error) { + splitName := strings.Split(qualifiedName, "/") + if len(splitName) != 2 { + return QualifiedName{}, fmt.Errorf("dogu name needs to be in the form 'namespace/dogu' but is '%s'", qualifiedName) + } + return NewQualifiedName( + Namespace(splitName[0]), + SimpleName(splitName[1]), + ) +} diff --git a/dogu/doguName_test.go b/dogu/doguName_test.go new file mode 100644 index 0000000..4cbd37f --- /dev/null +++ b/dogu/doguName_test.go @@ -0,0 +1,97 @@ +package dogu + +import ( + "fmt" + "github.com/cloudogu/cesapp-lib/core" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "testing" +) + +func TestQualifiedNameFromString(t *testing.T) { + tests := []struct { + name string + qualifiedName string + want QualifiedName + wantErr assert.ErrorAssertionFunc + }{ + { + name: "ok", + qualifiedName: "official/postgres", + want: QualifiedName{SimpleName("postgres"), Namespace("official")}, + wantErr: assert.NoError, + }, + { + name: "no ns", + qualifiedName: "postgres", + want: QualifiedName{}, + wantErr: func(t assert.TestingT, err error, i ...interface{}) bool { + return assert.ErrorContains(t, err, "dogu name needs to be in the form 'namespace/dogu' but is 'postgres'") + }, + }, + { + name: "no name", + qualifiedName: "official/", + want: QualifiedName{}, + wantErr: func(t assert.TestingT, err error, i ...interface{}) bool { + return assert.ErrorContains(t, err, "dogu name must not be empty") + }, + }, 
+ { + name: "double ns", + qualifiedName: "official/test/postgres", + want: QualifiedName{}, + wantErr: func(t assert.TestingT, err error, i ...interface{}) bool { + return assert.ErrorContains(t, err, "dogu name needs to be in the form 'namespace/dogu' but is 'official/test/postgres'") + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := QualifiedNameFromString(tt.qualifiedName) + if !tt.wantErr(t, err, fmt.Sprintf("QualifiedNameFromString(%v)", tt.qualifiedName)) { + return + } + assert.Equalf(t, tt.want, got, "QualifiedNameFromString(%v)", tt.qualifiedName) + }) + } +} + +func TestNewQualifiedVersion(t *testing.T) { + testVersion1, err := core.ParseVersion("1.0.0") + require.NoError(t, err) + + tests := []struct { + name string + qualifiedDogu QualifiedName + version core.Version + want QualifiedVersion + wantErr assert.ErrorAssertionFunc + }{ + { + name: "create QualifiedVersion", + qualifiedDogu: QualifiedName{SimpleName: "postgres", Namespace: "official"}, + version: testVersion1, + want: QualifiedVersion{Name: QualifiedName{SimpleName: "postgres", Namespace: "official"}, Version: testVersion1}, + wantErr: assert.NoError, + }, + { + name: "create QualifiedVersion with Parse", + qualifiedDogu: QualifiedName{SimpleName: "postgres", Namespace: "official/test"}, + version: testVersion1, + want: QualifiedVersion{}, + wantErr: func(t assert.TestingT, err error, i ...interface{}) bool { + return assert.ErrorContains(t, err, "dogu name needs to be in the form 'namespace/dogu' but is 'official/test/postgres'") + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := NewQualifiedVersion(tt.qualifiedDogu, tt.version) + if !tt.wantErr(t, err, fmt.Sprintf("NewQualifiedVersion(%v, %v)", tt.qualifiedDogu, tt.version)) { + return + } + assert.Equalf(t, tt.want, got, "NewQualifiedVersion(%v, %v)", tt.qualifiedDogu, tt.version) + }) + } +} diff --git a/dogu/interfaces.go b/dogu/interfaces.go new 
file mode 100644 index 0000000..bc9291d --- /dev/null +++ b/dogu/interfaces.go @@ -0,0 +1,19 @@ +package dogu + +import ( + "context" + "github.com/cloudogu/cesapp-lib/core" +) + +type RemoteDoguDescriptorRepository interface { + // GetLatest returns the dogu descriptor for a dogu from the remote server. + // DoguDescriptorNotFoundError if there is no descriptor for that dogu + // ConnectionError if there are any connection issues + // Generic Error if there are any other issues + GetLatest(context.Context, QualifiedName) (*core.Dogu, error) + // Get returns a version specific dogu descriptor. + // DoguDescriptorNotFoundError if there is no descriptor for that dogu + // ConnectionError if there are any connection issues + // Generic Error if there are any other issues + Get(context.Context, QualifiedVersion) (*core.Dogu, error) +} diff --git a/errors/errors.go b/errors/errors.go new file mode 100644 index 0000000..b11c950 --- /dev/null +++ b/errors/errors.go @@ -0,0 +1,128 @@ +package errors + +import "errors" + +type errorType int + +const ( + errNotFound errorType = iota + 1 + errConflict + errConnection + errAlreadyExists + errGeneric + errWatch + errUnauthorized + errForbidden +) + +var _ error = Error{} + +type Error struct { + errType errorType + cause error +} + +func (c Error) Error() string { + return c.cause.Error() +} + +func NewGenericError(err error) Error { + return Error{ + errType: errGeneric, + cause: err, + } +} + +func NewNotFoundError(err error) Error { + return Error{ + errType: errNotFound, + cause: err, + } +} + +func NewConflictError(err error) Error { + return Error{ + errType: errConflict, + cause: err, + } +} + +func NewConnectionError(err error) Error { + return Error{ + errType: errConnection, + cause: err, + } +} + +func NewAlreadyExistsError(err error) Error { + return Error{ + errType: errAlreadyExists, + cause: err, + } +} + +func NewWatchError(err error) Error { + return Error{ + errType: errWatch, + cause: err, + } +} + +func 
NewUnauthorizedError(err error) Error { + return Error{ + errType: errUnauthorized, + cause: err, + } +} + +func NewForbiddenError(err error) Error { + return Error{ + errType: errForbidden, + cause: err, + } +} + +func isError(err error, t errorType) bool { + var e Error + if ok := errors.As(err, &e); !ok { + return false + } + + if e.errType == t { + return true + } + + return false +} + +func IsGenericError(err error) bool { + return isError(err, errGeneric) +} + +func IsNotFoundError(err error) bool { + return isError(err, errNotFound) +} + +func IsConflictError(err error) bool { + return isError(err, errConflict) +} + +func IsConnectionError(err error) bool { + return isError(err, errConnection) +} + +func IsAlreadyExistsError(err error) bool { + return isError(err, errAlreadyExists) +} + +func IsWatchError(err error) bool { + return isError(err, errWatch) +} + +func IsUnauthorizedError(err error) bool { + return isError(err, errUnauthorized) +} + +func IsForbiddenError(err error) bool { + return isError(err, errForbidden) +} diff --git a/errors/errors_test.go b/errors/errors_test.go new file mode 100644 index 0000000..b873f83 --- /dev/null +++ b/errors/errors_test.go @@ -0,0 +1,344 @@ +package errors + +import ( + "github.com/stretchr/testify/assert" + "testing" +) + +func TestNewNotFoundError(t *testing.T) { + err := NewNotFoundError(assert.AnError) + + assert.Equal(t, assert.AnError, err.cause) + assert.Equal(t, errNotFound, err.errType) +} + +func TestNewConflictError(t *testing.T) { + err := NewConflictError(assert.AnError) + + assert.Equal(t, assert.AnError, err.cause) + assert.Equal(t, errConflict, err.errType) +} + +func TestNewConnectionError(t *testing.T) { + err := NewConnectionError(assert.AnError) + + assert.Equal(t, assert.AnError, err.cause) + assert.Equal(t, errConnection, err.errType) +} + +func TestNewGenericError(t *testing.T) { + err := NewGenericError(assert.AnError) + + assert.Equal(t, assert.AnError, err.cause) + assert.Equal(t, 
errGeneric, err.errType) +} + +func TestNewWatchError(t *testing.T) { + err := NewWatchError(assert.AnError) + + assert.Equal(t, assert.AnError, err.cause) + assert.Equal(t, errWatch, err.errType) +} + +func TestNewUnauthorizedErrorError(t *testing.T) { + err := NewUnauthorizedError(assert.AnError) + + assert.Equal(t, assert.AnError, err.cause) + assert.Equal(t, errUnauthorized, err.errType) +} + +func TestNewForbiddenErrorWatchError(t *testing.T) { + err := NewForbiddenError(assert.AnError) + + assert.Equal(t, assert.AnError, err.cause) + assert.Equal(t, errForbidden, err.errType) +} + +func TestError_Error(t *testing.T) { + err := Error{ + errType: 0, + cause: assert.AnError, + } + + assert.Equal(t, assert.AnError.Error(), err.Error()) +} + +func TestIsNotFoundError(t *testing.T) { + tests := []struct { + name string + err error + xResult bool + }{ + { + name: "NotFoundError", + err: NewNotFoundError(assert.AnError), + xResult: true, + }, + { + name: "ConflictError", + err: NewConflictError(assert.AnError), + xResult: false, + }, + { + name: "No config error", + err: assert.AnError, + xResult: false, + }, + { + name: "error is nil", + err: nil, + xResult: false, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + assert.Equal(t, tc.xResult, IsNotFoundError(tc.err)) + }) + } +} + +func TestIsConflictError(t *testing.T) { + tests := []struct { + name string + err error + xResult bool + }{ + { + name: "ConflictError", + err: NewConflictError(assert.AnError), + xResult: true, + }, + { + name: "NotFoundError", + err: NewNotFoundError(assert.AnError), + xResult: false, + }, + { + name: "No config error", + err: assert.AnError, + xResult: false, + }, + { + name: "error is nil", + err: nil, + xResult: false, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + assert.Equal(t, tc.xResult, IsConflictError(tc.err)) + }) + } +} + +func TestIsConnectionError(t *testing.T) { + tests := []struct { + name string + err error 
+ xResult bool + }{ + { + name: "ConnectionError", + err: NewConnectionError(assert.AnError), + xResult: true, + }, + { + name: "NotFoundError", + err: NewNotFoundError(assert.AnError), + xResult: false, + }, + { + name: "No config error", + err: assert.AnError, + xResult: false, + }, + { + name: "error is nil", + err: nil, + xResult: false, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + assert.Equal(t, tc.xResult, IsConnectionError(tc.err)) + }) + } +} + +func TestIsAlreadyExistsError(t *testing.T) { + tests := []struct { + name string + err error + xResult bool + }{ + { + name: "AlreadyExistsError", + err: NewAlreadyExistsError(assert.AnError), + xResult: true, + }, + { + name: "NotFoundError", + err: NewNotFoundError(assert.AnError), + xResult: false, + }, + { + name: "No config error", + err: assert.AnError, + xResult: false, + }, + { + name: "error is nil", + err: nil, + xResult: false, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + assert.Equal(t, tc.xResult, IsAlreadyExistsError(tc.err)) + }) + } +} + +func TestIsGenericError(t *testing.T) { + tests := []struct { + name string + err error + xResult bool + }{ + { + name: "GenericError", + err: NewGenericError(assert.AnError), + xResult: true, + }, + { + name: "NotFoundError", + err: NewNotFoundError(assert.AnError), + xResult: false, + }, + { + name: "No config error", + err: assert.AnError, + xResult: false, + }, + { + name: "error is nil", + err: nil, + xResult: false, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + assert.Equal(t, tc.xResult, IsGenericError(tc.err)) + }) + } +} + +func TestIsWatchError(t *testing.T) { + tests := []struct { + name string + err error + xResult bool + }{ + { + name: "WatchError", + err: NewWatchError(assert.AnError), + xResult: true, + }, + { + name: "NotFoundError", + err: NewNotFoundError(assert.AnError), + xResult: false, + }, + { + name: "No config error", + err: 
assert.AnError, + xResult: false, + }, + { + name: "error is nil", + err: nil, + xResult: false, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + assert.Equal(t, tc.xResult, IsWatchError(tc.err)) + }) + } +} + +func TestIsUnauthorizedError(t *testing.T) { + tests := []struct { + name string + err error + xResult bool + }{ + { + name: "UnauthorizedError", + err: NewUnauthorizedError(assert.AnError), + xResult: true, + }, + { + name: "NotFoundError", + err: NewNotFoundError(assert.AnError), + xResult: false, + }, + { + name: "No config error", + err: assert.AnError, + xResult: false, + }, + { + name: "error is nil", + err: nil, + xResult: false, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + assert.Equal(t, tc.xResult, IsUnauthorizedError(tc.err)) + }) + } +} + +func TestIsForbiddenError(t *testing.T) { + tests := []struct { + name string + err error + xResult bool + }{ + { + name: "ForbiddenError", + err: NewForbiddenError(assert.AnError), + xResult: true, + }, + { + name: "NotFoundError", + err: NewNotFoundError(assert.AnError), + xResult: false, + }, + { + name: "No config error", + err: assert.AnError, + xResult: false, + }, + { + name: "error is nil", + err: nil, + xResult: false, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + assert.Equal(t, tc.xResult, IsForbiddenError(tc.err)) + }) + } +} diff --git a/go.mod b/go.mod new file mode 100644 index 0000000..2337f19 --- /dev/null +++ b/go.mod @@ -0,0 +1,19 @@ +module github.com/cloudogu/ces-commons-lib + +go 1.23 + +require ( + github.com/cloudogu/cesapp-lib v0.15.0 + github.com/stretchr/testify v1.8.1 +) + +require ( + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/eapache/go-resiliency v1.2.0 // indirect + github.com/gammazero/toposort v0.1.1 // indirect + github.com/pkg/errors v0.9.1 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/sirupsen/logrus v1.8.1 // indirect + 
golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect +) diff --git a/go.sum b/go.sum new file mode 100644 index 0000000..019b87e --- /dev/null +++ b/go.sum @@ -0,0 +1,39 @@ +github.com/cloudogu/cesapp-lib v0.14.2 h1:AtRsQbS31d71DY7DXUhzwO07ZFI0Gz2+IHOhyeJdBhc= +github.com/cloudogu/cesapp-lib v0.14.2/go.mod h1:52PfkrFYg54FjF8Se8P9b1zOfAhmg3PBFKr7YfS+Q+8= +github.com/cloudogu/cesapp-lib v0.15.0 h1:5jcf3A0LQ2DUjoiLWMv1V/rHmV6m3u+8cYZiuvGLaSM= +github.com/cloudogu/cesapp-lib v0.15.0/go.mod h1:52PfkrFYg54FjF8Se8P9b1zOfAhmg3PBFKr7YfS+Q+8= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/eapache/go-resiliency v1.2.0 h1:v7g92e/KSN71Rq7vSThKaWIq68fL4YHvWyiUKorFR1Q= +github.com/eapache/go-resiliency v1.2.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5mFgVsvEsIPBvNs= +github.com/gammazero/toposort v0.1.1 h1:OivGxsWxF3U3+U80VoLJ+f50HcPU1MIqE1JlKzoJ2Eg= +github.com/gammazero/toposort v0.1.1/go.mod h1:H2cozTnNpMw0hg2VHAYsAxmkHXBYroNangj2NTBQDvw= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/sirupsen/logrus v1.8.1 
h1:dJKuHgqk1NNQlqoA6BTlM1Wf9DOH3NBjQyu0h9+AZZE= +github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0 h1:1zr/of2m5FGMsad5YfcqgdqdWrIhu+EBEJRhR1U7z/c= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk= +github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e h1:fLOSk5Q00efkSvAm+4xcoXD+RRmLmmulPn5I3Y9F2EM= +golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU= +gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/sonar-project.properties b/sonar-project.properties new file mode 100644 index 0000000..8f1eca0 --- /dev/null +++ b/sonar-project.properties @@ -0,0 +1,14 @@ 
+sonar.projectKey=ces-commons-lib +sonar.projectName=ces-commons-lib +sonar.projectVersion=0.0.0-dev + +sonar.go.coverage.reportPaths=target/unit-tests/coverage.out,target/k8s-integration-test/report-k8s-integration.out +sonar.go.golangci-lint.reportPaths=target/static-analysis/static-analysis-cs.log + +sonar.sources=. + +sonar.exclusions=**/mocks/**,**/zz_generated.deepcopy.go,**/*Mock.go,k8s/** + +sonar.tests=. +sonar.test.inclusions=**/*_test.go +sonar.test.exclusions=**/vendor/**,**/mocks/**,**/zz_generated.deepcopy.go,**/*Mock.go \ No newline at end of file